event-data-repository_1 | 2019-09-22 10:26:28.543 INFO [main] IgniteKernal%TemenosGrid - Config URL: n/a
event-data-repository_1 | 2019-09-22 10:26:28.603 INFO [main] IgniteKernal%TemenosGrid - IgniteConfiguration [igniteInstanceName=TemenosGrid, pubPoolSize=9, svcPoolSize=9, callbackPoolSize=9, stripedPoolSize=9, sysPoolSize=9, mgmtPoolSize=4, igfsPoolSize=9, dataStreamerPoolSize=9, utilityCachePoolSize=9, utilityCacheKeepAliveTime=60000, p2pPoolSize=2, qryPoolSize=9, igniteHome=null, igniteWorkDir=/tmp/ignite/work, mbeanSrv=com.sun.jmx.mbeanserver.JmxMBeanServer@6f94fa3e, nodeId=8651ce98-6d79-483a-8758-af6642cbb189, marsh=BinaryMarshaller [], marshLocJobs=false, daemon=false, p2pEnabled=true, netTimeout=5000, sndRetryDelay=1000, sndRetryCnt=3, metricsHistSize=10000, metricsUpdateFreq=2000, metricsExpTime=9223372036854775807, discoSpi=TcpDiscoverySpi [addrRslvr=null, sockTimeout=0, ackTimeout=0, marsh=null, reconCnt=10, reconDelay=2000, maxAckTimeout=600000, forceSrvMode=false, clientReconnectDisabled=false, internalLsnr=null], segPlc=STOP, segResolveAttempts=2, waitForSegOnStart=true, allResolversPassReq=true, segChkFreq=10000, commSpi=TcpCommunicationSpi [connectGate=null, connPlc=org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi$FirstConnectionPolicy@41709512, enableForcibleNodeKill=false, enableTroubleshootingLog=false, locAddr=null, locHost=null, locPort=47100, locPortRange=100, shmemPort=-1, directBuf=true, directSndBuf=false, idleConnTimeout=600000, connTimeout=5000, maxConnTimeout=600000, reconCnt=10, sockSndBuf=32768, sockRcvBuf=32768, msgQueueLimit=0, slowClientQueueLimit=0, nioSrvr=null, shmemSrv=null, usePairedConnections=false, connectionsPerNode=1, tcpNoDelay=true, filterReachableAddresses=false, ackSndThreshold=32, unackedMsgsBufSize=0, sockWriteTimeout=2000, boundTcpPort=-1, boundTcpShmemPort=-1, selectorsCnt=4, selectorSpins=0, addrRslvr=null, ctxInitLatch=java.util.concurrent.CountDownLatch@42039326[Count = 1], stopping=false], evtSpi=org.apache.ignite.spi.eventstorage.NoopEventStorageSpi@33308786, colSpi=NoopCollisionSpi [], deploySpi=LocalDeploymentSpi [], indexingSpi=org.apache.ignite.spi.indexing.noop.NoopIndexingSpi@7d1cfb8b, addrRslvr=null, encryptionSpi=org.apache.ignite.spi.encryption.noop.NoopEncryptionSpi@48e92c5c, clientMode=false, rebalanceThreadPoolSize=1, txCfg=TransactionConfiguration [txSerEnabled=false, dfltIsolation=REPEATABLE_READ, dfltConcurrency=PESSIMISTIC, dfltTxTimeout=0, txTimeoutOnPartitionMapExchange=0, pessimisticTxLogSize=0, pessimisticTxLogLinger=10000, tmLookupClsName=null, txManagerFactory=null, useJtaSync=false], cacheSanityCheckEnabled=true, discoStartupDelay=60000, deployMode=SHARED, p2pMissedCacheSize=100, locHost=null, timeSrvPortBase=31100, timeSrvPortRange=100, failureDetectionTimeout=30000, sysWorkerBlockedTimeout=null, clientFailureDetectionTimeout=60000, metricsLogFreq=60000, hadoopCfg=null, connectorCfg=ConnectorConfiguration [jettyPath=null, host=null, port=11211, noDelay=true, directBuf=false, sndBufSize=32768, rcvBufSize=32768, idleQryCurTimeout=600000, idleQryCurCheckFreq=60000, sndQueueLimit=0, selectorCnt=4, idleTimeout=7000, sslEnabled=false, sslClientAuth=false, sslCtxFactory=null, sslFactory=null, portRange=100, threadPoolSize=9, msgInterceptor=null], odbcCfg=null, warmupClos=null, atomicCfg=AtomicConfiguration [seqReserveSize=1000, cacheMode=PARTITIONED, backups=1, aff=null, grpName=null], classLdr=null, sslCtxFactory=null, platformCfg=null, binaryCfg=null, memCfg=null, pstCfg=null, dsCfg=DataStorageConfiguration [sysRegionInitSize=41943040, sysRegionMaxSize=104857600, pageSize=0, concLvl=0, dfltDataRegConf=DataRegionConfiguration [name=default, maxSize=2088558592, initSize=268435456, swapPath=null, pageEvictionMode=DISABLED, evictionThreshold=0.9, emptyPagesPoolSize=100, metricsEnabled=false, metricsSubIntervalCount=5, metricsRateTimeInterval=60000, persistenceEnabled=false, checkpointPageBufSize=0], dataRegions=[DataRegionConfiguration [name=1G_Region, maxSize=419430400, initSize=104857600, swapPath=null, pageEvictionMode=RANDOM_2_LRU, evictionThreshold=0.6, emptyPagesPoolSize=100, metricsEnabled=true, metricsSubIntervalCount=5, metricsRateTimeInterval=60000, persistenceEnabled=false, checkpointPageBufSize=0]], storagePath=/tmp/ignite/des/work, checkpointFreq=180000, lockWaitTime=10000, checkpointThreads=4, checkpointWriteOrder=SEQUENTIAL, walHistSize=20, maxWalArchiveSize=1073741824, walSegments=10, walSegmentSize=67108864, walPath=/tmp/ignite/des/wal, walArchivePath=/tmp/ignite/des/wal/archive, metricsEnabled=false, walMode=LOG_ONLY, walTlbSize=131072, walBuffSize=0, walFlushFreq=2000, walFsyncDelay=1000, walRecordIterBuffSize=67108864, alwaysWriteFullPages=false, fileIOFactory=org.apache.ignite.internal.processors.cache.persistence.file.AsyncFileIOFactory@7dda48d9, metricsSubIntervalCnt=5, metricsRateTimeInterval=60000, walAutoArchiveAfterInactivity=-1, writeThrottlingEnabled=false, walCompactionEnabled=false, walCompactionLevel=1, checkpointReadLockTimeout=null], activeOnStart=true, autoActivation=true, longQryWarnTimeout=3000, sqlConnCfg=null, cliConnCfg=ClientConnectorConfiguration [host=null, port=10800, portRange=100, sockSndBufSize=0, sockRcvBufSize=0, tcpNoDelay=true, maxOpenCursorsPerConn=128, threadPoolSize=9, idleTimeout=0, jdbcEnabled=true, odbcEnabled=true, thinCliEnabled=true, sslEnabled=false, useIgniteSslCtxFactory=true, sslClientAuth=false, sslCtxFactory=null], mvccVacuumThreadCnt=2, mvccVacuumFreq=5000, authEnabled=false, failureHnd=null, commFailureRslvr=null]
event-data-repository_1 | 2019-09-22 10:26:28.606 INFO [main] IgniteKernal%TemenosGrid - Daemon mode: off
event-data-repository_1 | 2019-09-22 10:26:28.607 INFO [main] IgniteKernal%TemenosGrid - OS: Linux 4.9.184-linuxkit amd64
event-data-repository_1 | 2019-09-22 10:26:28.607 INFO [main] IgniteKernal%TemenosGrid - OS user: root
event-data-repository_1 | 2019-09-22 10:26:28.608 INFO [main] IgniteKernal%TemenosGrid - PID: 6
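The IgniteConfiguration dump above records two in-memory data regions: the 'default' region (256 MiB initial, ~2 GiB max, no persistence) and '1G_Region' (100 MiB initial, 400 MiB max, RANDOM_2_LRU page eviction at a 0.6 fill threshold, metrics on), plus the work/WAL paths under /tmp/ignite. The node is actually configured from ignite-cache.xml (see the later IgniteFactory warnings); purely as an illustration, a Java sketch reproducing the logged region and path settings might look like this (values copied from the dump; the fluent-builder form is mine, not the deployment's actual config):

    import org.apache.ignite.configuration.DataPageEvictionMode;
    import org.apache.ignite.configuration.DataRegionConfiguration;
    import org.apache.ignite.configuration.DataStorageConfiguration;
    import org.apache.ignite.configuration.IgniteConfiguration;

    public class DataRegionSetup {
        public static IgniteConfiguration configure() {
            // Default region: 268435456 initial, 2088558592 max, persistence off (as logged).
            DataRegionConfiguration dflt = new DataRegionConfiguration()
                .setName("default")
                .setInitialSize(268435456L)
                .setMaxSize(2088558592L)
                .setPersistenceEnabled(false);

            // "1G_Region": Random-2-LRU page eviction kicking in at 60% fill,
            // with per-region metrics enabled (values from the dump).
            DataRegionConfiguration oneG = new DataRegionConfiguration()
                .setName("1G_Region")
                .setInitialSize(104857600L)
                .setMaxSize(419430400L)
                .setPageEvictionMode(DataPageEvictionMode.RANDOM_2_LRU)
                .setEvictionThreshold(0.6)
                .setMetricsEnabled(true);

            DataStorageConfiguration ds = new DataStorageConfiguration()
                .setDefaultDataRegionConfiguration(dflt)
                .setDataRegionConfigurations(oneG)
                .setStoragePath("/tmp/ignite/des/work")
                .setWalPath("/tmp/ignite/des/wal")
                .setWalArchivePath("/tmp/ignite/des/wal/archive");

            return new IgniteConfiguration()
                .setIgniteInstanceName("TemenosGrid")
                .setWorkDirectory("/tmp/ignite/work")
                .setDataStorageConfiguration(ds);
        }
    }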
event-data-repository_1 | 2019-09-22 10:26:28.608 INFO [main] IgniteKernal%TemenosGrid - Language runtime: Java Platform API Specification ver. 1.8
event-data-repository_1 | 2019-09-22 10:26:28.611 INFO [main] IgniteKernal%TemenosGrid - VM information: OpenJDK Runtime Environment 1.8.0_151-b12 Oracle Corporation OpenJDK 64-Bit Server VM 25.151-b12
event-data-repository_1 | 2019-09-22 10:26:28.613 INFO [main] IgniteKernal%TemenosGrid - VM total memory: 3.0GB
event-data-repository_1 | 2019-09-22 10:26:28.615 INFO [main] IgniteKernal%TemenosGrid - Remote Management [restart: off, REST: on, JMX (remote: on, port: 50510, auth: off, ssl: off)]
event-data-repository_1 | 2019-09-22 10:26:28.617 INFO [main] IgniteKernal%TemenosGrid - Logger: Log4J2Logger [quiet=false, config=log4j2.xml]
event-data-repository_1 | 2019-09-22 10:26:28.618 INFO [main] IgniteKernal%TemenosGrid - IGNITE_HOME=null
event-data-repository_1 | 2019-09-22 10:26:28.619 INFO [main] IgniteKernal%TemenosGrid - VM arguments: [-XX:+HeapDumpOnOutOfMemoryError, -XX:+AlwaysPreTouch, -XX:+UseG1GC, -XX:+ScavengeBeforeFullGC, -XX:+DisableExplicitGC, -Xms512M, -Xmx3072M, -Dfile.encoding=UTF-8, -Djava.net.preferIPv4Stack=true, -DIGNITE_MAX_INDEX_PAYLOAD_SIZE=135, -DIGNITE_QUIET=true, -DIGNITE_NO_ASCII=true, -DIGNITE_CONSOLE_APPENDER=false, -DIGNITE_PERFORMANCE_SUGGESTIONS_DISABLED=true, -Dcom.sun.management.jmxremote, -Dcom.sun.management.jmxremote.port=50510, -Dcom.sun.management.jmxremote.authenticate=false, -Dcom.sun.management.jmxremote.ssl=false, -Dcom.sun.management.jmxremote.rmi.port=50511, -Dcom.sun.management.jmxremote.local.only=false, -Djava.rmi.server.hostname=0.0.0.0]
event-data-repository_1 | 2019-09-22 10:26:28.621 INFO [main] IgniteKernal%TemenosGrid - System cache's DataRegion size is configured to 40 MB. Use DataStorageConfiguration.systemRegionInitialSize property to change the setting.
event-data-repository_1 | 2019-09-22 10:26:28.625 INFO [main] IgniteKernal%TemenosGrid - Configured caches [in 'sysMemPlc' dataRegion: ['ignite-sys-cache'], in 'default' dataRegion: ['TemenosDfXSDEventCache', 'DesEpaFailoverMemoryLockingCache'], in '1G_Region' dataRegion: ['FDataEventCache', 'TemenosDfEventCache', 'TemenosDfEventErrorCache']]
event-data-repository_1 | 2019-09-22 10:26:28.628 WARN [main] IgniteKernal%TemenosGrid - Peer class loading is enabled (disable it in production for performance and deployment consistency reasons)
event-data-repository_1 | 2019-09-22 10:26:28.645 INFO [main] IgniteKernal%TemenosGrid - Local node user attribute [PreLoadTables=]
event-data-repository_1 | 2019-09-22 10:26:28.908 INFO [main] IgnitePluginProcessor - Configured plugins:
event-data-repository_1 | 2019-09-22 10:26:28.911 INFO [main] IgnitePluginProcessor - ^-- WhiteListSecurity 1.0.0
event-data-repository_1 | 2019-09-22 10:26:28.912 INFO [main] IgnitePluginProcessor - ^-- Temenos
event-data-repository_1 | 2019-09-22 10:26:28.915 INFO [main] IgnitePluginProcessor -
event-data-repository_1 | 2019-09-22 10:26:28.918 INFO [main] FailureProcessor - Configured failure handler: [hnd=StopNodeOrHaltFailureHandler [tryStop=false, timeout=0, super=AbstractFailureHandler [ignoredFailureTypes=[SYSTEM_WORKER_BLOCKED, SYSTEM_CRITICAL_OPERATION_TIMEOUT]]]]
event-data-repository_1 | 2019-09-22 10:26:28.940 WARN [main] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:29.508 WARN [main] CACHE - IgniteHome=null Gridname=TemenosGrid
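The "Peer class loading is enabled" WARN above matches p2pEnabled=true in the configuration dump. If one wanted to follow the log's own advice for production, the switch is a single setter on IgniteConfiguration (a minimal sketch, not taken from this deployment's actual config):

    import org.apache.ignite.configuration.IgniteConfiguration;

    public class DisableP2p {
        // Turns off peer class deployment, silencing the WARN above.
        public static IgniteConfiguration apply(IgniteConfiguration cfg) {
            return cfg.setPeerClassLoadingEnabled(false);
        }
    }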
event-data-repository_1 | 2019-09-22 10:26:29.674 INFO [main] TcpCommunicationSpi - Successfully bound communication NIO server to TCP port [port=47100, locHost=0.0.0.0/0.0.0.0, selectorsCnt=4, selectorSpins=0, pairedConn=false]
event-data-repository_1 | 2019-09-22 10:26:29.683 WARN [main] TcpCommunicationSpi - Message queue limit is set to 0 which may lead to potential OOMEs when running cache operations in FULL_ASYNC or PRIMARY_SYNC modes due to message queues growth on sender and receiver sides.
event-data-repository_1 | 2019-09-22 10:26:29.764 WARN [main] NoopCheckpointSpi - Checkpoints are disabled (to enable configure any GridCheckpointSpi implementation)
event-data-repository_1 | 2019-09-22 10:26:29.910 WARN [main] GridCollisionManager - Collision resolution is disabled (all jobs will be activated upon arrival).
event-data-repository_1 | 2019-09-22 10:26:29.953 INFO [main] IgniteKernal%TemenosGrid - Security status [authentication=off, tls/ssl=off]
event-data-repository_1 | 2019-09-22 10:26:30.496 WARN [main] IgniteCacheDatabaseSharedManager - DataRegionConfiguration.maxWalArchiveSize instead DataRegionConfiguration.walHistorySize would be used for removing old archive wal files
event-data-repository_1 | 2019-09-22 10:26:30.572 INFO [main] PartitionsEvictManager - Evict partition permits=2
event-data-repository_1 | 2019-09-22 10:26:31.704 INFO [main] ClientListenerProcessor - Client connector processor has started on TCP port 10800
event-data-repository_1 | 2019-09-22 10:26:31.964 INFO [main] GridTcpRestProtocol - Command protocol successfully started [name=TCP binary, host=0.0.0.0/0.0.0.0, port=11211]
event-data-repository_1 | 2019-09-22 10:26:32.059 INFO [main] IgniteKernal%TemenosGrid - Non-loopback local IPs: 172.30.0.4
event-data-repository_1 | 2019-09-22 10:26:32.072 INFO [main] IgniteKernal%TemenosGrid - Enabled local MACs: 0242AC1E0004
event-data-repository_1 | 2019-09-22 10:26:32.200 INFO [main] TcpDiscoverySpi - Connection check threshold is calculated: 30000
event-data-repository_1 | 2019-09-22 10:26:32.221 INFO [main] TcpDiscoverySpi - Successfully bound to TCP port [port=47500, localHost=0.0.0.0/0.0.0.0, locNodeId=8651ce98-6d79-483a-8758-af6642cbb189]
event-data-repository_1 | 2019-09-22 10:26:32.239 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery accepted incoming connection [rmtAddr=/172.30.0.17, rmtPort=38457]
event-data-repository_1 | 2019-09-22 10:26:32.305 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery spawning a new thread for connection [rmtAddr=/172.30.0.17, rmtPort=38457]
event-data-repository_1 | 2019-09-22 10:26:32.316 INFO [tcp-disco-sock-reader-#4%TemenosGrid%] TcpDiscoverySpi - Started serving remote node connection [rmtAddr=/172.30.0.17:38457, rmtPort=38457]
event-data-repository_1 | 2019-09-22 10:26:32.342 INFO [disco-notifier-worker-#43%TemenosGrid%] MvccProcessorImpl - Assigned mvcc coordinator [crd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], crdNode=TcpDiscoveryNode [id=8651ce98-6d79-483a-8758-af6642cbb189, addrs=[127.0.0.1, 172.30.0.4], sockAddrs=[/127.0.0.1:47500, f5c42f352918/172.30.0.4:47500], discPort=47500, order=1, intOrder=1, lastExchangeTime=1569147992217, loc=true, ver=2.7.5#20190603-sha1:be4f2a15, isClient=false]]
event-data-repository_1 | 2019-09-22 10:26:32.527 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
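The TcpCommunicationSpi WARN above fires because msgQueueLimit=0 (unbounded) in the configuration dump. A sketch of capping the queue, assuming a hypothetical limit of 1024 messages suits this workload (the port is the one the log shows the SPI binding to):

    import org.apache.ignite.configuration.IgniteConfiguration;
    import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;

    public class BoundedCommQueue {
        public static IgniteConfiguration apply(IgniteConfiguration cfg) {
            TcpCommunicationSpi commSpi = new TcpCommunicationSpi();
            commSpi.setLocalPort(47100);        // matches the logged binding
            commSpi.setMessageQueueLimit(1024); // hypothetical cap; 0 (the logged value) means unbounded
            return cfg.setCommunicationSpi(commSpi);
        }
    }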
event-data-repository_1 | 2019-09-22 10:26:32.713 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=true, crd=true, evt=NODE_JOINED, evtNode=8651ce98-6d79-483a-8758-af6642cbb189, customEvt=null, allowMerge=false]
event-data-repository_1 | 2019-09-22 10:26:32.953 INFO [exchange-worker-#45%TemenosGrid%] msg - Components activation performed in 240 ms.
event-data-repository_1 | 2019-09-22 10:26:33.324 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - IgniteHome=null Gridname=TemenosGrid
event-data-repository_1 | 2019-09-22 10:26:33.636 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Added new node to topology: TcpDiscoveryNode [id=6ef17cac-0fcb-4368-9c3f-2f7bfe61fad5, addrs=[127.0.0.1, 172.30.0.17], sockAddrs=[/127.0.0.1:0, 8671a0955b5e/172.30.0.17:0], discPort=0, order=2, intOrder=2, lastExchangeTime=1569147992460, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true]
event-data-repository_1 | 2019-09-22 10:26:33.644 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Topology snapshot [ver=2, locNode=8651ce98, servers=1, clients=1, state=ACTIVE, CPUs=18, offheap=4.7GB, heap=4.0GB]
event-data-repository_1 | 2019-09-22 10:26:33.863 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Started cache [name=ignite-sys-cache, id=-2100569601, memoryPolicyName=sysMemPlc, mode=REPLICATED, atomicity=TRANSACTIONAL, backups=2147483647, mvcc=false], encryptionEnabled=false]
event-data-repository_1 | 2019-09-22 10:26:34.214 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Started cache [name=FDataEventCache, id=-2019592648, memoryPolicyName=1G_Region, mode=PARTITIONED, atomicity=ATOMIC, backups=1, mvcc=false], encryptionEnabled=false]
event-data-repository_1 | 2019-09-22 10:26:34.216 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery accepted incoming connection [rmtAddr=/172.30.0.12, rmtPort=49349]
event-data-repository_1 | 2019-09-22 10:26:34.218 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery spawning a new thread for connection [rmtAddr=/172.30.0.12, rmtPort=49349]
event-data-repository_1 | 2019-09-22 10:26:34.220 INFO [tcp-disco-sock-reader-#6%TemenosGrid%] TcpDiscoverySpi - Started serving remote node connection [rmtAddr=/172.30.0.12:49349, rmtPort=49349]
event-data-repository_1 | 2019-09-22 10:26:34.295 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Started cache [name=TemenosDfEventErrorCache, id=-1889985431, memoryPolicyName=1G_Region, mode=PARTITIONED, atomicity=TRANSACTIONAL, backups=1, mvcc=false], encryptionEnabled=false]
event-data-repository_1 | 2019-09-22 10:26:34.313 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Started cache [name=DesEpaFailoverMemoryLockingCache, id=-468445694, memoryPolicyName=default, mode=REPLICATED, atomicity=TRANSACTIONAL, backups=2147483647, mvcc=false], encryptionEnabled=false]
event-data-repository_1 | 2019-09-22 10:26:34.330 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:34.391 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Started cache [name=TemenosDfEventCache, id=517879827, memoryPolicyName=1G_Region, mode=PARTITIONED, atomicity=TRANSACTIONAL, backups=1, mvcc=false], encryptionEnabled=false]
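Each "Started cache" line above spells out the cache's placement; FDataEventCache, for instance, is PARTITIONED, ATOMIC, one backup, in the 1G_Region data region. Reconstructed as code for illustration (the key/value types are placeholders; the real definitions live in ignite-cache.xml):

    import org.apache.ignite.cache.CacheAtomicityMode;
    import org.apache.ignite.cache.CacheMode;
    import org.apache.ignite.configuration.CacheConfiguration;

    public class FDataEventCacheShape {
        // Mirrors the attributes logged for FDataEventCache; String/byte[] are assumed.
        public static CacheConfiguration<String, byte[]> config() {
            return new CacheConfiguration<String, byte[]>("FDataEventCache")
                .setCacheMode(CacheMode.PARTITIONED)
                .setAtomicityMode(CacheAtomicityMode.ATOMIC)
                .setBackups(1)
                .setDataRegionName("1G_Region");
        }
    }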
event-data-repository_1 | 2019-09-22 10:26:34.457 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Started cache [name=TemenosDfXSDEventCache, id=975938502, memoryPolicyName=default, mode=REPLICATED, atomicity=TRANSACTIONAL, backups=2147483647, mvcc=false], encryptionEnabled=false]
event-data-repository_1 | 2019-09-22 10:26:34.489 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Starting caches on local join performed in 1523 ms.
event-data-repository_1 | 2019-09-22 10:26:34.505 INFO [exchange-worker-#45%TemenosGrid%] GridAffinityAssignmentCache - Local node affinity assignment distribution is not ideal [cache=TemenosDfEventErrorCache, expectedPrimary=2048.00, actualPrimary=2048, expectedBackups=2048.00, actualBackups=0, warningThreshold=50.00%]
event-data-repository_1 | 2019-09-22 10:26:34.526 INFO [exchange-worker-#45%TemenosGrid%] GridAffinityAssignmentCache - Local node affinity assignment distribution is not ideal [cache=FDataEventCache, expectedPrimary=2048.00, actualPrimary=2048, expectedBackups=2048.00, actualBackups=0, warningThreshold=50.00%]
event-data-repository_1 | 2019-09-22 10:26:34.534 INFO [exchange-worker-#45%TemenosGrid%] GridAffinityAssignmentCache - Local node affinity assignment distribution is not ideal [cache=TemenosDfEventCache, expectedPrimary=2048.00, actualPrimary=2048, expectedBackups=2048.00, actualBackups=0, warningThreshold=50.00%]
event-data-repository_1 | 2019-09-22 10:26:34.574 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Skipped waiting for partitions release future (local node is joining) [topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]]
event-data-repository_1 | 2019-09-22 10:26:34.586 INFO [grid-nio-worker-tcp-comm-0-#26%TemenosGrid%] TcpCommunicationSpi - Accepted incoming communication connection [locAddr=/172.30.0.4:47100, rmtAddr=/172.30.0.17:34958]
event-data-repository_1 | 2019-09-22 10:26:34.815 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - IgniteHome=null Gridname=TemenosGrid
event-data-repository_1 | 2019-09-22 10:26:34.948 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Added new node to topology: TcpDiscoveryNode [id=01498fbf-ab3d-4335-89f5-41baab8cd985, addrs=[127.0.0.1, 172.30.0.12], sockAddrs=[/127.0.0.1:0, cbe1e92a7d23/172.30.0.12:0], discPort=0, order=3, intOrder=3, lastExchangeTime=1569147994271, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true]
event-data-repository_1 | 2019-09-22 10:26:34.949 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Topology snapshot [ver=3, locNode=8651ce98, servers=1, clients=2, state=ACTIVE, CPUs=27, offheap=7.0GB, heap=5.0GB]
event-data-repository_1 | 2019-09-22 10:26:35.683 INFO [grid-nio-worker-tcp-comm-1-#27%TemenosGrid%] TcpCommunicationSpi - Accepted incoming communication connection [locAddr=/172.30.0.4:47100, rmtAddr=/172.30.0.12:39500]
event-data-repository_1 | 2019-09-22 10:26:35.847 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - finishExchangeOnCoordinator [topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]]
event-data-repository_1 | 2019-09-22 10:26:35.848 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Affinity changes (coordinator) applied in 0 ms.
event-data-repository_1 | 2019-09-22 10:26:35.886 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Partitions validation performed in 37 ms.
event-data-repository_1 | 2019-09-22 10:26:35.923 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery accepted incoming connection [rmtAddr=/172.30.0.16, rmtPort=48605]
event-data-repository_1 | 2019-09-22 10:26:35.924 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery spawning a new thread for connection [rmtAddr=/172.30.0.16, rmtPort=48605]
event-data-repository_1 | 2019-09-22 10:26:35.925 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Partitions assignment performed in 37 ms.
event-data-repository_1 | 2019-09-22 10:26:35.931 INFO [tcp-disco-sock-reader-#8%TemenosGrid%] TcpDiscoverySpi - Started serving remote node connection [rmtAddr=/172.30.0.16:48605, rmtPort=48605]
event-data-repository_1 | 2019-09-22 10:26:36.124 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:36.251 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery accepted incoming connection [rmtAddr=/172.30.0.14, rmtPort=57851]
event-data-repository_1 | 2019-09-22 10:26:36.252 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery spawning a new thread for connection [rmtAddr=/172.30.0.14, rmtPort=57851]
event-data-repository_1 | 2019-09-22 10:26:36.254 INFO [tcp-disco-sock-reader-#10%TemenosGrid%] TcpDiscoverySpi - Started serving remote node connection [rmtAddr=/172.30.0.14:57851, rmtPort=57851]
event-data-repository_1 | 2019-09-22 10:26:36.286 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Preparing Full Message performed in 289 ms.
event-data-repository_1 | 2019-09-22 10:26:36.291 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Sending Full Message to all nodes performed in 0 ms.
event-data-repository_1 | 2019-09-22 10:26:36.292 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=1, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=1, minorTopVer=0], err=null]
event-data-repository_1 | 2019-09-22 10:26:36.305 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Detecting lost partitions performed in 11 ms.
event-data-repository_1 | 2019-09-22 10:26:36.322 INFO [exchange-worker-#45%TemenosGrid%] MvccProcessorImpl - Initialize local node as mvcc coordinator [node=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330]
event-data-repository_1 | 2019-09-22 10:26:36.371 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - IgniteHome=null Gridname=TemenosGrid
event-data-repository_1 | 2019-09-22 10:26:36.402 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Finish proxy initialization, cacheName=TemenosDfEventErrorCache, localNodeId=8651ce98-6d79-483a-8758-af6642cbb189
event-data-repository_1 | 2019-09-22 10:26:36.405 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Finish proxy initialization, cacheName=ignite-sys-cache, localNodeId=8651ce98-6d79-483a-8758-af6642cbb189
event-data-repository_1 | 2019-09-22 10:26:36.407 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Finish proxy initialization, cacheName=DesEpaFailoverMemoryLockingCache, localNodeId=8651ce98-6d79-483a-8758-af6642cbb189
event-data-repository_1 | 2019-09-22 10:26:36.409 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Finish proxy initialization, cacheName=FDataEventCache, localNodeId=8651ce98-6d79-483a-8758-af6642cbb189
event-data-repository_1 | 2019-09-22 10:26:36.413 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Finish proxy initialization, cacheName=TemenosDfEventCache, localNodeId=8651ce98-6d79-483a-8758-af6642cbb189
event-data-repository_1 | 2019-09-22 10:26:36.414 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Finish proxy initialization, cacheName=TemenosDfXSDEventCache, localNodeId=8651ce98-6d79-483a-8758-af6642cbb189
event-data-repository_1 | 2019-09-22 10:26:36.422 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0], evt=NODE_JOINED, evtNode=TcpDiscoveryNode [id=8651ce98-6d79-483a-8758-af6642cbb189, addrs=[127.0.0.1, 172.30.0.4], sockAddrs=[/127.0.0.1:47500, f5c42f352918/172.30.0.4:47500], discPort=47500, order=1, intOrder=1, lastExchangeTime=1569147994886, loc=true, ver=2.7.5#20190603-sha1:be4f2a15, isClient=false], done=true], topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0], durationFromInit=3742]
event-data-repository_1 | 2019-09-22 10:26:36.429 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0], crd=true]
event-data-repository_1 | 2019-09-22 10:26:36.453 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Added new node to topology: TcpDiscoveryNode [id=efab2d3b-b6d1-4b9e-9ec0-b52f8e88d12a, addrs=[127.0.0.1, 172.30.0.16], sockAddrs=[/127.0.0.1:0, 361694cb036f/172.30.0.16:0], discPort=0, order=4, intOrder=4, lastExchangeTime=1569147996102, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true]
event-data-repository_1 | 2019-09-22 10:26:36.455 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Topology snapshot [ver=4, locNode=8651ce98, servers=1, clients=3, state=ACTIVE, CPUs=36, offheap=9.3GB, heap=7.0GB]
event-data-repository_1 | 2019-09-22 10:26:36.506 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:36.513 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (obsolete exchange ID) [top=AffinityTopologyVersion [topVer=1, minorTopVer=0], evt=NODE_JOINED, node=8651ce98-6d79-483a-8758-af6642cbb189]
event-data-repository_1 | 2019-09-22 10:26:36.527 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=2, minorTopVer=0], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=false, crd=true, evt=NODE_JOINED, evtNode=6ef17cac-0fcb-4368-9c3f-2f7bfe61fad5, customEvt=null, allowMerge=true]
event-data-repository_1 | 2019-09-22 10:26:36.531 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=2, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=2, minorTopVer=0], err=null]
event-data-repository_1 | 2019-09-22 10:26:36.628 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=2, minorTopVer=0], evt=NODE_JOINED, evtNode=TcpDiscoveryNode [id=6ef17cac-0fcb-4368-9c3f-2f7bfe61fad5, addrs=[127.0.0.1, 172.30.0.17], sockAddrs=[/127.0.0.1:0, 8671a0955b5e/172.30.0.17:0], discPort=0, order=2, intOrder=2, lastExchangeTime=1569147992460, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true], done=true], topVer=AffinityTopologyVersion [topVer=2, minorTopVer=0], durationFromInit=101]
event-data-repository_1 | 2019-09-22 10:26:36.689 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=2, minorTopVer=0], crd=true]
event-data-repository_1 | 2019-09-22 10:26:36.725 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (obsolete exchange ID) [top=AffinityTopologyVersion [topVer=2, minorTopVer=0], evt=NODE_JOINED, node=6ef17cac-0fcb-4368-9c3f-2f7bfe61fad5]
event-data-repository_1 | 2019-09-22 10:26:36.728 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=3, minorTopVer=0], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=false, crd=true, evt=NODE_JOINED, evtNode=01498fbf-ab3d-4335-89f5-41baab8cd985, customEvt=null, allowMerge=true]
event-data-repository_1 | 2019-09-22 10:26:36.731 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=3, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=3, minorTopVer=0], err=null]
event-data-repository_1 | 2019-09-22 10:26:36.749 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=3, minorTopVer=0], evt=NODE_JOINED, evtNode=TcpDiscoveryNode [id=01498fbf-ab3d-4335-89f5-41baab8cd985, addrs=[127.0.0.1, 172.30.0.12], sockAddrs=[/127.0.0.1:0, cbe1e92a7d23/172.30.0.12:0], discPort=0, order=3, intOrder=3, lastExchangeTime=1569147994271, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true], done=true], topVer=AffinityTopologyVersion [topVer=3, minorTopVer=0], durationFromInit=14]
event-data-repository_1 | 2019-09-22 10:26:36.808 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=3, minorTopVer=0], crd=true]
event-data-repository_1 | 2019-09-22 10:26:36.829 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (obsolete exchange ID) [top=AffinityTopologyVersion [topVer=3, minorTopVer=0], evt=NODE_JOINED, node=01498fbf-ab3d-4335-89f5-41baab8cd985]
event-data-repository_1 | 2019-09-22 10:26:36.837 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=4, minorTopVer=0], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=false, crd=true, evt=NODE_JOINED, evtNode=efab2d3b-b6d1-4b9e-9ec0-b52f8e88d12a, customEvt=null, allowMerge=true]
event-data-repository_1 | 2019-09-22 10:26:36.858 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=4, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=4, minorTopVer=0], err=null]
event-data-repository_1 | 2019-09-22 10:26:36.843 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - IgniteHome=null Gridname=TemenosGrid
event-data-repository_1 | 2019-09-22 10:26:36.901 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=4, minorTopVer=0], evt=NODE_JOINED, evtNode=TcpDiscoveryNode [id=efab2d3b-b6d1-4b9e-9ec0-b52f8e88d12a, addrs=[127.0.0.1, 172.30.0.16], sockAddrs=[/127.0.0.1:0, 361694cb036f/172.30.0.16:0], discPort=0, order=4, intOrder=4, lastExchangeTime=1569147996102, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true], done=true], topVer=AffinityTopologyVersion [topVer=4, minorTopVer=0], durationFromInit=54]
event-data-repository_1 | 2019-09-22 10:26:36.904 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=4, minorTopVer=0], crd=true]
event-data-repository_1 | 2019-09-22 10:26:36.932 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Added new node to topology: TcpDiscoveryNode [id=8bf7a3ba-0b77-4bdb-adf2-01a90881ad63, addrs=[127.0.0.1, 172.30.0.14], sockAddrs=[f1b0a3175535/172.30.0.14:0, /127.0.0.1:0], discPort=0, order=5, intOrder=5, lastExchangeTime=1569147996383, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true]
event-data-repository_1 | 2019-09-22 10:26:36.932 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Topology snapshot [ver=5, locNode=8651ce98, servers=1, clients=4, state=ACTIVE, CPUs=45, offheap=12.0GB, heap=9.0GB]
event-data-repository_1 | 2019-09-22 10:26:36.975 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (obsolete exchange ID) [top=AffinityTopologyVersion [topVer=4, minorTopVer=0], evt=NODE_JOINED, node=efab2d3b-b6d1-4b9e-9ec0-b52f8e88d12a]
event-data-repository_1 | 2019-09-22 10:26:36.976 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=5, minorTopVer=0], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=false, crd=true, evt=NODE_JOINED, evtNode=8bf7a3ba-0b77-4bdb-adf2-01a90881ad63, customEvt=null, allowMerge=true]
event-data-repository_1 | 2019-09-22 10:26:36.984 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=5, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=5, minorTopVer=0], err=null]
event-data-repository_1 | 2019-09-22 10:26:36.996 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=5, minorTopVer=0], evt=NODE_JOINED, evtNode=TcpDiscoveryNode [id=8bf7a3ba-0b77-4bdb-adf2-01a90881ad63, addrs=[127.0.0.1, 172.30.0.14], sockAddrs=[f1b0a3175535/172.30.0.14:0, /127.0.0.1:0], discPort=0, order=5, intOrder=5, lastExchangeTime=1569147996383, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true], done=true], topVer=AffinityTopologyVersion [topVer=5, minorTopVer=0], durationFromInit=32]
event-data-repository_1 | 2019-09-22 10:26:36.996 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=5, minorTopVer=0], crd=true]
event-data-repository_1 | 2019-09-22 10:26:37.083 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (nothing scheduled) [top=AffinityTopologyVersion [topVer=5, minorTopVer=0], force=false, evt=NODE_JOINED, node=8bf7a3ba-0b77-4bdb-adf2-01a90881ad63]
event-data-repository_1 | 2019-09-22 10:26:37.259 INFO [main] IgniteKernal%TemenosGrid - To start Console Management & Monitoring run ignitevisorcmd.{sh|bat}
event-data-repository_1 | 2019-09-22 10:26:37.314 INFO [main] IgniteKernal%TemenosGrid - Data Regions Configured:
event-data-repository_1 | 2019-09-22 10:26:37.316 INFO [main] IgniteKernal%TemenosGrid - ^-- default [initSize=256.0 MiB, maxSize=1.9 GiB, persistence=false]
event-data-repository_1 | 2019-09-22 10:26:37.317 INFO [main] IgniteKernal%TemenosGrid - ^-- 1G_Region [initSize=100.0 MiB, maxSize=400.0 MiB, persistence=false]
event-data-repository_1 | 2019-09-22 10:26:37.319 INFO [main] IgniteKernal%TemenosGrid -
event-data-repository_1 |
event-data-repository_1 | >>> +----------------------------------------------------------------------+
event-data-repository_1 | >>> Ignite ver. 2.7.5#20190603-sha1:be4f2a158bcf79a52ab4f372a6576f20c4f86954
event-data-repository_1 | >>> +----------------------------------------------------------------------+
event-data-repository_1 | >>> OS name: Linux 4.9.184-linuxkit amd64
event-data-repository_1 | >>> CPU(s): 9
event-data-repository_1 | >>> Heap: 3.0GB
event-data-repository_1 | >>> VM name: 6@f5c42f352918
event-data-repository_1 | >>> Ignite instance name: TemenosGrid
event-data-repository_1 | >>> Local node [ID=8651CE98-6D79-483A-8758-AF6642CBB189, order=1, clientMode=false]
event-data-repository_1 | >>> Local node addresses: [f5c42f352918/127.0.0.1, /172.30.0.4]
event-data-repository_1 | >>> Local ports: TCP:10800 TCP:11211 TCP:47100 TCP:47500
event-data-repository_1 |
event-data-repository_1 | 2019-09-22 10:26:37.321 INFO [main] GridDiscoveryManager - Topology snapshot [ver=1, locNode=8651ce98, servers=1, clients=0, state=ACTIVE, CPUs=9, offheap=2.3GB, heap=3.0GB]
event-data-repository_1 | 2019-09-22 10:26:37.346 WARN [main] GridEventStorageManager - Added listener for disabled event type: CACHE_REBALANCE_STOPPED
event-data-repository_1 | 2019-09-22 10:26:37.352 WARN [main] GridEventStorageManager - Added listener for disabled event type: CACHE_REBALANCE_STARTED
event-data-repository_1 | 2019-09-22 10:26:37.423 WARN [main] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:37.572 INFO [grid-nio-worker-tcp-comm-2-#28%TemenosGrid%] TcpCommunicationSpi - Accepted incoming communication connection [locAddr=/172.30.0.4:47100, rmtAddr=/172.30.0.16:39138]
event-data-repository_1 | 2019-09-22 10:26:38.200 INFO [grid-nio-worker-tcp-comm-3-#29%TemenosGrid%] TcpCommunicationSpi - Accepted incoming communication connection [locAddr=/172.30.0.4:47100, rmtAddr=/172.30.0.14:53262]
event-data-repository_1 | 2019-09-22 10:26:38.395 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery accepted incoming connection [rmtAddr=/172.30.0.10, rmtPort=47999]
event-data-repository_1 | 2019-09-22 10:26:38.396 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery spawning a new thread for connection [rmtAddr=/172.30.0.10, rmtPort=47999]
event-data-repository_1 | 2019-09-22 10:26:38.398 INFO [tcp-disco-sock-reader-#12%TemenosGrid%] TcpDiscoverySpi - Started serving remote node connection [rmtAddr=/172.30.0.10:47999, rmtPort=47999]
event-data-repository_1 | 2019-09-22 10:26:38.628 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery accepted incoming connection [rmtAddr=/172.30.0.5, rmtPort=49867]
event-data-repository_1 | 2019-09-22 10:26:38.629 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery spawning a new thread for connection [rmtAddr=/172.30.0.5, rmtPort=49867]
event-data-repository_1 | 2019-09-22 10:26:38.659 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:38.662 INFO [tcp-disco-sock-reader-#13%TemenosGrid%] TcpDiscoverySpi - Started serving remote node connection [rmtAddr=/172.30.0.5:49867, rmtPort=49867]
event-data-repository_1 | 2019-09-22 10:26:38.753 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery accepted incoming connection [rmtAddr=/172.30.0.13, rmtPort=47601]
event-data-repository_1 | 2019-09-22 10:26:38.754 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery spawning a new thread for connection [rmtAddr=/172.30.0.13, rmtPort=47601]
event-data-repository_1 | 2019-09-22 10:26:38.763 INFO [tcp-disco-sock-reader-#15%TemenosGrid%] TcpDiscoverySpi - Started serving remote node connection [rmtAddr=/172.30.0.13:47601, rmtPort=47601]
event-data-repository_1 | 2019-09-22 10:26:38.984 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - IgniteHome=null Gridname=TemenosGrid
event-data-repository_1 | 2019-09-22 10:26:39.111 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:39.192 WARN [main] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:39.476 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - IgniteHome=null Gridname=TemenosGrid
event-data-repository_1 | 2019-09-22 10:26:39.747 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:39.753 WARN [main] CACHE - Ignite Cache: FDataEventCache created from cacheManager
event-data-repository_1 | StartDataRepository-->Ignite Initialization. Got cache FDataEventCache from Ignite.
event-data-repository_1 | 2019-09-22 10:26:39.811 WARN [main] GridEventStorageManager - Added listener for disabled event type: CACHE_REBALANCE_STOPPED
event-data-repository_1 | 2019-09-22 10:26:39.813 WARN [main] GridEventStorageManager - Added listener for disabled event type: CACHE_REBALANCE_STARTED
event-data-repository_1 | 2019-09-22 10:26:39.820 WARN [main] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:40.191 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - IgniteHome=null Gridname=TemenosGrid
event-data-repository_1 | 2019-09-22 10:26:40.207 WARN [main] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:40.264 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Added new node to topology: TcpDiscoveryNode [id=836b6c75-15d2-4628-9969-b554f82434b4, addrs=[127.0.0.1, 172.30.0.10], sockAddrs=[fbb16eceff6e/172.30.0.10:0, /127.0.0.1:0], discPort=0, order=6, intOrder=6, lastExchangeTime=1569147998604, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true]
event-data-repository_1 | 2019-09-22 10:26:40.266 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Topology snapshot [ver=6, locNode=8651ce98, servers=1, clients=5, state=ACTIVE, CPUs=54, offheap=14.0GB, heap=10.0GB]
event-data-repository_1 | 2019-09-22 10:26:40.278 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=6, minorTopVer=0], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=false, crd=true, evt=NODE_JOINED, evtNode=836b6c75-15d2-4628-9969-b554f82434b4, customEvt=null, allowMerge=true]
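The "Ignite Cache: FDataEventCache created from cacheManager" / "Got cache FDataEventCache from Ignite" pair above is the application (StartDataRepository) fetching its caches once the node is up, going through a JCache CacheManager. A rough plain-Ignite equivalent of that lookup, assuming the started instance is resolved by the instance name from the log:

    import org.apache.ignite.Ignite;
    import org.apache.ignite.IgniteCache;
    import org.apache.ignite.Ignition;

    public class CacheLookup {
        public static IgniteCache<String, byte[]> fDataEvents() {
            Ignite ignite = Ignition.ignite("TemenosGrid"); // already-started instance, by name
            return ignite.cache("FDataEventCache");         // returns null if the cache does not exist
        }
    }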
event-data-repository_1 | 2019-09-22 10:26:40.292 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=6, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=6, minorTopVer=0], err=null]
event-data-repository_1 | 2019-09-22 10:26:40.320 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=6, minorTopVer=0], evt=NODE_JOINED, evtNode=TcpDiscoveryNode [id=836b6c75-15d2-4628-9969-b554f82434b4, addrs=[127.0.0.1, 172.30.0.10], sockAddrs=[fbb16eceff6e/172.30.0.10:0, /127.0.0.1:0], discPort=0, order=6, intOrder=6, lastExchangeTime=1569147998604, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true], done=true], topVer=AffinityTopologyVersion [topVer=6, minorTopVer=0], durationFromInit=55]
event-data-repository_1 | 2019-09-22 10:26:40.326 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=6, minorTopVer=0], crd=true]
event-data-repository_1 | 2019-09-22 10:26:40.364 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Added new node to topology: TcpDiscoveryNode [id=068b66e7-f766-40b2-bca6-524980e6f569, addrs=[127.0.0.1, 172.30.0.5], sockAddrs=[/127.0.0.1:0, 45729869a33a/172.30.0.5:0], discPort=0, order=7, intOrder=7, lastExchangeTime=1569147998750, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true]
event-data-repository_1 | 2019-09-22 10:26:40.403 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Topology snapshot [ver=7, locNode=8651ce98, servers=1, clients=6, state=ACTIVE, CPUs=63, offheap=16.0GB, heap=11.0GB]
event-data-repository_1 | 2019-09-22 10:26:40.412 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Added new node to topology: TcpDiscoveryNode [id=9aafa8bc-4fd4-4336-9791-6e3d1e49ef4f, addrs=[127.0.0.1, 172.30.0.13], sockAddrs=[/127.0.0.1:0, f11caf81c0b8/172.30.0.13:0], discPort=0, order=8, intOrder=8, lastExchangeTime=1569147998830, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true]
event-data-repository_1 | 2019-09-22 10:26:40.414 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Topology snapshot [ver=8, locNode=8651ce98, servers=1, clients=7, state=ACTIVE, CPUs=72, offheap=19.0GB, heap=12.0GB]
event-data-repository_1 | 2019-09-22 10:26:40.428 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (obsolete exchange ID) [top=AffinityTopologyVersion [topVer=6, minorTopVer=0], evt=NODE_JOINED, node=836b6c75-15d2-4628-9969-b554f82434b4]
event-data-repository_1 | 2019-09-22 10:26:40.451 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=7, minorTopVer=0], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=false, crd=true, evt=NODE_JOINED, evtNode=068b66e7-f766-40b2-bca6-524980e6f569, customEvt=null, allowMerge=true]
event-data-repository_1 | 2019-09-22 10:26:40.456 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=7, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=7, minorTopVer=0], err=null]
event-data-repository_1 | 2019-09-22 10:26:40.498 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=7, minorTopVer=0], evt=NODE_JOINED, evtNode=TcpDiscoveryNode [id=068b66e7-f766-40b2-bca6-524980e6f569, addrs=[127.0.0.1, 172.30.0.5], sockAddrs=[/127.0.0.1:0, 45729869a33a/172.30.0.5:0], discPort=0, order=7, intOrder=7, lastExchangeTime=1569147998750, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true], done=true], topVer=AffinityTopologyVersion [topVer=7, minorTopVer=0], durationFromInit=34]
event-data-repository_1 | 2019-09-22 10:26:40.506 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=7, minorTopVer=0], crd=true]
event-data-repository_1 | 2019-09-22 10:26:40.545 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (obsolete exchange ID) [top=AffinityTopologyVersion [topVer=7, minorTopVer=0], evt=NODE_JOINED, node=068b66e7-f766-40b2-bca6-524980e6f569]
event-data-repository_1 | 2019-09-22 10:26:40.563 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=8, minorTopVer=0], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=false, crd=true, evt=NODE_JOINED, evtNode=9aafa8bc-4fd4-4336-9791-6e3d1e49ef4f, customEvt=null, allowMerge=true]
event-data-repository_1 | 2019-09-22 10:26:40.566 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=8, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=8, minorTopVer=0], err=null]
event-data-repository_1 | 2019-09-22 10:26:40.578 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=8, minorTopVer=0], evt=NODE_JOINED, evtNode=TcpDiscoveryNode [id=9aafa8bc-4fd4-4336-9791-6e3d1e49ef4f, addrs=[127.0.0.1, 172.30.0.13], sockAddrs=[/127.0.0.1:0, f11caf81c0b8/172.30.0.13:0], discPort=0, order=8, intOrder=8, lastExchangeTime=1569147998830, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true], done=true], topVer=AffinityTopologyVersion [topVer=8, minorTopVer=0], durationFromInit=19]
event-data-repository_1 | 2019-09-22 10:26:40.586 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=8, minorTopVer=0], crd=true]
event-data-repository_1 | 2019-09-22 10:26:40.624 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (nothing scheduled) [top=AffinityTopologyVersion [topVer=8, minorTopVer=0], force=false, evt=NODE_JOINED, node=9aafa8bc-4fd4-4336-9791-6e3d1e49ef4f]
event-data-repository_1 | 2019-09-22 10:26:40.772 WARN [main] CACHE - Ignite Cache: TemenosDfXSDEventCache created from cacheManager
event-data-repository_1 | StartDataRepository-->Ignite Initialization. Got cache TemenosDfXSDEventCache from Ignite.
event-data-repository_1 | 2019-09-22 10:26:40.774 WARN [main] GridEventStorageManager - Added listener for disabled event type: CACHE_REBALANCE_STOPPED
event-data-repository_1 | 2019-09-22 10:26:40.775 WARN [main] GridEventStorageManager - Added listener for disabled event type: CACHE_REBALANCE_STARTED
event-data-repository_1 | 2019-09-22 10:26:40.792 WARN [main] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:40.955 WARN [main] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:41.293 INFO [grid-nio-worker-tcp-comm-0-#26%TemenosGrid%] TcpCommunicationSpi - Accepted incoming communication connection [locAddr=/172.30.0.4:47100, rmtAddr=/172.30.0.5:35966]
event-data-repository_1 | 2019-09-22 10:26:41.322 INFO [grid-nio-worker-tcp-comm-1-#27%TemenosGrid%] TcpCommunicationSpi - Accepted incoming communication connection [locAddr=/172.30.0.4:47100, rmtAddr=/172.30.0.13:44314]
event-data-repository_1 | 2019-09-22 10:26:41.529 INFO [grid-nio-worker-tcp-comm-2-#28%TemenosGrid%] TcpCommunicationSpi - Accepted incoming communication connection [locAddr=/172.30.0.4:47100, rmtAddr=/172.30.0.10:33806]
event-data-repository_1 | 2019-09-22 10:26:42.036 WARN [main] CACHE - Ignite Cache: TemenosDfEventCache created from cacheManager
event-data-repository_1 | StartDataRepository-->Ignite Initialization. Got cache TemenosDfEventCache from Ignite.
event-data-repository_1 | StartDataRepository-->Ignite Initialization finished.
event-data-repository_1 | [10:26:42] New version is available at ignite.apache.org: 2.7.6
event-data-repository_1 | 2019-09-22 10:26:42.686 WARN [ignite-update-notifier-timer] GridUpdateNotifier - New version is available at ignite.apache.org: 2.7.6
event-data-repository_1 | 2019-09-22 10:26:49.115 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery accepted incoming connection [rmtAddr=/172.30.0.11, rmtPort=40113]
event-data-repository_1 | 2019-09-22 10:26:49.117 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery spawning a new thread for connection [rmtAddr=/172.30.0.11, rmtPort=40113]
event-data-repository_1 | 2019-09-22 10:26:49.118 INFO [tcp-disco-sock-reader-#18%TemenosGrid%] TcpDiscoverySpi - Started serving remote node connection [rmtAddr=/172.30.0.11:40113, rmtPort=40113]
event-data-repository_1 | 2019-09-22 10:26:49.150 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:49.200 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - IgniteHome=null Gridname=TemenosGrid
event-data-repository_1 | 2019-09-22 10:26:49.217 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Added new node to topology: TcpDiscoveryNode [id=e3cc16e4-3f27-4308-a2bd-0ac76d2f77df, addrs=[127.0.0.1, 172.30.0.11], sockAddrs=[3b8c20511cbc/172.30.0.11:0, /127.0.0.1:0], discPort=0, order=9, intOrder=9, lastExchangeTime=1569148009146, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true]
event-data-repository_1 | 2019-09-22 10:26:49.221 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Topology snapshot [ver=9, locNode=8651ce98, servers=1, clients=8, state=ACTIVE, CPUs=81, offheap=21.0GB, heap=13.0GB]
event-data-repository_1 | 2019-09-22 10:26:49.253 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=9, minorTopVer=0], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=false, crd=true, evt=NODE_JOINED, evtNode=e3cc16e4-3f27-4308-a2bd-0ac76d2f77df, customEvt=null, allowMerge=true]
event-data-repository_1 | 2019-09-22 10:26:49.254 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=9, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=9, minorTopVer=0], err=null]
event-data-repository_1 | 2019-09-22 10:26:49.257 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=9, minorTopVer=0], evt=NODE_JOINED, evtNode=TcpDiscoveryNode [id=e3cc16e4-3f27-4308-a2bd-0ac76d2f77df, addrs=[127.0.0.1, 172.30.0.11], sockAddrs=[3b8c20511cbc/172.30.0.11:0, /127.0.0.1:0], discPort=0, order=9, intOrder=9, lastExchangeTime=1569148009146, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true], done=true], topVer=AffinityTopologyVersion [topVer=9, minorTopVer=0], durationFromInit=10]
event-data-repository_1 | 2019-09-22 10:26:49.257 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=9, minorTopVer=0], crd=true]
event-data-repository_1 | 2019-09-22 10:26:49.293 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (nothing scheduled) [top=AffinityTopologyVersion [topVer=9, minorTopVer=0], force=false, evt=NODE_JOINED, node=e3cc16e4-3f27-4308-a2bd-0ac76d2f77df]
event-data-repository_1 | 2019-09-22 10:26:49.309 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery accepted incoming connection [rmtAddr=/172.30.0.15, rmtPort=33101]
event-data-repository_1 | 2019-09-22 10:26:49.310 INFO [tcp-disco-srvr-#3%TemenosGrid%] TcpDiscoverySpi - TCP discovery spawning a new thread for connection [rmtAddr=/172.30.0.15, rmtPort=33101]
event-data-repository_1 | 2019-09-22 10:26:49.311 INFO [tcp-disco-sock-reader-#20%TemenosGrid%] TcpDiscoverySpi - Started serving remote node connection [rmtAddr=/172.30.0.15:33101, rmtPort=33101]
event-data-repository_1 | 2019-09-22 10:26:49.449 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - Failed in IgniteFactory::getIgniteConfiguration with file: /app//conf/cache/ignite-cache.xml trying classpath
event-data-repository_1 | 2019-09-22 10:26:49.494 WARN [tcp-disco-msg-worker-#2%TemenosGrid%] CACHE - IgniteHome=null Gridname=TemenosGrid
event-data-repository_1 | 2019-09-22 10:26:49.544 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Added new node to topology: TcpDiscoveryNode [id=1b00378f-305e-48d7-8334-1fe568b37bf8, addrs=[127.0.0.1, 172.30.0.15], sockAddrs=[89234d316dbc/172.30.0.15:0, /127.0.0.1:0], discPort=0, order=10, intOrder=10, lastExchangeTime=1569148009440, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true]
event-data-repository_1 | 2019-09-22 10:26:49.549 INFO [disco-event-worker-#44%TemenosGrid%] GridDiscoveryManager - Topology snapshot [ver=10, locNode=8651ce98, servers=1, clients=9, state=ACTIVE, CPUs=90, offheap=23.0GB, heap=14.0GB]
event-data-repository_1 | 2019-09-22 10:26:49.564 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=0], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=false, crd=true, evt=NODE_JOINED, evtNode=1b00378f-305e-48d7-8334-1fe568b37bf8, customEvt=null, allowMerge=true]
event-data-repository_1 | 2019-09-22 10:26:49.567 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=10, minorTopVer=0], resVer=AffinityTopologyVersion [topVer=10, minorTopVer=0], err=null]
event-data-repository_1 | 2019-09-22 10:26:49.573 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=0], evt=NODE_JOINED, evtNode=TcpDiscoveryNode [id=1b00378f-305e-48d7-8334-1fe568b37bf8, addrs=[127.0.0.1, 172.30.0.15], sockAddrs=[89234d316dbc/172.30.0.15:0, /127.0.0.1:0], discPort=0, order=10, intOrder=10, lastExchangeTime=1569148009440, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true], done=true], topVer=AffinityTopologyVersion [topVer=10, minorTopVer=0], durationFromInit=10]
event-data-repository_1 | 2019-09-22 10:26:49.578 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=0], crd=true]
event-data-repository_1 | 2019-09-22 10:26:49.599 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (nothing scheduled) [top=AffinityTopologyVersion [topVer=10, minorTopVer=0], force=false, evt=NODE_JOINED, node=1b00378f-305e-48d7-8334-1fe568b37bf8]
event-data-repository_1 | 2019-09-22 10:26:49.835 INFO [grid-nio-worker-tcp-comm-3-#29%TemenosGrid%] TcpCommunicationSpi - Accepted incoming communication connection [locAddr=/172.30.0.4:47100, rmtAddr=/172.30.0.11:53236]
event-data-repository_1 | 2019-09-22 10:26:50.038 INFO [grid-nio-worker-tcp-comm-0-#26%TemenosGrid%] TcpCommunicationSpi - Accepted incoming communication connection [locAddr=/172.30.0.4:47100, rmtAddr=/172.30.0.15:34116]
event-data-repository_1 | 2019-09-22 10:26:50.356 INFO [exchange-worker-#45%TemenosGrid%] time - Started exchange init [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=1], mvccCrd=MvccCoordinator [nodeId=8651ce98-6d79-483a-8758-af6642cbb189, crdVer=1569147992330, topVer=AffinityTopologyVersion [topVer=1, minorTopVer=0]], mvccCrdChange=false, crd=true, evt=DISCOVERY_CUSTOM_EVT, evtNode=e3cc16e4-3f27-4308-a2bd-0ac76d2f77df, customEvt=DynamicCacheChangeBatch [id=a8d02885d61-ef68a681-4feb-44f7-ac6c-f79d838564e2, reqs=[DynamicCacheChangeRequest [cacheName=F_DATA_EVENTS, hasCfg=true, nodeId=e3cc16e4-3f27-4308-a2bd-0ac76d2f77df, clientStartOnly=false, stop=false, destroy=false, disabledAfterStartfalse]], exchangeActions=ExchangeActions [startCaches=[F_DATA_EVENTS], stopCaches=null, startGrps=[F_DATA_EVENTS], stopGrps=[], resetParts=null, stateChangeRequest=null], startCaches=false], allowMerge=false]
event-data-repository_1 | 2019-09-22 10:26:50.358 INFO [exchange-worker-#45%TemenosGrid%] CacheAffinitySharedManager - Updating caches registry performed in 1 ms.
event-data-repository_1 | 2019-09-22 10:26:50.388 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Started cache [name=F_DATA_EVENTS, id=-925827883, memoryPolicyName=default, mode=PARTITIONED, atomicity=ATOMIC, backups=0, mvcc=false], encryptionEnabled=false] event-data-repository_1 | 2019-09-22 10:26:50.403 INFO [exchange-worker-#45%TemenosGrid%] CacheAffinitySharedManager - Caches starting performed in 36 ms. event-data-repository_1 | 2019-09-22 10:26:50.409 INFO [exchange-worker-#45%TemenosGrid%] CacheAffinitySharedManager - Affinity initialization for started caches performed in 4 ms. event-data-repository_1 | 2019-09-22 10:26:50.463 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finished waiting for partition release future [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=1], waitTime=0ms, futInfo=NA, mode=DISTRIBUTED] event-data-repository_1 | 2019-09-22 10:26:50.473 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finished waiting for partitions release latch: ServerLatch [permits=0, pendingAcks=[], super=CompletableLatch [id=exchange, topVer=AffinityTopologyVersion [topVer=10, minorTopVer=1]]] event-data-repository_1 | 2019-09-22 10:26:50.486 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finished waiting for partition release future [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=1], waitTime=0ms, futInfo=NA, mode=LOCAL] event-data-repository_1 | 2019-09-22 10:26:50.583 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - finishExchangeOnCoordinator [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=1], resVer=AffinityTopologyVersion [topVer=10, minorTopVer=1]] event-data-repository_1 | 2019-09-22 10:26:50.584 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Affinity changes (coordinator) applied in 0 ms. event-data-repository_1 | 2019-09-22 10:26:50.587 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Partitions validation performed in 2 ms. event-data-repository_1 | 2019-09-22 10:26:50.593 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Partitions assignment performed in 5 ms. event-data-repository_1 | 2019-09-22 10:26:50.638 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Preparing Full Message performed in 38 ms. event-data-repository_1 | 2019-09-22 10:26:50.639 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Sending Full Message to all nodes performed in 0 ms. 
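
The DISCOVERY_CUSTOM_EVT exchange and the "Started cache [name=F_DATA_EVENTS ...]" records above are the server-side trace of a client node calling getOrCreateCache() for a cache no server node had started yet: the request travels through discovery as a DynamicCacheChangeBatch and triggers the minor-version partition exchange (topVer=10, minorTopVer=1). A minimal client-side sketch that would produce this sequence follows; the class, key/value types, and discovery settings are hypothetical, since the actual Temenos client and its ignite-cache.xml are not part of this log.

    import org.apache.ignite.Ignite;
    import org.apache.ignite.IgniteCache;
    import org.apache.ignite.Ignition;
    import org.apache.ignite.cache.CacheAtomicityMode;
    import org.apache.ignite.cache.CacheMode;
    import org.apache.ignite.configuration.CacheConfiguration;
    import org.apache.ignite.configuration.IgniteConfiguration;

    public class FDataEventsClientSketch {
        public static void main(String[] args) {
            // Join the grid as a client node (isClient=true in the topology snapshot).
            // Discovery SPI settings are omitted; defaults assume the servers are reachable.
            IgniteConfiguration cfg = new IgniteConfiguration()
                .setIgniteInstanceName("TemenosGrid")
                .setClientMode(true);

            try (Ignite ignite = Ignition.start(cfg)) {
                // Requesting a cache the servers do not have yet sends the
                // DynamicCacheChangeBatch seen above and starts it cluster-wide.
                CacheConfiguration<String, byte[]> ccfg =
                    new CacheConfiguration<String, byte[]>("F_DATA_EVENTS")
                        .setCacheMode(CacheMode.PARTITIONED)         // mode=PARTITIONED
                        .setAtomicityMode(CacheAtomicityMode.ATOMIC) // atomicity=ATOMIC
                        .setBackups(0);                              // backups=0
                IgniteCache<String, byte[]> cache = ignite.getOrCreateCache(ccfg);
                cache.put("probe-key", new byte[0]); // hypothetical usage
            }
        }
    }

Note that the cache was started with backups=0 on a topology with a single server: each partition exists in exactly one copy, so losing that server node loses the cache contents.
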
event-data-repository_1 | 2019-09-22 10:26:50.640 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Finish exchange future [startVer=AffinityTopologyVersion [topVer=10, minorTopVer=1], resVer=AffinityTopologyVersion [topVer=10, minorTopVer=1], err=null] event-data-repository_1 | 2019-09-22 10:26:50.672 INFO [exchange-worker-#45%TemenosGrid%] GridCacheProcessor - Finish proxy initialization, cacheName=F_DATA_EVENTS, localNodeId=8651ce98-6d79-483a-8758-af6642cbb189 event-data-repository_1 | 2019-09-22 10:26:50.679 INFO [exchange-worker-#45%TemenosGrid%] GridDhtPartitionsExchangeFuture - Completed partition exchange [localNode=8651ce98-6d79-483a-8758-af6642cbb189, exchange=GridDhtPartitionsExchangeFuture [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=1], evt=DISCOVERY_CUSTOM_EVT, evtNode=TcpDiscoveryNode [id=e3cc16e4-3f27-4308-a2bd-0ac76d2f77df, addrs=[127.0.0.1, 172.30.0.11], sockAddrs=[3b8c20511cbc/172.30.0.11:0, /127.0.0.1:0], discPort=0, order=9, intOrder=9, lastExchangeTime=1569148009146, loc=false, ver=2.7.5#20190603-sha1:be4f2a15, isClient=true], done=true], topVer=AffinityTopologyVersion [topVer=10, minorTopVer=1], durationFromInit=324] event-data-repository_1 | 2019-09-22 10:26:50.689 INFO [exchange-worker-#45%TemenosGrid%] time - Finished exchange init [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=1], crd=true] event-data-repository_1 | 2019-09-22 10:26:50.713 INFO [sys-#99%TemenosGrid%] GridCachePartitionExchangeManager - Fast replied to single message [exchId=GridDhtPartitionExchangeId [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=1], discoEvt=null, nodeId=e3cc16e4, evt=DISCOVERY_CUSTOM_EVT], nodeId=1b00378f-305e-48d7-8334-1fe568b37bf8] event-data-repository_1 | 2019-09-22 10:26:50.834 INFO [exchange-worker-#45%TemenosGrid%] GridCachePartitionExchangeManager - Skipping rebalancing (nothing scheduled) [top=AffinityTopologyVersion [topVer=10, minorTopVer=1], force=false, evt=DISCOVERY_CUSTOM_EVT, node=e3cc16e4-3f27-4308-a2bd-0ac76d2f77df] event-data-repository_1 | 2019-09-22 10:27:37.252 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:01:00.009] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=0.97%, avg=2.91%, GC=0%] event-data-repository_1 | ^-- PageMemory [pages=16665] event-data-repository_1 | ^-- Heap [used=138MB, free=95.49%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=65MB, free=97.47%, comm=436MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.18%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=48MB, free=87.93%, comm=100MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=9, qSize=0] event-data-repository_1 | 2019-09-22 10:27:37.253 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=3, reusePages=0] event-data-repository_1 | 2019-09-22 10:28:37.263 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics 
for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:02:00.017] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=0.7%, avg=1.87%, GC=0%] event-data-repository_1 | ^-- PageMemory [pages=16753] event-data-repository_1 | ^-- Heap [used=159MB, free=94.81%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=65MB, free=97.46%, comm=436MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=48MB, free=87.93%, comm=100MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:28:37.263 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=6, reusePages=0] event-data-repository_1 | 2019-09-22 10:29:21.723 INFO [sys-stripe-1-#2%TemenosGrid%] GridDeploymentLocalStore - Class locally deployed: class com.temenos.tafj.cache.impl.FDataEventProcessedTimeEntryProcessor event-data-repository_1 | 2019-09-22 10:29:37.263 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:03:00.017] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=7.67%, avg=2.47%, GC=0%] event-data-repository_1 | ^-- PageMemory [pages=17552] event-data-repository_1 | ^-- Heap [used=163MB, free=94.68%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=68MB, free=97.34%, comm=436MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=51MB, free=87.15%, comm=100MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=9, qSize=0] event-data-repository_1 | 2019-09-22 10:29:37.263 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:29:37.263 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=520, reusePages=0] event-data-repository_1 | 2019-09-22 10:30:30.008 INFO [sys-stripe-0-#1%TemenosGrid%] PageMemoryNoStoreImpl - Allocated next memory segment [plcName=1G_Region, chunkSize=268.4 MB] event-data-repository_1 | 2019-09-22 10:30:37.261 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:04:00.017] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- 
CPU [cur=25.93%, avg=4.74%, GC=0.03%] event-data-repository_1 | ^-- PageMemory [pages=34153] event-data-repository_1 | ^-- Heap [used=311MB, free=89.87%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=134MB, free=94.82%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=116MB, free=70.85%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=8, qSize=0] event-data-repository_1 | 2019-09-22 10:30:37.263 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:30:37.263 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=13493, reusePages=0] event-data-repository_1 | 2019-09-22 10:31:37.267 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:05:00.023] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=18.27%, avg=7.55%, GC=0.2%] event-data-repository_1 | ^-- PageMemory [pages=63217] event-data-repository_1 | ^-- Heap [used=123MB, free=95.97%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=248MB, free=90.42%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=230MB, free=42.3%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:31:37.268 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:31:37.268 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=36483, reusePages=0] event-data-repository_1 | 2019-09-22 10:31:43.296 WARN [sys-stripe-7-#8%TemenosGrid%] IgniteCacheDatabaseSharedManager - Page-based evictions started. 
Consider increasing 'maxSize' on Data Region configuration: 1G_Region event-data-repository_1 | 2019-09-22 10:32:37.271 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:06:00.023] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=16.63%, avg=8.51%, GC=0.07%] event-data-repository_1 | ^-- PageMemory [pages=70009] event-data-repository_1 | ^-- Heap [used=387MB, free=87.39%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=275MB, free=89.39%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=257MB, free=35.63%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:32:37.272 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:32:37.273 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=41243, reusePages=0] event-data-repository_1 | 2019-09-22 10:33:37.283 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:07:00.031] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=13.57%, avg=9.44%, GC=0.03%] event-data-repository_1 | ^-- PageMemory [pages=76713] event-data-repository_1 | ^-- Heap [used=278MB, free=90.95%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=301MB, free=88.37%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=283MB, free=29.04%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:33:37.293 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:33:37.293 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=47708, reusePages=99] event-data-repository_1 | 2019-09-22 10:34:37.303 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:08:00.056] event-data-repository_1 | ^-- H/N/C [hosts=10, 
nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=4.6%, avg=9.87%, GC=0%] event-data-repository_1 | ^-- PageMemory [pages=82997] event-data-repository_1 | ^-- Heap [used=406MB, free=86.75%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=326MB, free=87.42%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=308MB, free=22.87%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:34:37.310 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:34:37.310 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=52786, reusePages=96] event-data-repository_1 | 2019-09-22 10:35:37.319 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:09:00.074] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=0.6%, avg=8.84%, GC=0%] event-data-repository_1 | ^-- PageMemory [pages=82998] event-data-repository_1 | ^-- Heap [used=176MB, free=94.24%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=326MB, free=87.42%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=308MB, free=22.87%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:35:37.319 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:35:37.319 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=52786, reusePages=96] event-data-repository_1 | 2019-09-22 10:36:37.325 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:10:00.079] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=1.07%, avg=8.73%, GC=0%] event-data-repository_1 | ^-- PageMemory [pages=84790] event-data-repository_1 | ^-- Heap [used=242MB, free=92.11%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=333MB, free=87.15%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, 
free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=315MB, free=21.11%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:36:37.326 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:36:37.326 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=53924, reusePages=0] event-data-repository_1 | 2019-09-22 10:37:37.331 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:11:00.085] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=0.33%, avg=7.88%, GC=0%] event-data-repository_1 | ^-- PageMemory [pages=84790] event-data-repository_1 | ^-- Heap [used=269MB, free=91.23%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=333MB, free=87.15%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=315MB, free=21.11%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:37:37.331 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:37:37.332 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=53924, reusePages=0] event-data-repository_1 | 2019-09-22 10:38:37.338 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:12:00.093] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=0.37%, avg=7.2%, GC=0%] event-data-repository_1 | ^-- PageMemory [pages=84790] event-data-repository_1 | ^-- Heap [used=296MB, free=90.35%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=333MB, free=87.15%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=315MB, free=21.11%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 
2019-09-22 10:38:37.338 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:38:37.338 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=53924, reusePages=0] event-data-repository_1 | 2019-09-22 10:39:37.338 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:13:00.093] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=0.33%, avg=6.64%, GC=0%] event-data-repository_1 | ^-- PageMemory [pages=84790] event-data-repository_1 | ^-- Heap [used=323MB, free=89.48%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=333MB, free=87.15%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=315MB, free=21.11%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:39:37.339 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:39:37.339 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=53924, reusePages=0] event-data-repository_1 | 2019-09-22 10:40:37.343 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:14:00.098] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=3.73%, avg=6.28%, GC=0%] event-data-repository_1 | ^-- PageMemory [pages=84790] event-data-repository_1 | ^-- Heap [used=288MB, free=90.6%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=333MB, free=87.15%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=315MB, free=21.11%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:40:37.343 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:40:37.343 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=53626, reusePages=88] event-data-repository_1 | 2019-09-22 10:41:37.349 INFO 
[grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:15:00.103] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=25.17%, avg=6.6%, GC=0.03%] event-data-repository_1 | ^-- PageMemory [pages=87072] event-data-repository_1 | ^-- Heap [used=445MB, free=85.51%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=342MB, free=86.8%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=324MB, free=18.87%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:41:37.349 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:41:37.350 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=54349, reusePages=42] event-data-repository_1 | 2019-09-22 10:42:37.350 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:16:00.106] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=18.33%, avg=7.37%, GC=0.03%] event-data-repository_1 | ^-- PageMemory [pages=91644] event-data-repository_1 | ^-- Heap [used=311MB, free=89.85%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=360MB, free=86.11%, comm=692MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=342MB, free=14.38%, comm=356MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:42:37.350 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:42:37.350 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=57493, reusePages=16] event-data-repository_1 | 2019-09-22 10:43:28.386 INFO [sys-stripe-7-#8%TemenosGrid%] PageMemoryNoStoreImpl - Allocated next memory segment [plcName=1G_Region, chunkSize=46.1 MB] event-data-repository_1 | 2019-09-22 10:43:37.351 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, 
uptime=00:17:00.106] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=17.57%, avg=7.83%, GC=0.03%] event-data-repository_1 | ^-- PageMemory [pages=96406] event-data-repository_1 | ^-- Heap [used=319MB, free=89.61%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=378MB, free=85.38%, comm=736MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=361MB, free=9.7%, comm=400MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:43:37.351 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:43:37.351 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=61744, reusePages=74] event-data-repository_1 | 2019-09-22 10:44:37.355 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - event-data-repository_1 | Metrics for local node (to disable set 'metricsLogFrequency' to 0) event-data-repository_1 | ^-- Node [id=8651ce98, name=TemenosGrid, uptime=00:18:00.112] event-data-repository_1 | ^-- H/N/C [hosts=10, nodes=10, CPUs=90] event-data-repository_1 | ^-- CPU [cur=15.27%, avg=8.27%, GC=0.07%] event-data-repository_1 | ^-- PageMemory [pages=104464] event-data-repository_1 | ^-- Heap [used=342MB, free=88.84%, comm=512MB] event-data-repository_1 | ^-- Off-heap [used=410MB, free=84.16%, comm=736MB] event-data-repository_1 | ^-- sysMemPlc region [used=0MB, free=99.21%, comm=40MB] event-data-repository_1 | ^-- default region [used=16MB, free=99.16%, comm=256MB] event-data-repository_1 | ^-- 1G_Region region [used=392MB, free=1.78%, comm=400MB] event-data-repository_1 | ^-- TxLog region [used=0MB, free=100%, comm=40MB] event-data-repository_1 | ^-- Outbound messages queue [size=0] event-data-repository_1 | ^-- Public thread pool [active=0, idle=0, qSize=0] event-data-repository_1 | ^-- System thread pool [active=0, idle=6, qSize=0] event-data-repository_1 | 2019-09-22 10:44:37.356 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=8, reusePages=0] event-data-repository_1 | 2019-09-22 10:44:37.356 INFO [grid-timeout-worker-#25%TemenosGrid%] IgniteKernal%TemenosGrid - FreeList [name=TemenosGrid, buckets=256, dataPages=69540, reusePages=0] event-data-repository_1 | 2019-09-22 10:44:50.980 ERROR [sys-stripe-8-#9%TemenosGrid%] - Critical system error detected. 
Will be handled accordingly to configured handler [hnd=StopNodeOrHaltFailureHandler [tryStop=false, timeout=0, super=AbstractFailureHandler [ignoredFailureTypes=[SYSTEM_WORKER_BLOCKED, SYSTEM_CRITICAL_OPERATION_TIMEOUT]]], failureCtx=FailureContext [type=CRITICAL_ERROR, err=class o.a.i.i.mem.IgniteOutOfMemoryException: Out of memory in data region [name=1G_Region, initSize=100.0 MiB, maxSize=400.0 MiB, persistenceEnabled=false] Try the following:
event-data-repository_1 | ^-- Increase maximum off-heap memory size (DataRegionConfiguration.maxSize)
event-data-repository_1 | ^-- Enable Ignite persistence (DataRegionConfiguration.persistenceEnabled)
event-data-repository_1 | ^-- Enable eviction or expiration policies]]
event-data-repository_1 | org.apache.ignite.internal.mem.IgniteOutOfMemoryException: Out of memory in data region [name=1G_Region, initSize=100.0 MiB, maxSize=400.0 MiB, persistenceEnabled=false] Try the following:
event-data-repository_1 | ^-- Increase maximum off-heap memory size (DataRegionConfiguration.maxSize)
event-data-repository_1 | ^-- Enable Ignite persistence (DataRegionConfiguration.persistenceEnabled)
event-data-repository_1 | ^-- Enable eviction or expiration policies
event-data-repository_1 | at org.apache.ignite.internal.pagemem.impl.PageMemoryNoStoreImpl.allocatePage(PageMemoryNoStoreImpl.java:314) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.persistence.freelist.AbstractFreeList.allocateDataPage(AbstractFreeList.java:464) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.persistence.freelist.AbstractFreeList.insertDataRow(AbstractFreeList.java:491) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.persistence.freelist.CacheFreeListImpl.insertDataRow(CacheFreeListImpl.java:59) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.persistence.freelist.CacheFreeListImpl.insertDataRow(CacheFreeListImpl.java:35) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.persistence.RowStore.addRow(RowStore.java:98) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.createRow(IgniteCacheOffheapManagerImpl.java:1691) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheMapEntry$UpdateClosure.call(GridCacheMapEntry.java:5701) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheMapEntry$UpdateClosure.call(GridCacheMapEntry.java:5643) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.persistence.tree.BPlusTree$Invoke.invokeClosure(BPlusTree.java:3719) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.persistence.tree.BPlusTree$Invoke.access$5900(BPlusTree.java:3613) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.persistence.tree.BPlusTree.invokeDown(BPlusTree.java:1895) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.persistence.tree.BPlusTree.invoke(BPlusTree.java:1779) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.invoke0(IgniteCacheOffheapManagerImpl.java:1638) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.invoke(IgniteCacheOffheapManagerImpl.java:1621) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.IgniteCacheOffheapManagerImpl.invoke(IgniteCacheOffheapManagerImpl.java:428) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheMapEntry.storeValue(GridCacheMapEntry.java:4248) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheMapEntry.storeValue(GridCacheMapEntry.java:4226) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheMapEntry.innerGet0(GridCacheMapEntry.java:888) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheMapEntry.innerGet(GridCacheMapEntry.java:600) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateWithBatch(GridDhtAtomicCache.java:2095) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.update(GridDhtAtomicCache.java:1935) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateAllAsyncInternal0(GridDhtAtomicCache.java:1780) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateAllAsyncInternal(GridDhtAtomicCache.java:1668) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.processNearAtomicUpdateRequest(GridDhtAtomicCache.java:3138) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.access$400(GridDhtAtomicCache.java:135) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicCache$5.apply(GridDhtAtomicCache.java:271) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicCache$5.apply(GridDhtAtomicCache.java:266) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheIoManager.processMessage(GridCacheIoManager.java:1056) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheIoManager.onMessage0(GridCacheIoManager.java:581) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheIoManager.handleMessage(GridCacheIoManager.java:380) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheIoManager.handleMessage(GridCacheIoManager.java:306) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheIoManager.access$100(GridCacheIoManager.java:101) [ignite-core-2.7.5.jar:2.7.5]
event-data-repository_1 | at org.apache.ignite.internal.processors.cache.GridCacheIoManager$1.onMessage(GridCacheIoManager.java:295) [ignite-core-2.7.5.jar:2.7.5] event-data-repository_1 | at org.apache.ignite.internal.managers.communication.GridIoManager.invokeListener(GridIoManager.java:1569) [ignite-core-2.7.5.jar:2.7.5] event-data-repository_1 | at org.apache.ignite.internal.managers.communication.GridIoManager.processRegularMessage0(GridIoManager.java:1197) [ignite-core-2.7.5.jar:2.7.5] event-data-repository_1 | at org.apache.ignite.internal.managers.communication.GridIoManager.access$4200(GridIoManager.java:127) [ignite-core-2.7.5.jar:2.7.5] event-data-repository_1 | at org.apache.ignite.internal.managers.communication.GridIoManager$9.run(GridIoManager.java:1093) [ignite-core-2.7.5.jar:2.7.5] event-data-repository_1 | at org.apache.ignite.internal.util.StripedExecutor$Stripe.body(StripedExecutor.java:505) [ignite-core-2.7.5.jar:2.7.5] event-data-repository_1 | at org.apache.ignite.internal.util.worker.GridWorker.run(GridWorker.java:120) [ignite-core-2.7.5.jar:2.7.5] event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) [?:1.8.0_151] event-data-repository_1 | 2019-09-22 10:44:51.060 WARN [sys-stripe-8-#9%TemenosGrid%] FailureProcessor - No deadlocked threads detected. event-data-repository_1 | 2019-09-22 10:44:55.134 WARN [jvm-pause-detector-worker] IgniteKernal%TemenosGrid - Possible too long JVM pause: 4043 milliseconds. event-data-repository_1 | 2019-09-22 10:44:55.308 WARN [sys-stripe-8-#9%TemenosGrid%] FailureProcessor - Thread dump at 2019/09/22 10:44:55 UTC event-data-repository_1 | Thread [name="sys-#218%TemenosGrid%", id=442, state=TIMED_WAITING, blockCnt=0, waitCnt=1] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@4e10a94d, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection118-thread-1", id=441, state=TIMED_WAITING, blockCnt=0, waitCnt=7] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@6796dcd4, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at 
java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="sys-#217%TemenosGrid%", id=439, state=TIMED_WAITING, blockCnt=0, waitCnt=1] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@4e10a94d, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection116-thread-1", id=438, state=TIMED_WAITING, blockCnt=0, waitCnt=10] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@520f15a, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection114-thread-1", id=436, state=TIMED_WAITING, blockCnt=0, waitCnt=14] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@34fefbd1, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection112-thread-1", id=434, state=TIMED_WAITING, blockCnt=0, waitCnt=14] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@1c095f57, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection110-thread-1", id=432, state=TIMED_WAITING, blockCnt=0, waitCnt=12] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@4d28407e, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection108-thread-1", id=430, state=TIMED_WAITING, blockCnt=0, waitCnt=18] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@4f00d2a0, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="sys-#216%TemenosGrid%", id=428, state=TIMED_WAITING, blockCnt=0, waitCnt=1] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@4e10a94d, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="sys-#215%TemenosGrid%", id=427, state=TIMED_WAITING, blockCnt=0, waitCnt=1] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@4e10a94d, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="sys-#214%TemenosGrid%", id=426, state=TIMED_WAITING, blockCnt=0, waitCnt=1] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@4e10a94d, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) 
event-data-repository_1 | event-data-repository_1 | Thread [name="sys-#213%TemenosGrid%", id=425, state=TIMED_WAITING, blockCnt=0, waitCnt=1] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@4e10a94d, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection10-thread-4", id=415, state=TIMED_WAITING, blockCnt=0, waitCnt=40994] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@4198a25e, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection84-thread-3", id=406, state=TIMED_WAITING, blockCnt=0, waitCnt=192] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@ce388e2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection90-thread-3", id=405, 
state=TIMED_WAITING, blockCnt=0, waitCnt=192] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@5ac68dce, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection16-thread-4", id=404, state=TIMED_WAITING, blockCnt=0, waitCnt=192] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@55621d58, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection104-thread-3", id=403, state=TIMED_WAITING, blockCnt=0, waitCnt=192] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@1cbe4086, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection14-thread-4", id=402, state=TIMED_WAITING, blockCnt=0, 
waitCnt=188] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@c8a6f6b, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection82-thread-3", id=401, state=TIMED_WAITING, blockCnt=0, waitCnt=192] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@624c3bc9, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection106-thread-3", id=400, state=TIMED_WAITING, blockCnt=0, waitCnt=190] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@27016205, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection12-thread-3", id=399, state=TIMED_WAITING, blockCnt=0, waitCnt=190] event-data-repository_1 | 
Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@49d5bc13, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection96-thread-3", id=398, state=TIMED_WAITING, blockCnt=0, waitCnt=192] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@167b6375, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection86-thread-3", id=397, state=TIMED_WAITING, blockCnt=0, waitCnt=192] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@15ea9000, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection102-thread-3", id=396, state=TIMED_WAITING, blockCnt=0, waitCnt=190] event-data-repository_1 | Lock 
[object=java.util.concurrent.SynchronousQueue$TransferStack@3957d9f2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection100-thread-3", id=395, state=TIMED_WAITING, blockCnt=0, waitCnt=190] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@5d06cfef, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection20-thread-3", id=394, state=TIMED_WAITING, blockCnt=0, waitCnt=188] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@102b6ff1, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection94-thread-3", id=393, state=TIMED_WAITING, blockCnt=0, waitCnt=192] event-data-repository_1 | Lock 
[object=java.util.concurrent.SynchronousQueue$TransferStack@51809764, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection88-thread-3", id=392, state=TIMED_WAITING, blockCnt=0, waitCnt=192] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@569f5cef, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection98-thread-3", id=391, state=TIMED_WAITING, blockCnt=0, waitCnt=190] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@3d8d9864, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection18-thread-3", id=390, state=TIMED_WAITING, blockCnt=0, waitCnt=190] event-data-repository_1 | Lock 
[object=java.util.concurrent.SynchronousQueue$TransferStack@76922cc3, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection92-thread-3", id=389, state=TIMED_WAITING, blockCnt=0, waitCnt=194] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@237caec2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection6-thread-3", id=384, state=TIMED_WAITING, blockCnt=0, waitCnt=75258] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@2929129a, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection4-thread-3", id=383, state=TIMED_WAITING, blockCnt=0, waitCnt=75397] event-data-repository_1 | Lock 
[object=java.util.concurrent.SynchronousQueue$TransferStack@62b89c33, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection8-thread-4", id=381, state=TIMED_WAITING, blockCnt=0, waitCnt=75300] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@8bc1e31, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="jdbc-connection2-thread-3", id=380, state=TIMED_WAITING, blockCnt=0, waitCnt=75403] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@66364868, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="RMI TCP Connection(18)-172.30.0.1", id=370, state=RUNNABLE, blockCnt=0, waitCnt=5] event-data-repository_1 | at 
java.net.SocketInputStream.socketRead0(Native Method) event-data-repository_1 | at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:171) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:141) event-data-repository_1 | at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) event-data-repository_1 | at java.io.BufferedInputStream.read(BufferedInputStream.java:265) event-data-repository_1 | - locked java.io.BufferedInputStream@fa8e666 event-data-repository_1 | at java.io.FilterInputStream.read(FilterInputStream.java:83) event-data-repository_1 | at sun.rmi.transport.tcp.TCPTransport.handleMessages(TCPTransport.java:550) event-data-repository_1 | at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.run0(TCPTransport.java:826) event-data-repository_1 | at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.lambda$run$0(TCPTransport.java:683) event-data-repository_1 | at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler$$Lambda$116/934611515.run(Unknown Source) event-data-repository_1 | at java.security.AccessController.doPrivileged(Native Method) event-data-repository_1 | at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.run(TCPTransport.java:682) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Locked synchronizers: event-data-repository_1 | java.util.concurrent.ThreadPoolExecutor$Worker@424bcf34 event-data-repository_1 | Thread [name="RMI TCP Connection(idle)", id=255, state=TIMED_WAITING, blockCnt=10, waitCnt=13] event-data-repository_1 | Lock [object=java.util.concurrent.SynchronousQueue$TransferStack@5eb7114f, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) event-data-repository_1 | at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) event-data-repository_1 | at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="JMX server connection timeout 254", id=254, state=TIMED_WAITING, blockCnt=325, waitCnt=326] event-data-repository_1 | Lock [object=[I@66e5e115, ownerName=null, ownerId=-1] event-data-repository_1 | at java.lang.Object.wait(Native Method) event-data-repository_1 | at com.sun.jmx.remote.internal.ServerCommunicatorAdmin$Timeout.run(ServerCommunicatorAdmin.java:168) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="RMI TCP Connection(2)-172.30.0.1", id=253, state=TIMED_WAITING, blockCnt=133, 
waitCnt=139] event-data-repository_1 | Lock [object=com.sun.jmx.remote.internal.ArrayNotificationBuffer@4229a5d4, ownerName=null, ownerId=-1] event-data-repository_1 | at java.lang.Object.wait(Native Method) event-data-repository_1 | at com.sun.jmx.remote.internal.ArrayNotificationBuffer.fetchNotifications(ArrayNotificationBuffer.java:449) event-data-repository_1 | at com.sun.jmx.remote.internal.ArrayNotificationBuffer$ShareBuffer.fetchNotifications(ArrayNotificationBuffer.java:227) event-data-repository_1 | at com.sun.jmx.remote.internal.ServerNotifForwarder.fetchNotifs(ServerNotifForwarder.java:274) event-data-repository_1 | at javax.management.remote.rmi.RMIConnectionImpl$4.run(RMIConnectionImpl.java:1270) event-data-repository_1 | at javax.management.remote.rmi.RMIConnectionImpl$4.run(RMIConnectionImpl.java:1268) event-data-repository_1 | at javax.management.remote.rmi.RMIConnectionImpl.fetchNotifications(RMIConnectionImpl.java:1274) event-data-repository_1 | at sun.reflect.GeneratedMethodAccessor172.invoke(Unknown Source) event-data-repository_1 | at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) event-data-repository_1 | at java.lang.reflect.Method.invoke(Method.java:498) event-data-repository_1 | at sun.rmi.server.UnicastServerRef.dispatch(UnicastServerRef.java:357) event-data-repository_1 | at sun.rmi.transport.Transport$1.run(Transport.java:200) event-data-repository_1 | at sun.rmi.transport.Transport$1.run(Transport.java:197) event-data-repository_1 | at java.security.AccessController.doPrivileged(Native Method) event-data-repository_1 | at sun.rmi.transport.Transport.serviceCall(Transport.java:196) event-data-repository_1 | at sun.rmi.transport.tcp.TCPTransport.handleMessages(TCPTransport.java:568) event-data-repository_1 | at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.run0(TCPTransport.java:826) event-data-repository_1 | at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.lambda$run$0(TCPTransport.java:683) event-data-repository_1 | at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler$$Lambda$116/934611515.run(Unknown Source) event-data-repository_1 | at java.security.AccessController.doPrivileged(Native Method) event-data-repository_1 | at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.run(TCPTransport.java:682) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Locked synchronizers: event-data-repository_1 | java.util.concurrent.ThreadPoolExecutor$Worker@3c885a3a event-data-repository_1 | Thread [name="RMI Scheduler(0)", id=252, state=TIMED_WAITING, blockCnt=0, waitCnt=6] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@61b81185, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1093) event-data-repository_1 | at 
java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:809) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1074) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="ttl-cleanup-worker-#109%TemenosGrid%", id=228, state=TIMED_WAITING, blockCnt=0, waitCnt=2159] event-data-repository_1 | at java.lang.Thread.sleep(Native Method) event-data-repository_1 | at o.a.i.i.util.IgniteUtils.sleep(IgniteUtils.java:7775) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheSharedTtlCleanupManager$CleanupWorker.body(GridCacheSharedTtlCleanupManager.java:149) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-client-message-worker-#21%TemenosGrid%", id=226, state=TIMED_WAITING, blockCnt=0, waitCnt=1463] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@6fcff183, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorker.body(ServerImpl.java:7183) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorkerThread.body(ServerImpl.java:7119) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-sock-reader-#20%TemenosGrid%", id=225, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at java.net.SocketInputStream.socketRead0(Native Method) event-data-repository_1 | at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:171) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:141) event-data-repository_1 | at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) event-data-repository_1 | at java.io.BufferedInputStream.read1(BufferedInputStream.java:286) event-data-repository_1 | at java.io.BufferedInputStream.read(BufferedInputStream.java:345) event-data-repository_1 | - locked java.io.BufferedInputStream@39a4da6e event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerInputStreamWrapper.read(JdkMarshallerInputStreamWrapper.java:53) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.read(ObjectInputStream.java:2657) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2673) 
event-data-repository_1 | at java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:3150) event-data-repository_1 | at java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:859) event-data-repository_1 | at java.io.ObjectInputStream.(ObjectInputStream.java:355) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerObjectInputStream.(JdkMarshallerObjectInputStream.java:43) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshaller.unmarshal0(JdkMarshaller.java:137) event-data-repository_1 | at o.a.i.marshaller.AbstractNodeNameAwareMarshaller.unmarshal(AbstractNodeNameAwareMarshaller.java:94) event-data-repository_1 | at o.a.i.i.util.IgniteUtils.unmarshal(IgniteUtils.java:10086) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$SocketReader.body(ServerImpl.java:6246) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-client-message-worker-#19%TemenosGrid%", id=223, state=TIMED_WAITING, blockCnt=0, waitCnt=1478] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@4930a14d, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorker.body(ServerImpl.java:7183) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorkerThread.body(ServerImpl.java:7119) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-sock-reader-#18%TemenosGrid%", id=222, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at java.net.SocketInputStream.socketRead0(Native Method) event-data-repository_1 | at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:171) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:141) event-data-repository_1 | at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) event-data-repository_1 | at java.io.BufferedInputStream.read1(BufferedInputStream.java:286) event-data-repository_1 | at java.io.BufferedInputStream.read(BufferedInputStream.java:345) event-data-repository_1 | - locked java.io.BufferedInputStream@7fe87926 event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerInputStreamWrapper.read(JdkMarshallerInputStreamWrapper.java:53) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.read(ObjectInputStream.java:2657) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2673) event-data-repository_1 | at java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:3150) event-data-repository_1 | at 
java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:859) event-data-repository_1 | at java.io.ObjectInputStream.(ObjectInputStream.java:355) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerObjectInputStream.(JdkMarshallerObjectInputStream.java:43) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshaller.unmarshal0(JdkMarshaller.java:137) event-data-repository_1 | at o.a.i.marshaller.AbstractNodeNameAwareMarshaller.unmarshal(AbstractNodeNameAwareMarshaller.java:94) event-data-repository_1 | at o.a.i.i.util.IgniteUtils.unmarshal(IgniteUtils.java:10086) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$SocketReader.body(ServerImpl.java:6246) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="Timer-9", id=209, state=TIMED_WAITING, blockCnt=0, waitCnt=218] event-data-repository_1 | Lock [object=java.util.TaskQueue@fe87a67, ownerName=null, ownerId=-1] event-data-repository_1 | at java.lang.Object.wait(Native Method) event-data-repository_1 | at java.util.TimerThread.mainLoop(Timer.java:552) event-data-repository_1 | at java.util.TimerThread.run(Timer.java:505) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-client-message-worker-#17%TemenosGrid%", id=186, state=TIMED_WAITING, blockCnt=0, waitCnt=1505] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@1fbbd1f0, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorker.body(ServerImpl.java:7183) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorkerThread.body(ServerImpl.java:7119) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-client-message-worker-#16%TemenosGrid%", id=182, state=TIMED_WAITING, blockCnt=0, waitCnt=1531] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@53cd2183, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorker.body(ServerImpl.java:7183) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at 
o.a.i.spi.discovery.tcp.ServerImpl$MessageWorkerThread.body(ServerImpl.java:7119) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-sock-reader-#15%TemenosGrid%", id=181, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at java.net.SocketInputStream.socketRead0(Native Method) event-data-repository_1 | at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:171) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:141) event-data-repository_1 | at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) event-data-repository_1 | at java.io.BufferedInputStream.read1(BufferedInputStream.java:286) event-data-repository_1 | at java.io.BufferedInputStream.read(BufferedInputStream.java:345) event-data-repository_1 | - locked java.io.BufferedInputStream@373b2b6b event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerInputStreamWrapper.read(JdkMarshallerInputStreamWrapper.java:53) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.read(ObjectInputStream.java:2657) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2673) event-data-repository_1 | at java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:3150) event-data-repository_1 | at java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:859) event-data-repository_1 | at java.io.ObjectInputStream.(ObjectInputStream.java:355) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerObjectInputStream.(JdkMarshallerObjectInputStream.java:43) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshaller.unmarshal0(JdkMarshaller.java:137) event-data-repository_1 | at o.a.i.marshaller.AbstractNodeNameAwareMarshaller.unmarshal(AbstractNodeNameAwareMarshaller.java:94) event-data-repository_1 | at o.a.i.i.util.IgniteUtils.unmarshal(IgniteUtils.java:10086) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$SocketReader.body(ServerImpl.java:6246) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-sock-reader-#13%TemenosGrid%", id=179, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at java.net.SocketInputStream.socketRead0(Native Method) event-data-repository_1 | at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:171) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:141) event-data-repository_1 | at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) event-data-repository_1 | at java.io.BufferedInputStream.read1(BufferedInputStream.java:286) event-data-repository_1 | at java.io.BufferedInputStream.read(BufferedInputStream.java:345) event-data-repository_1 | - locked java.io.BufferedInputStream@51e069aa event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerInputStreamWrapper.read(JdkMarshallerInputStreamWrapper.java:53) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.read(ObjectInputStream.java:2657) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2673) event-data-repository_1 | at 
java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:3150) event-data-repository_1 | at java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:859) event-data-repository_1 | at java.io.ObjectInputStream.(ObjectInputStream.java:355) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerObjectInputStream.(JdkMarshallerObjectInputStream.java:43) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshaller.unmarshal0(JdkMarshaller.java:137) event-data-repository_1 | at o.a.i.marshaller.AbstractNodeNameAwareMarshaller.unmarshal(AbstractNodeNameAwareMarshaller.java:94) event-data-repository_1 | at o.a.i.i.util.IgniteUtils.unmarshal(IgniteUtils.java:10086) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$SocketReader.body(ServerImpl.java:6246) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-client-message-worker-#14%TemenosGrid%", id=180, state=TIMED_WAITING, blockCnt=0, waitCnt=1544] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@40e70f6c, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorker.body(ServerImpl.java:7183) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorkerThread.body(ServerImpl.java:7119) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-sock-reader-#12%TemenosGrid%", id=176, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at java.net.SocketInputStream.socketRead0(Native Method) event-data-repository_1 | at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:171) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:141) event-data-repository_1 | at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) event-data-repository_1 | at java.io.BufferedInputStream.read1(BufferedInputStream.java:286) event-data-repository_1 | at java.io.BufferedInputStream.read(BufferedInputStream.java:345) event-data-repository_1 | - locked java.io.BufferedInputStream@4e08be52 event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerInputStreamWrapper.read(JdkMarshallerInputStreamWrapper.java:53) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.read(ObjectInputStream.java:2657) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2673) event-data-repository_1 | at java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:3150) event-data-repository_1 | at java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:859) 
event-data-repository_1 | at java.io.ObjectInputStream.(ObjectInputStream.java:355) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerObjectInputStream.(JdkMarshallerObjectInputStream.java:43) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshaller.unmarshal0(JdkMarshaller.java:137) event-data-repository_1 | at o.a.i.marshaller.AbstractNodeNameAwareMarshaller.unmarshal(AbstractNodeNameAwareMarshaller.java:94) event-data-repository_1 | at o.a.i.i.util.IgniteUtils.unmarshal(IgniteUtils.java:10086) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$SocketReader.body(ServerImpl.java:6246) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="Timer-7", id=168, state=TIMED_WAITING, blockCnt=0, waitCnt=219] event-data-repository_1 | Lock [object=java.util.TaskQueue@c906e55, ownerName=null, ownerId=-1] event-data-repository_1 | at java.lang.Object.wait(Native Method) event-data-repository_1 | at java.util.TimerThread.mainLoop(Timer.java:552) event-data-repository_1 | at java.util.TimerThread.run(Timer.java:505) event-data-repository_1 | event-data-repository_1 | Thread [name="session-timeout-worker-#36%TemenosGrid%", id=86, state=TIMED_WAITING, blockCnt=0, waitCnt=1094] event-data-repository_1 | at java.lang.Thread.sleep(Native Method) event-data-repository_1 | at o.a.i.i.processors.rest.GridRestProcessor$4.body(GridRestProcessor.java:495) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="srvc-deploy-#92%TemenosGrid%", id=158, state=WAITING, blockCnt=0, waitCnt=6] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@4d651a66, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) event-data-repository_1 | at java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:442) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1074) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) event-data-repository_1 | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-client-message-worker-#11%TemenosGrid%", id=156, state=TIMED_WAITING, blockCnt=0, waitCnt=1560] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@779eeb70, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at 
java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorker.body(ServerImpl.java:7183) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorkerThread.body(ServerImpl.java:7119) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-sock-reader-#10%TemenosGrid%", id=154, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at java.net.SocketInputStream.socketRead0(Native Method) event-data-repository_1 | at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:171) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:141) event-data-repository_1 | at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) event-data-repository_1 | at java.io.BufferedInputStream.read1(BufferedInputStream.java:286) event-data-repository_1 | at java.io.BufferedInputStream.read(BufferedInputStream.java:345) event-data-repository_1 | - locked java.io.BufferedInputStream@7295cffc event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerInputStreamWrapper.read(JdkMarshallerInputStreamWrapper.java:53) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.read(ObjectInputStream.java:2657) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2673) event-data-repository_1 | at java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:3150) event-data-repository_1 | at java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:859) event-data-repository_1 | at java.io.ObjectInputStream.(ObjectInputStream.java:355) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerObjectInputStream.(JdkMarshallerObjectInputStream.java:43) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshaller.unmarshal0(JdkMarshaller.java:137) event-data-repository_1 | at o.a.i.marshaller.AbstractNodeNameAwareMarshaller.unmarshal(AbstractNodeNameAwareMarshaller.java:94) event-data-repository_1 | at o.a.i.i.util.IgniteUtils.unmarshal(IgniteUtils.java:10086) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$SocketReader.body(ServerImpl.java:6246) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-client-message-worker-#9%TemenosGrid%", id=153, state=TIMED_WAITING, blockCnt=0, waitCnt=1630] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@ffa1680, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorker.body(ServerImpl.java:7183) 
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorkerThread.body(ServerImpl.java:7119)
event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62)
event-data-repository_1 |
event-data-repository_1 | Thread [name="tcp-disco-sock-reader-#8%TemenosGrid%", id=150, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 | at java.net.SocketInputStream.socketRead0(Native Method)
event-data-repository_1 | at java.net.SocketInputStream.socketRead(SocketInputStream.java:116)
event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:171)
event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:141)
event-data-repository_1 | at java.io.BufferedInputStream.fill(BufferedInputStream.java:246)
event-data-repository_1 | at java.io.BufferedInputStream.read1(BufferedInputStream.java:286)
event-data-repository_1 | at java.io.BufferedInputStream.read(BufferedInputStream.java:345)
event-data-repository_1 | - locked java.io.BufferedInputStream@1c9bd2b6
event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerInputStreamWrapper.read(JdkMarshallerInputStreamWrapper.java:53)
event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.read(ObjectInputStream.java:2657)
event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2673)
event-data-repository_1 | at java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:3150)
event-data-repository_1 | at java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:859)
event-data-repository_1 | at java.io.ObjectInputStream.<init>(ObjectInputStream.java:355)
event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerObjectInputStream.<init>(JdkMarshallerObjectInputStream.java:43)
event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshaller.unmarshal0(JdkMarshaller.java:137)
event-data-repository_1 | at o.a.i.marshaller.AbstractNodeNameAwareMarshaller.unmarshal(AbstractNodeNameAwareMarshaller.java:94)
event-data-repository_1 | at o.a.i.i.util.IgniteUtils.unmarshal(IgniteUtils.java:10086)
event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$SocketReader.body(ServerImpl.java:6246)
event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-31-#85%TemenosGrid%", id=144, state=TIMED_WAITING, blockCnt=4, waitCnt=811]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-30-#84%TemenosGrid%", id=143, state=TIMED_WAITING, blockCnt=3, waitCnt=814]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-29-#83%TemenosGrid%", id=142, state=TIMED_WAITING, blockCnt=3, waitCnt=802]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-28-#82%TemenosGrid%", id=141, state=TIMED_WAITING, blockCnt=5, waitCnt=821]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-27-#81%TemenosGrid%", id=140, state=TIMED_WAITING, blockCnt=5, waitCnt=818]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-26-#80%TemenosGrid%", id=139, state=TIMED_WAITING, blockCnt=6, waitCnt=814]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-25-#79%TemenosGrid%", id=138, state=TIMED_WAITING, blockCnt=6, waitCnt=815] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-24-#78%TemenosGrid%", id=137, state=TIMED_WAITING, blockCnt=4, waitCnt=823] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-23-#77%TemenosGrid%", id=136, state=TIMED_WAITING, blockCnt=2, waitCnt=808] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-22-#76%TemenosGrid%", id=135, state=TIMED_WAITING, blockCnt=3, waitCnt=810] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-21-#75%TemenosGrid%", id=134, state=TIMED_WAITING, blockCnt=5, waitCnt=817] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071) 
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-20-#74%TemenosGrid%", id=133, state=TIMED_WAITING, blockCnt=3, waitCnt=813]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-19-#73%TemenosGrid%", id=132, state=TIMED_WAITING, blockCnt=3, waitCnt=805]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-18-#72%TemenosGrid%", id=131, state=TIMED_WAITING, blockCnt=5, waitCnt=820]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-17-#71%TemenosGrid%", id=130, state=TIMED_WAITING, blockCnt=5, waitCnt=807]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-16-#70%TemenosGrid%", id=129, state=TIMED_WAITING, blockCnt=3, waitCnt=818]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-15-#69%TemenosGrid%", id=128, state=TIMED_WAITING, blockCnt=5, waitCnt=808]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-14-#68%TemenosGrid%", id=127, state=TIMED_WAITING, blockCnt=6, waitCnt=822]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-13-#67%TemenosGrid%", id=126, state=TIMED_WAITING, blockCnt=5, waitCnt=815]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-12-#66%TemenosGrid%", id=125, state=TIMED_WAITING, blockCnt=5, waitCnt=821]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-11-#65%TemenosGrid%", id=124, state=TIMED_WAITING, blockCnt=4, waitCnt=808]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-10-#64%TemenosGrid%", id=123, state=TIMED_WAITING, blockCnt=6, waitCnt=810]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-9-#63%TemenosGrid%", id=122, state=TIMED_WAITING, blockCnt=3, waitCnt=806]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-8-#62%TemenosGrid%", id=121, state=TIMED_WAITING, blockCnt=3, waitCnt=811]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-7-#61%TemenosGrid%", id=120, state=TIMED_WAITING, blockCnt=3, waitCnt=805]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-6-#60%TemenosGrid%", id=119, state=TIMED_WAITING, blockCnt=6, waitCnt=810]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-5-#59%TemenosGrid%", id=118, state=TIMED_WAITING, blockCnt=5, waitCnt=818]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-4-#58%TemenosGrid%", id=117, state=TIMED_WAITING, blockCnt=2, waitCnt=813]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-3-#57%TemenosGrid%", id=116, state=TIMED_WAITING, blockCnt=8, waitCnt=812]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071)
event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="flusher-2-#56%TemenosGrid%", id=115, state=TIMED_WAITING, blockCnt=7, waitCnt=805] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-1-#55%TemenosGrid%", id=114, state=TIMED_WAITING, blockCnt=3, waitCnt=805] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-0-#54%TemenosGrid%", id=113, state=TIMED_WAITING, blockCnt=5, waitCnt=814] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:338) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableNonCoalescing(GridCacheWriteBehindStore.java:1071) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1023) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-client-message-worker-#7%TemenosGrid%", id=112, state=TIMED_WAITING, blockCnt=0, waitCnt=1600] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@1723ec, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorker.body(ServerImpl.java:7183) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorkerThread.body(ServerImpl.java:7119) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | 
event-data-repository_1 | Thread [name="flusher-7-#53%TemenosGrid%", id=111, state=TIMED_WAITING, blockCnt=0, waitCnt=11107] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@65c4afa2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2163) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableCoalescing(GridCacheWriteBehindStore.java:1042) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1016) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-6-#52%TemenosGrid%", id=110, state=TIMED_WAITING, blockCnt=0, waitCnt=11050] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@65c4afa2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2163) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableCoalescing(GridCacheWriteBehindStore.java:1042) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1016) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-5-#51%TemenosGrid%", id=109, state=TIMED_WAITING, blockCnt=0, waitCnt=11014] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@65c4afa2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2163) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableCoalescing(GridCacheWriteBehindStore.java:1042) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1016) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-4-#50%TemenosGrid%", id=108, state=TIMED_WAITING, blockCnt=0, waitCnt=11030] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@65c4afa2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at 
java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2163) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableCoalescing(GridCacheWriteBehindStore.java:1042) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1016) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-3-#49%TemenosGrid%", id=107, state=TIMED_WAITING, blockCnt=0, waitCnt=11027] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@65c4afa2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2163) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableCoalescing(GridCacheWriteBehindStore.java:1042) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1016) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-2-#48%TemenosGrid%", id=106, state=TIMED_WAITING, blockCnt=0, waitCnt=11041] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@65c4afa2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2163) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableCoalescing(GridCacheWriteBehindStore.java:1042) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1016) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-1-#47%TemenosGrid%", id=105, state=TIMED_WAITING, blockCnt=0, waitCnt=10998] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@65c4afa2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2163) event-data-repository_1 | at 
o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableCoalescing(GridCacheWriteBehindStore.java:1042) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1016) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="flusher-0-#46%TemenosGrid%", id=104, state=TIMED_WAITING, blockCnt=0, waitCnt=11039] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@65c4afa2, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2163) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.awaitOperationsAvailableCoalescing(GridCacheWriteBehindStore.java:1042) event-data-repository_1 | at o.a.i.i.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1016) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-sock-reader-#6%TemenosGrid%", id=103, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at java.net.SocketInputStream.socketRead0(Native Method) event-data-repository_1 | at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:171) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:141) event-data-repository_1 | at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) event-data-repository_1 | at java.io.BufferedInputStream.read1(BufferedInputStream.java:286) event-data-repository_1 | at java.io.BufferedInputStream.read(BufferedInputStream.java:345) event-data-repository_1 | - locked java.io.BufferedInputStream@4e6445b6 event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerInputStreamWrapper.read(JdkMarshallerInputStreamWrapper.java:53) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.read(ObjectInputStream.java:2657) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2673) event-data-repository_1 | at java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:3150) event-data-repository_1 | at java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:859) event-data-repository_1 | at java.io.ObjectInputStream.(ObjectInputStream.java:355) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerObjectInputStream.(JdkMarshallerObjectInputStream.java:43) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshaller.unmarshal0(JdkMarshaller.java:137) event-data-repository_1 | at o.a.i.marshaller.AbstractNodeNameAwareMarshaller.unmarshal(AbstractNodeNameAwareMarshaller.java:94) event-data-repository_1 | at o.a.i.i.util.IgniteUtils.unmarshal(IgniteUtils.java:10086) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$SocketReader.body(ServerImpl.java:6246) event-data-repository_1 | 
at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="exchange-worker-#45%TemenosGrid%", id=101, state=TIMED_WAITING, blockCnt=0, waitCnt=222] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@65811d01, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.i.processors.cache.GridCachePartitionExchangeManager$ExchangeWorker.body0(GridCachePartitionExchangeManager.java:2613) event-data-repository_1 | at o.a.i.i.processors.cache.GridCachePartitionExchangeManager$ExchangeWorker.body(GridCachePartitionExchangeManager.java:2540) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="ignite-update-notifier-timer", id=100, state=TIMED_WAITING, blockCnt=0, waitCnt=2] event-data-repository_1 | Lock [object=java.util.TaskQueue@3a904927, ownerName=null, ownerId=-1] event-data-repository_1 | at java.lang.Object.wait(Native Method) event-data-repository_1 | at java.util.TimerThread.mainLoop(Timer.java:552) event-data-repository_1 | at java.util.TimerThread.run(Timer.java:505) event-data-repository_1 | event-data-repository_1 | Thread [name="upd-ver-checker", id=99, state=TIMED_WAITING, blockCnt=0, waitCnt=219] event-data-repository_1 | at java.lang.Thread.sleep(Native Method) event-data-repository_1 | at o.a.i.i.processors.cluster.GridUpdateNotifier$1.run(GridUpdateNotifier.java:115) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-client-message-worker-#5%TemenosGrid%", id=98, state=TIMED_WAITING, blockCnt=0, waitCnt=1479] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@3a105da, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorker.body(ServerImpl.java:7183) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorkerThread.body(ServerImpl.java:7119) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="disco-event-worker-#44%TemenosGrid%", id=97, state=WAITING, blockCnt=0, 
waitCnt=751] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@43d803b3, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) event-data-repository_1 | at java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:442) event-data-repository_1 | at o.a.i.i.managers.discovery.GridDiscoveryManager$DiscoveryWorker.body0(GridDiscoveryManager.java:2856) event-data-repository_1 | at o.a.i.i.managers.discovery.GridDiscoveryManager$DiscoveryWorker.body(GridDiscoveryManager.java:2825) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-sock-reader-#4%TemenosGrid%", id=96, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at java.net.SocketInputStream.socketRead0(Native Method) event-data-repository_1 | at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:171) event-data-repository_1 | at java.net.SocketInputStream.read(SocketInputStream.java:141) event-data-repository_1 | at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) event-data-repository_1 | at java.io.BufferedInputStream.read1(BufferedInputStream.java:286) event-data-repository_1 | at java.io.BufferedInputStream.read(BufferedInputStream.java:345) event-data-repository_1 | - locked java.io.BufferedInputStream@45159806 event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerInputStreamWrapper.read(JdkMarshallerInputStreamWrapper.java:53) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.read(ObjectInputStream.java:2657) event-data-repository_1 | at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2673) event-data-repository_1 | at java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:3150) event-data-repository_1 | at java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:859) event-data-repository_1 | at java.io.ObjectInputStream.(ObjectInputStream.java:355) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshallerObjectInputStream.(JdkMarshallerObjectInputStream.java:43) event-data-repository_1 | at o.a.i.marshaller.jdk.JdkMarshaller.unmarshal0(JdkMarshaller.java:137) event-data-repository_1 | at o.a.i.marshaller.AbstractNodeNameAwareMarshaller.unmarshal(AbstractNodeNameAwareMarshaller.java:94) event-data-repository_1 | at o.a.i.i.util.IgniteUtils.unmarshal(IgniteUtils.java:10086) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$SocketReader.body(ServerImpl.java:6246) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-srvr-#3%TemenosGrid%", id=95, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at java.net.PlainSocketImpl.socketAccept(Native Method) event-data-repository_1 | at java.net.AbstractPlainSocketImpl.accept(AbstractPlainSocketImpl.java:409) event-data-repository_1 | at java.net.ServerSocket.implAccept(ServerSocket.java:545) event-data-repository_1 | at 
java.net.ServerSocket.accept(ServerSocket.java:513) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$TcpServer.body(ServerImpl.java:5845) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$TcpServerThread.body(ServerImpl.java:5763) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="tcp-disco-msg-worker-#2%TemenosGrid%", id=94, state=TIMED_WAITING, blockCnt=86, waitCnt=105766] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@2edc67dc, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.pollFirst(LinkedBlockingDeque.java:522) event-data-repository_1 | at java.util.concurrent.LinkedBlockingDeque.poll(LinkedBlockingDeque.java:684) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorker.body(ServerImpl.java:7183) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$RingMessageWorker.body(ServerImpl.java:2700) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at o.a.i.spi.discovery.tcp.ServerImpl$MessageWorkerThread.body(ServerImpl.java:7119) event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62) event-data-repository_1 | event-data-repository_1 | Thread [name="disco-notifier-worker-#43%TemenosGrid%", id=93, state=WAITING, blockCnt=0, waitCnt=925] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@16bdda51, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) event-data-repository_1 | at java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:442) event-data-repository_1 | at o.a.i.i.managers.discovery.GridDiscoveryManager$DiscoveryMessageNotifierWorker.body0(GridDiscoveryManager.java:2660) event-data-repository_1 | at o.a.i.i.managers.discovery.GridDiscoveryManager$DiscoveryMessageNotifierWorker.body(GridDiscoveryManager.java:2705) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="grid-data-loader-flusher-#42%TemenosGrid%", id=92, state=WAITING, blockCnt=0, waitCnt=1] event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@1d398289, ownerName=null, ownerId=-1] event-data-repository_1 | at sun.misc.Unsafe.park(Native Method) event-data-repository_1 | at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) event-data-repository_1 | at 
java.util.concurrent.DelayQueue.take(DelayQueue.java:211) event-data-repository_1 | at o.a.i.i.processors.datastreamer.DataStreamProcessor$2.body(DataStreamProcessor.java:109) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="grid-nio-worker-tcp-rest-3-#40%TemenosGrid%", id=90, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@19234577 event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@5bef2a87 event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@37ca8db9 event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148) event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="grid-nio-worker-tcp-rest-2-#39%TemenosGrid%", id=89, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@5e76e6ee event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@5d7d08aa event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@7f64ba90 event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148) event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Thread [name="grid-nio-worker-tcp-rest-1-#38%TemenosGrid%", id=88, state=RUNNABLE, blockCnt=0, waitCnt=0] event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@1c205780 event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@7508937c event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@4005ed2f 
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="grid-nio-worker-tcp-rest-0-#37%TemenosGrid%", id=87, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@1aef7f85
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@4eda70f
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@35cf479d
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="nio-acceptor-tcp-rest-#41%TemenosGrid%", id=91, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked sun.nio.ch.Util$3@5a4ca89e
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@3bbee705
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@5202faa2
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$GridNioAcceptWorker.accept(GridNioServer.java:2926)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$GridNioAcceptWorker.body(GridNioServer.java:2874)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="grid-nio-worker-client-listener-3-#34%TemenosGrid%", id=84, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@1d3ebcd0
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@2209efb1
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@6ec3a23a
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="grid-nio-worker-client-listener-2-#33%TemenosGrid%", id=83, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@2bd192aa
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@43e1633c
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@2ef87896
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="grid-nio-worker-client-listener-1-#32%TemenosGrid%", id=82, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@5d59dd01
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@7a3a50c2
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@328a243a
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="grid-nio-worker-client-listener-0-#31%TemenosGrid%", id=81, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@5f5bd635
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@4c79a90a
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@646d5c07
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="nio-acceptor-client-listener-#35%TemenosGrid%", id=85, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked sun.nio.ch.Util$3@7a51f44
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@64e9bfb
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@7fe43c1
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$GridNioAcceptWorker.accept(GridNioServer.java:2926)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$GridNioAcceptWorker.body(GridNioServer.java:2874)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="tcp-comm-worker-#1%TemenosGrid%", id=80, state=TIMED_WAITING, blockCnt=0, waitCnt=2]
event-data-repository_1 | Lock [object=java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject@1effdf06, ownerName=null, ownerId=-1]
event-data-repository_1 | at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 | at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
event-data-repository_1 | at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078)
event-data-repository_1 | at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
event-data-repository_1 | at o.a.i.spi.communication.tcp.TcpCommunicationSpi$CommunicationWorker.body(TcpCommunicationSpi.java:4287)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at o.a.i.spi.communication.tcp.TcpCommunicationSpi$5.body(TcpCommunicationSpi.java:2237)
event-data-repository_1 | at o.a.i.spi.IgniteSpiThread.run(IgniteSpiThread.java:62)
event-data-repository_1 |
event-data-repository_1 | Thread [name="grid-nio-worker-tcp-comm-3-#29%TemenosGrid%", id=76, state=RUNNABLE, blockCnt=24, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@9fdaa5d
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@260ddb19
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@546e0d60
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="grid-nio-worker-tcp-comm-2-#28%TemenosGrid%", id=75, state=RUNNABLE, blockCnt=52, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@217bbc67
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@45b05557
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@d2d9beb
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="grid-nio-worker-tcp-comm-1-#27%TemenosGrid%", id=74, state=RUNNABLE, blockCnt=57, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@298c9223
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@55e89bf1
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@123566f6
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="grid-nio-worker-tcp-comm-0-#26%TemenosGrid%", id=73, state=RUNNABLE, blockCnt=58, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked o.a.i.i.util.nio.SelectedSelectionKeySet@73a64598
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@3e76cc45
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@2f50ef69
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.bodyInternal(GridNioServer.java:2148)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$AbstractNioClientWorker.body(GridNioServer.java:1794)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="nio-acceptor-tcp-comm-#30%TemenosGrid%", id=79, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
event-data-repository_1 | at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
event-data-repository_1 | at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93)
event-data-repository_1 | at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
event-data-repository_1 | - locked sun.nio.ch.Util$3@166b3ea3
event-data-repository_1 | - locked java.util.Collections$UnmodifiableSet@57203373
event-data-repository_1 | - locked sun.nio.ch.EPollSelectorImpl@5fbcd936
event-data-repository_1 | at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$GridNioAcceptWorker.accept(GridNioServer.java:2926)
event-data-repository_1 | at o.a.i.i.util.nio.GridNioServer$GridNioAcceptWorker.body(GridNioServer.java:2874)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="grid-timeout-worker-#25%TemenosGrid%", id=71, state=TIMED_WAITING, blockCnt=2, waitCnt=6905]
event-data-repository_1 | Lock [object=java.lang.Object@5350b982, ownerName=null, ownerId=-1]
event-data-repository_1 | at java.lang.Object.wait(Native Method)
event-data-repository_1 | at o.a.i.i.processors.timeout.GridTimeoutProcessor$TimeoutWorker.body(GridTimeoutProcessor.java:269)
event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="ignite-clock", id=70, state=TIMED_WAITING, blockCnt=0, waitCnt=106594]
event-data-repository_1 | at java.lang.Thread.sleep(Native Method)
event-data-repository_1 | at o.a.i.i.util.IgniteUtils$22.run(IgniteUtils.java:3380)
event-data-repository_1 | at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="jvm-pause-detector-worker", id=63, state=TIMED_WAITING, blockCnt=0, waitCnt=21850]
event-data-repository_1 | at java.lang.Thread.sleep(Native Method)
event-data-repository_1 | at o.a.i.i.LongJVMPauseDetector$1.run(LongJVMPauseDetector.java:100)
event-data-repository_1 |
event-data-repository_1 | Thread [name="data-streamer-stripe-8-#18%TemenosGrid%", id=62, state=WAITING, blockCnt=0,
event-data-repository_1 | Thread [name="data-streamer-stripe-8-#18%TemenosGrid%", id=62, state=WAITING, blockCnt=0, waitCnt=1]
event-data-repository_1 |     at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |     at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |     at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |     at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="data-streamer-stripe-7-#17%TemenosGrid%", id=61, state=WAITING, blockCnt=0, waitCnt=1]
event-data-repository_1 |     at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |     at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |     at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |     at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="data-streamer-stripe-6-#16%TemenosGrid%", id=60, state=WAITING, blockCnt=0, waitCnt=1]
event-data-repository_1 |     at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |     at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |     at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |     at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="data-streamer-stripe-5-#15%TemenosGrid%", id=59, state=WAITING, blockCnt=0, waitCnt=1]
event-data-repository_1 |     at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |     at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |     at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |     at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="data-streamer-stripe-4-#14%TemenosGrid%", id=58, state=WAITING, blockCnt=0, waitCnt=1]
event-data-repository_1 |     at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |     at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |     at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |     at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="data-streamer-stripe-3-#13%TemenosGrid%", id=57, state=WAITING, blockCnt=0, waitCnt=1]
event-data-repository_1 |     at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |     at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |     at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |     at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="data-streamer-stripe-2-#12%TemenosGrid%", id=56, state=WAITING, blockCnt=0, waitCnt=1]
event-data-repository_1 |     at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |     at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |     at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |     at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="data-streamer-stripe-1-#11%TemenosGrid%", id=55, state=WAITING, blockCnt=0, waitCnt=1]
event-data-repository_1 |     at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |     at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |     at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |     at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="data-streamer-stripe-0-#10%TemenosGrid%", id=54, state=WAITING, blockCnt=0, waitCnt=1]
event-data-repository_1 |     at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |     at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |     at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |     at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
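The sys-stripe-8 block that follows is the one worth reading closely: it is RUNNABLE inside sun.management.ThreadImpl.dumpAllThreads, i.e. it is the thread producing this very dump. Ignite's FailureProcessor.process calls IgniteUtils.dumpThreads, which goes through the JDK's ThreadMXBean. A minimal sketch of that mechanism, using only the standard JDK API (the class name here is illustrative, not part of Ignite):

    import java.lang.management.ManagementFactory;
    import java.lang.management.ThreadInfo;
    import java.lang.management.ThreadMXBean;

    // Illustrative sketch: produce a dump in the same spirit as the one below.
    // dumpAllThreads(lockedMonitors, lockedSynchronizers) is the ThreadMXBean
    // call that sun.management.ThreadImpl.dumpAllThreads services; passing
    // true/true is what yields the "- locked ..." monitor lines and the
    // "Locked synchronizers:" section (the ReentrantLock$NonfairSync entries).
    public final class ThreadDumpSketch {
        public static void main(String[] args) {
            ThreadMXBean mx = ManagementFactory.getThreadMXBean();
            for (ThreadInfo info : mx.dumpAllThreads(true, true))
                System.out.print(info); // name, id, state and the top stack frames
        }
    }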
event-data-repository_1 | Thread [name="sys-stripe-8-#9%TemenosGrid%", id=53, state=RUNNABLE, blockCnt=623, waitCnt=119303]
event-data-repository_1 |     at sun.management.ThreadImpl.dumpThreads0(Native Method)
event-data-repository_1 |     at sun.management.ThreadImpl.dumpAllThreads(ThreadImpl.java:454)
event-data-repository_1 |     at o.a.i.i.util.IgniteUtils.dumpThreads(IgniteUtils.java:1365)
event-data-repository_1 |     at o.a.i.i.processors.failure.FailureProcessor.process(FailureProcessor.java:128)
event-data-repository_1 |     - locked o.a.i.i.processors.failure.FailureProcessor@773f5e0f
event-data-repository_1 |     at o.a.i.i.processors.failure.FailureProcessor.process(FailureProcessor.java:104)
event-data-repository_1 |     at o.a.i.i.pagemem.impl.PageMemoryNoStoreImpl.allocatePage(PageMemoryNoStoreImpl.java:319)
event-data-repository_1 |     at o.a.i.i.processors.cache.persistence.freelist.AbstractFreeList.allocateDataPage(AbstractFreeList.java:464)
event-data-repository_1 |     at o.a.i.i.processors.cache.persistence.freelist.AbstractFreeList.insertDataRow(AbstractFreeList.java:491)
event-data-repository_1 |     at o.a.i.i.processors.cache.persistence.freelist.CacheFreeListImpl.insertDataRow(CacheFreeListImpl.java:59)
event-data-repository_1 |     at o.a.i.i.processors.cache.persistence.freelist.CacheFreeListImpl.insertDataRow(CacheFreeListImpl.java:35)
event-data-repository_1 |     at o.a.i.i.processors.cache.persistence.RowStore.addRow(RowStore.java:98)
event-data-repository_1 |     at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.createRow(IgniteCacheOffheapManagerImpl.java:1691)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheMapEntry$UpdateClosure.call(GridCacheMapEntry.java:5701)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheMapEntry$UpdateClosure.call(GridCacheMapEntry.java:5643)
event-data-repository_1 |     at o.a.i.i.processors.cache.persistence.tree.BPlusTree$Invoke.invokeClosure(BPlusTree.java:3719)
event-data-repository_1 |     at o.a.i.i.processors.cache.persistence.tree.BPlusTree$Invoke.access$5900(BPlusTree.java:3613)
event-data-repository_1 |     at o.a.i.i.processors.cache.persistence.tree.BPlusTree.invokeDown(BPlusTree.java:1895)
event-data-repository_1 |     at o.a.i.i.processors.cache.persistence.tree.BPlusTree.invoke(BPlusTree.java:1779)
event-data-repository_1 |     at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.invoke0(IgniteCacheOffheapManagerImpl.java:1638)
event-data-repository_1 |     at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.invoke(IgniteCacheOffheapManagerImpl.java:1621)
event-data-repository_1 |     at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl.invoke(IgniteCacheOffheapManagerImpl.java:428)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheMapEntry.storeValue(GridCacheMapEntry.java:4248)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheMapEntry.storeValue(GridCacheMapEntry.java:4226)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheMapEntry.innerGet0(GridCacheMapEntry.java:888)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheMapEntry.innerGet(GridCacheMapEntry.java:600)
event-data-repository_1 |     at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateWithBatch(GridDhtAtomicCache.java:2095)
event-data-repository_1 |     at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.update(GridDhtAtomicCache.java:1935)
event-data-repository_1 |     at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateAllAsyncInternal0(GridDhtAtomicCache.java:1780)
event-data-repository_1 |     at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateAllAsyncInternal(GridDhtAtomicCache.java:1668)
event-data-repository_1 |     at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.processNearAtomicUpdateRequest(GridDhtAtomicCache.java:3138)
event-data-repository_1 |     at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.access$400(GridDhtAtomicCache.java:135)
event-data-repository_1 |     at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache$5.apply(GridDhtAtomicCache.java:271)
event-data-repository_1 |     at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache$5.apply(GridDhtAtomicCache.java:266)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheIoManager.processMessage(GridCacheIoManager.java:1056)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheIoManager.onMessage0(GridCacheIoManager.java:581)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheIoManager.handleMessage(GridCacheIoManager.java:380)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheIoManager.handleMessage(GridCacheIoManager.java:306)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheIoManager.access$100(GridCacheIoManager.java:101)
event-data-repository_1 |     at o.a.i.i.processors.cache.GridCacheIoManager$1.onMessage(GridCacheIoManager.java:295)
event-data-repository_1 |     at o.a.i.i.managers.communication.GridIoManager.invokeListener(GridIoManager.java:1569)
event-data-repository_1 |     at o.a.i.i.managers.communication.GridIoManager.processRegularMessage0(GridIoManager.java:1197)
event-data-repository_1 |     at o.a.i.i.managers.communication.GridIoManager.access$4200(GridIoManager.java:127)
event-data-repository_1 |     at o.a.i.i.managers.communication.GridIoManager$9.run(GridIoManager.java:1093)
event-data-repository_1 |     at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:505)
event-data-repository_1 |     at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |     at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 |     Locked synchronizers:
event-data-repository_1 |     java.util.concurrent.locks.ReentrantLock$NonfairSync@38eef86d
event-data-repository_1 |     java.util.concurrent.locks.ReentrantLock$NonfairSync@6e83ffa8
event-data-repository_1 |     [several hundred further java.util.concurrent.locks.ReentrantLock$NonfairSync@... entries held by this thread, identical in form, not reproduced here]
event-data-repository_1 |
o.a.i.i.pagemem.impl.PageMemoryNoStoreImpl.allocatePage(PageMemoryNoStoreImpl.java:319) event-data-repository_1 | at o.a.i.i.processors.cache.persistence.freelist.AbstractFreeList.allocateDataPage(AbstractFreeList.java:464) event-data-repository_1 | at o.a.i.i.processors.cache.persistence.freelist.AbstractFreeList.insertDataRow(AbstractFreeList.java:491) event-data-repository_1 | at o.a.i.i.processors.cache.persistence.freelist.CacheFreeListImpl.insertDataRow(CacheFreeListImpl.java:59) event-data-repository_1 | at o.a.i.i.processors.cache.persistence.freelist.CacheFreeListImpl.insertDataRow(CacheFreeListImpl.java:35) event-data-repository_1 | at o.a.i.i.processors.cache.persistence.RowStore.addRow(RowStore.java:98) event-data-repository_1 | at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.createRow(IgniteCacheOffheapManagerImpl.java:1691) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheMapEntry$UpdateClosure.call(GridCacheMapEntry.java:5701) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheMapEntry$UpdateClosure.call(GridCacheMapEntry.java:5643) event-data-repository_1 | at o.a.i.i.processors.cache.persistence.tree.BPlusTree$Invoke.invokeClosure(BPlusTree.java:3719) event-data-repository_1 | at o.a.i.i.processors.cache.persistence.tree.BPlusTree$Invoke.access$5900(BPlusTree.java:3613) event-data-repository_1 | at o.a.i.i.processors.cache.persistence.tree.BPlusTree.invokeDown(BPlusTree.java:1895) event-data-repository_1 | at o.a.i.i.processors.cache.persistence.tree.BPlusTree.invoke(BPlusTree.java:1779) event-data-repository_1 | at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.invoke0(IgniteCacheOffheapManagerImpl.java:1638) event-data-repository_1 | at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.invoke(IgniteCacheOffheapManagerImpl.java:1621) event-data-repository_1 | at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl.invoke(IgniteCacheOffheapManagerImpl.java:428) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheMapEntry.storeValue(GridCacheMapEntry.java:4248) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheMapEntry.storeValue(GridCacheMapEntry.java:4226) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheMapEntry.innerGet0(GridCacheMapEntry.java:888) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheMapEntry.innerGet(GridCacheMapEntry.java:600) event-data-repository_1 | at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateWithBatch(GridDhtAtomicCache.java:2095) event-data-repository_1 | at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.update(GridDhtAtomicCache.java:1935) event-data-repository_1 | at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateAllAsyncInternal0(GridDhtAtomicCache.java:1780) event-data-repository_1 | at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateAllAsyncInternal(GridDhtAtomicCache.java:1668) event-data-repository_1 | at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.processNearAtomicUpdateRequest(GridDhtAtomicCache.java:3138) event-data-repository_1 | at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.access$400(GridDhtAtomicCache.java:135) event-data-repository_1 | at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache$5.apply(GridDhtAtomicCache.java:271) event-data-repository_1 | at 
o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache$5.apply(GridDhtAtomicCache.java:266) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheIoManager.processMessage(GridCacheIoManager.java:1056) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheIoManager.onMessage0(GridCacheIoManager.java:581) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheIoManager.handleMessage(GridCacheIoManager.java:380) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheIoManager.handleMessage(GridCacheIoManager.java:306) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheIoManager.access$100(GridCacheIoManager.java:101) event-data-repository_1 | at o.a.i.i.processors.cache.GridCacheIoManager$1.onMessage(GridCacheIoManager.java:295) event-data-repository_1 | at o.a.i.i.managers.communication.GridIoManager.invokeListener(GridIoManager.java:1569) event-data-repository_1 | at o.a.i.i.managers.communication.GridIoManager.processRegularMessage0(GridIoManager.java:1197) event-data-repository_1 | at o.a.i.i.managers.communication.GridIoManager.access$4200(GridIoManager.java:127) event-data-repository_1 | at o.a.i.i.managers.communication.GridIoManager$9.run(GridIoManager.java:1093) event-data-repository_1 | at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:505) event-data-repository_1 | at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120) event-data-repository_1 | at java.lang.Thread.run(Thread.java:748) event-data-repository_1 | event-data-repository_1 | Locked synchronizers: event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@72ba40f7 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@73e71dc6 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1757c9c4 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@15480dd7 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@35555a71 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7e177830 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@57a787df event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2540e49 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1d4d5aa0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@585e67c1 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@332c8a6a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@11a8fc00 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@66c73ccf event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4d0c2fb9 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1aa483a8 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4cba2400 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1344581c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3f19cc2a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@11eb1a2 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1c991fa5 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2d2c3092 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5ca5c181 event-data-repository_1 | 
event-data-repository_1 |
event-data-repository_1 | Thread [name="sys-stripe-6-#7%TemenosGrid%", id=51, state=WAITING, blockCnt=453, waitCnt=99486]
event-data-repository_1 |         at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |         at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |         at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |         at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="sys-stripe-5-#6%TemenosGrid%", id=50, state=WAITING, blockCnt=289, waitCnt=59398]
event-data-repository_1 |         at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |         at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |         at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |         at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="sys-stripe-4-#5%TemenosGrid%", id=49, state=WAITING, blockCnt=319, waitCnt=59432]
event-data-repository_1 |         at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |         at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |         at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |         at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="sys-stripe-3-#4%TemenosGrid%", id=48, state=WAITING, blockCnt=359, waitCnt=53571]
event-data-repository_1 |         at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |         at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |         at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |         at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="sys-stripe-2-#3%TemenosGrid%", id=47, state=WAITING, blockCnt=438, waitCnt=73510]
event-data-repository_1 |         at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |         at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |         at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |         at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="sys-stripe-1-#2%TemenosGrid%", id=46, state=WAITING, blockCnt=551, waitCnt=90381]
event-data-repository_1 |         at sun.misc.Unsafe.park(Native Method)
event-data-repository_1 |         at java.util.concurrent.locks.LockSupport.park(LockSupport.java:304)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$StripeConcurrentQueue.take(StripedExecutor.java:669)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:493)
event-data-repository_1 |         at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |         at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="sys-stripe-0-#1%TemenosGrid%", id=45, state=BLOCKED, blockCnt=707, waitCnt=157869]
event-data-repository_1 |     Lock [object=o.a.i.i.processors.failure.FailureProcessor@773f5e0f, ownerName=sys-stripe-8-#9%TemenosGrid%, ownerId=53]
event-data-repository_1 |         at o.a.i.i.processors.failure.FailureProcessor.process(FailureProcessor.java:115)
event-data-repository_1 |         at o.a.i.i.processors.failure.FailureProcessor.process(FailureProcessor.java:104)
event-data-repository_1 |         at o.a.i.i.pagemem.impl.PageMemoryNoStoreImpl.allocatePage(PageMemoryNoStoreImpl.java:319)
event-data-repository_1 |         at o.a.i.i.processors.cache.persistence.freelist.AbstractFreeList.allocateDataPage(AbstractFreeList.java:464)
event-data-repository_1 |         at o.a.i.i.processors.cache.persistence.freelist.AbstractFreeList.insertDataRow(AbstractFreeList.java:491)
event-data-repository_1 |         at o.a.i.i.processors.cache.persistence.freelist.CacheFreeListImpl.insertDataRow(CacheFreeListImpl.java:59)
event-data-repository_1 |         at o.a.i.i.processors.cache.persistence.freelist.CacheFreeListImpl.insertDataRow(CacheFreeListImpl.java:35)
event-data-repository_1 |         at o.a.i.i.processors.cache.persistence.RowStore.addRow(RowStore.java:98)
event-data-repository_1 |         at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.createRow(IgniteCacheOffheapManagerImpl.java:1691)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheMapEntry$UpdateClosure.call(GridCacheMapEntry.java:5701)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheMapEntry$UpdateClosure.call(GridCacheMapEntry.java:5643)
event-data-repository_1 |         at o.a.i.i.processors.cache.persistence.tree.BPlusTree$Invoke.invokeClosure(BPlusTree.java:3719)
event-data-repository_1 |         at o.a.i.i.processors.cache.persistence.tree.BPlusTree$Invoke.access$5900(BPlusTree.java:3613)
event-data-repository_1 |         at o.a.i.i.processors.cache.persistence.tree.BPlusTree.invokeDown(BPlusTree.java:1895)
event-data-repository_1 |         at o.a.i.i.processors.cache.persistence.tree.BPlusTree.invoke(BPlusTree.java:1779)
event-data-repository_1 |         at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.invoke0(IgniteCacheOffheapManagerImpl.java:1638)
event-data-repository_1 |         at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl$CacheDataStoreImpl.invoke(IgniteCacheOffheapManagerImpl.java:1621)
event-data-repository_1 |         at o.a.i.i.processors.cache.IgniteCacheOffheapManagerImpl.invoke(IgniteCacheOffheapManagerImpl.java:428)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheMapEntry.storeValue(GridCacheMapEntry.java:4248)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheMapEntry.storeValue(GridCacheMapEntry.java:4226)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheMapEntry.innerGet0(GridCacheMapEntry.java:888)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheMapEntry.innerGet(GridCacheMapEntry.java:600)
event-data-repository_1 |         at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateWithBatch(GridDhtAtomicCache.java:2095)
event-data-repository_1 |         at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.update(GridDhtAtomicCache.java:1935)
event-data-repository_1 |         at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateAllAsyncInternal0(GridDhtAtomicCache.java:1780)
event-data-repository_1 |         at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.updateAllAsyncInternal(GridDhtAtomicCache.java:1668)
event-data-repository_1 |         at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.processNearAtomicUpdateRequest(GridDhtAtomicCache.java:3138)
event-data-repository_1 |         at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache.access$400(GridDhtAtomicCache.java:135)
event-data-repository_1 |         at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache$5.apply(GridDhtAtomicCache.java:271)
event-data-repository_1 |         at o.a.i.i.processors.cache.distributed.dht.atomic.GridDhtAtomicCache$5.apply(GridDhtAtomicCache.java:266)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheIoManager.processMessage(GridCacheIoManager.java:1056)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheIoManager.onMessage0(GridCacheIoManager.java:581)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheIoManager.handleMessage(GridCacheIoManager.java:380)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheIoManager.handleMessage(GridCacheIoManager.java:306)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheIoManager.access$100(GridCacheIoManager.java:101)
event-data-repository_1 |         at o.a.i.i.processors.cache.GridCacheIoManager$1.onMessage(GridCacheIoManager.java:295)
event-data-repository_1 |         at o.a.i.i.managers.communication.GridIoManager.invokeListener(GridIoManager.java:1569)
event-data-repository_1 |         at o.a.i.i.managers.communication.GridIoManager.processRegularMessage0(GridIoManager.java:1197)
event-data-repository_1 |         at o.a.i.i.managers.communication.GridIoManager.access$4200(GridIoManager.java:127)
event-data-repository_1 |         at o.a.i.i.managers.communication.GridIoManager$9.run(GridIoManager.java:1093)
event-data-repository_1 |         at o.a.i.i.util.StripedExecutor$Stripe.body(StripedExecutor.java:505)
event-data-repository_1 |         at o.a.i.i.util.worker.GridWorker.run(GridWorker.java:120)
event-data-repository_1 |         at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 |     Locked synchronizers:
event-data-repository_1 |     java.util.concurrent.locks.ReentrantLock$NonfairSync@554c9517
event-data-repository_1 |     [... several hundred more java.util.concurrent.locks.ReentrantLock$NonfairSync entries omitted ...]
java.util.concurrent.locks.ReentrantLock$NonfairSync@423ca0fa event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7d474611 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7c2b830c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@551ac228 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@72068301 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@35b6349e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1525f2c7 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@16ad79c9 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@50f5fe96 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@54e332d2 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6e6a5143 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@13f41e76 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6c8a53d0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@21328931 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1d6e341d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2eda9e68 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@10f04cb0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@25be249e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@491c22e8 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@334fcc7d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@21c88368 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7506d91b event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5d25ae31 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@10c4bdcc event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@75f2af7f event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@36ff7edb event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@465b0a13 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7344e5c9 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@13ad2589 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5e699881 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@407c0acd event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@14c2c5a6 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6eb93bda event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7c060b1c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5c8c4c33 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5ee14df1 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7921ca1c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@353360cf event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@b604f8e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5fba05d6 event-data-repository_1 | 
java.util.concurrent.locks.ReentrantLock$NonfairSync@283c5517 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6276dc7 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@f325eb2 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@296e37 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4af717c8 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@77030204 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6ad91598 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@21ca8622 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@53718c84 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3c8db7cc event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1eda0010 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6bd47e7f event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@341eca2d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@65fc38b5 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2b686e9a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6387ca0e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@218a3bf1 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@253786e5 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4d432067 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1049447b event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@60ecb030 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@79751266 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2d615537 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@77d1b67c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@72b9e586 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@230e2f95 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4610e2e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7ef92a88 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@43d16504 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@11f1886c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1d6df402 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2a77304d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@62c69942 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7f69df9e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@dd6e995 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1e30553a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@48eac337 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@79ce709b event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@436eb476 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5febb0a4 event-data-repository_1 | 
java.util.concurrent.locks.ReentrantLock$NonfairSync@41062315 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4bc6d906 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7dbd190c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7f51f88f event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@fbf62fb event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@72cc94e3 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@67ac86cf event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@17c98f35 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@63159f8d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@75e87480 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@76f10e0c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2d1a97c0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@623c8c19 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@54c6a8aa event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2ab94d50 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@53e46b50 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@55be7971 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@345c0894 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@54ef1082 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@24214fa4 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2f2cfa0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@563e6fc2 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7a790e87 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@540b3203 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@409078ed event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@65754f42 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@57ca685d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5a5cd38d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@19c89f3b event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5688dc1f event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5204ce13 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6e78af65 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3394d4d6 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@236c8e9f event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@57eca8b7 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7c4436a8 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@69636f92 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2e3c0c57 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1c27b037 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@418b246d event-data-repository_1 | 
java.util.concurrent.locks.ReentrantLock$NonfairSync@33e6dc31 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7d7620ae event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5c71813f event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@44417e90 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@41c3257d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@d7c1ebb event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5d541e52 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@12ae5e96 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4a9e7973 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@27fa2f5d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5aa1037e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3a9bac52 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3f70f93 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@75815877 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@27e91f4a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4077ced9 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7bc7cad3 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4fa773c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6b063ba6 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@15793119 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@502df037 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7393e99 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5db8fed3 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@195ed84 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3e8609c9 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@70052576 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6a5113c3 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@44063ac0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4a40bbc8 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@13b71963 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@711adfdd event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4bf3b04 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4bee5ea0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@60393abf event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@477ae552 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3ae081a2 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@342c300 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2a07fd15 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3a47dd72 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@494a609 event-data-repository_1 | 
java.util.concurrent.locks.ReentrantLock$NonfairSync@115b3f58 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4a19733 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@96698 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@210c3c7e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@69e566f8 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@61757cf8 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2a83fbe8 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6a2cdd0a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@28bca411 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@62648f42 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@570d645d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5bc5273b event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@16949c7e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@50cc0f71 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6cdfb6b9 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1e51a8e6 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2c14a03e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1c13854a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@d68920f event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1e6d1ddc event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@17715b7f event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@17c8b50e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5e79b512 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@288cd0fc event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@35bbcc31 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@67096f4d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@74cb879b event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3a0ea2ce event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@513f3bfa event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7de05597 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@55bff2ef event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1ae82244 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@32203a08 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4d138d36 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@671136f1 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3c30d255 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@f735fa1 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5e580944 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@30900ac0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@a88c4fd event-data-repository_1 | 
java.util.concurrent.locks.ReentrantLock$NonfairSync@1f131c5a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@11f24d5 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@31e9e83c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7d0a6dd0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7a7c5fe7 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2bdf5f1 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7c6b5592 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5221d322 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4aba3de6 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@27455b3d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1a44d82 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5d402657 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@468e6514 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4b9f424b event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@68f4cee7 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@345a2f30 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1c95a6a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3075dd02 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7e38be1d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1b7e3d45 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@50af2e4a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4e2c6490 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@755fe31e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1f216ce5 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3655dc35 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1bf0368b event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@113d71f8 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5ef717e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@1dc3a182 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@79d88df event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7dd58b65 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@34f7120 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3800f7a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@68b0a4a5 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@396f9475 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@41d13409 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@42290896 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2fd109fa event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6ad8ec49 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@22e1ec67 event-data-repository_1 | 
java.util.concurrent.locks.ReentrantLock$NonfairSync@28063d15 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@fbf6f36 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@22a8602c event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@f0732ca event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@167048aa event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@62c2438d event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@30af6f9 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@35ad1530 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@218c2e37 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@51a2cfa0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5cb4653 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5812c74f event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@182fa7ec event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@5fb7a486 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@9198fd event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4e770f1f event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2b82c1fa event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4973c398 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@45a9ec3b event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@33d00c66 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@e6086f7 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@59d8fba0 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@539a0c94 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@601a99bd event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@6ac7ec77 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@745c1f74 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@77a153c7 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4342ebff event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@16b303b4 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@20009a9 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7fc24d45 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@2b0b4c51 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@25ab5427 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@27644c7a event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@4aa185b6 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@3b392177 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@44cc505b event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@41cec324 event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@704729e event-data-repository_1 | java.util.concurrent.locks.ReentrantLock$NonfairSync@7544d990 event-data-repository_1 | 
event-data-repository_1 | Thread [name="Timer-5", id=33, state=TIMED_WAITING, blockCnt=0, waitCnt=222]
event-data-repository_1 |     Lock [object=java.util.TaskQueue@2bc78eeb, ownerName=null, ownerId=-1]
event-data-repository_1 |         at java.lang.Object.wait(Native Method)
event-data-repository_1 |         at java.util.TimerThread.mainLoop(Timer.java:552)
event-data-repository_1 |         at java.util.TimerThread.run(Timer.java:505)
event-data-repository_1 |
event-data-repository_1 | Thread [name="Timer-3", id=26, state=TIMED_WAITING, blockCnt=0, waitCnt=226]
event-data-repository_1 |     Lock [object=java.util.TaskQueue@7a3b7cd0, ownerName=null, ownerId=-1]
event-data-repository_1 |         at java.lang.Object.wait(Native Method)
event-data-repository_1 |         at java.util.TimerThread.mainLoop(Timer.java:552)
event-data-repository_1 |         at java.util.TimerThread.run(Timer.java:505)
event-data-repository_1 |
event-data-repository_1 | Thread [name="Timer-1", id=18, state=TIMED_WAITING, blockCnt=0, waitCnt=226]
event-data-repository_1 |     Lock [object=java.util.TaskQueue@1adc9b7e, ownerName=null, ownerId=-1]
event-data-repository_1 |         at java.lang.Object.wait(Native Method)
event-data-repository_1 |         at java.util.TimerThread.mainLoop(Timer.java:552)
event-data-repository_1 |         at java.util.TimerThread.run(Timer.java:505)
event-data-repository_1 |
event-data-repository_1 | Thread [name="RMI TCP Accept-0", id=14, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 |         at java.net.PlainSocketImpl.socketAccept(Native Method)
event-data-repository_1 |         at java.net.AbstractPlainSocketImpl.accept(AbstractPlainSocketImpl.java:409)
event-data-repository_1 |         at java.net.ServerSocket.implAccept(ServerSocket.java:545)
event-data-repository_1 |         at java.net.ServerSocket.accept(ServerSocket.java:513)
event-data-repository_1 |         at sun.rmi.transport.tcp.TCPTransport$AcceptLoop.executeAcceptLoop(TCPTransport.java:400)
event-data-repository_1 |         at sun.rmi.transport.tcp.TCPTransport$AcceptLoop.run(TCPTransport.java:372)
event-data-repository_1 |         at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="RMI TCP Accept-50510", id=13, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 |         at java.net.PlainSocketImpl.socketAccept(Native Method)
event-data-repository_1 |         at java.net.AbstractPlainSocketImpl.accept(AbstractPlainSocketImpl.java:409)
event-data-repository_1 |         at java.net.ServerSocket.implAccept(ServerSocket.java:545)
event-data-repository_1 |         at java.net.ServerSocket.accept(ServerSocket.java:513)
event-data-repository_1 |         at sun.rmi.transport.tcp.TCPTransport$AcceptLoop.executeAcceptLoop(TCPTransport.java:400)
event-data-repository_1 |         at sun.rmi.transport.tcp.TCPTransport$AcceptLoop.run(TCPTransport.java:372)
event-data-repository_1 |         at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="RMI TCP Accept-50511", id=12, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 |         at java.net.PlainSocketImpl.socketAccept(Native Method)
event-data-repository_1 |         at java.net.AbstractPlainSocketImpl.accept(AbstractPlainSocketImpl.java:409)
event-data-repository_1 |         at java.net.ServerSocket.implAccept(ServerSocket.java:545)
event-data-repository_1 |         at java.net.ServerSocket.accept(ServerSocket.java:513)
event-data-repository_1 |         at sun.rmi.transport.tcp.TCPTransport$AcceptLoop.executeAcceptLoop(TCPTransport.java:400)
event-data-repository_1 |         at sun.rmi.transport.tcp.TCPTransport$AcceptLoop.run(TCPTransport.java:372)
event-data-repository_1 |         at java.lang.Thread.run(Thread.java:748)
event-data-repository_1 |
event-data-repository_1 | Thread [name="Signal Dispatcher", id=5, state=RUNNABLE, blockCnt=0, waitCnt=0]
event-data-repository_1 |
event-data-repository_1 | Thread [name="Finalizer", id=3, state=WAITING, blockCnt=46, waitCnt=23]
event-data-repository_1 |     Lock [object=java.lang.ref.ReferenceQueue$Lock@4c96bd0e, ownerName=null, ownerId=-1]
event-data-repository_1 |         at java.lang.Object.wait(Native Method)
event-data-repository_1 |         at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:143)
event-data-repository_1 |         at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:164)
event-data-repository_1 |         at java.lang.ref.Finalizer$FinalizerThread.run(Finalizer.java:209)
event-data-repository_1 |
event-data-repository_1 | Thread [name="Reference Handler", id=2, state=WAITING, blockCnt=23, waitCnt=22]
event-data-repository_1 |     Lock [object=java.lang.ref.Reference$Lock@5bb1d2a1, ownerName=null, ownerId=-1]
event-data-repository_1 |         at java.lang.Object.wait(Native Method)
event-data-repository_1 |         at java.lang.Object.wait(Object.java:502)
event-data-repository_1 |         at java.lang.ref.Reference.tryHandlePending(Reference.java:191)
event-data-repository_1 |         at java.lang.ref.Reference$ReferenceHandler.run(Reference.java:153)
event-data-repository_1 |
event-data-repository_1 | Thread [name="main", id=1, state=TIMED_WAITING, blockCnt=74, waitCnt=1133]
event-data-repository_1 |         at java.lang.Thread.sleep(Native Method)
event-data-repository_1 |         at com.temenos.des.datarepo.server.StartDataRepositoryIgniteCacheServer.main(StartDataRepositoryIgniteCacheServer.java:147)
event-data-repository_1 |
event-data-repository_1 | 2019-09-22 10:44:55.395 ERROR [sys-stripe-8-#9%TemenosGrid%] - JVM will be halted immediately due to the failure: [failureCtx=FailureContext [type=CRITICAL_ERROR, err=class o.a.i.i.mem.IgniteOutOfMemoryException: Out of memory in data region [name=1G_Region, initSize=100.0 MiB, maxSize=400.0 MiB, persistenceEnabled=false] Try the following:
event-data-repository_1 | ^-- Increase maximum off-heap memory size (DataRegionConfiguration.maxSize)
event-data-repository_1 | ^-- Enable Ignite persistence (DataRegionConfiguration.persistenceEnabled)
event-data-repository_1 | ^-- Enable eviction or expiration policies]]
src_event-data-repository_1 exited with code 130
C:\data\srcT24\rtc\DEV\des-docker>
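
For reference, the fatal error above is Ignite exhausting the off-heap page memory of the 1G_Region data region (capped at maxSize=400.0 MiB with persistenceEnabled=false, per the failure context). The configuration dump earlier in the log shows RANDOM_2_LRU page eviction was already enabled for this region, so of the three remedies the node prints, raising DataRegionConfiguration.maxSize is the one with immediate effect. Below is a minimal sketch of that change using the public Apache Ignite 2.x Java API; it is not the actual Temenos startup code (the main-thread stack shows that lives in StartDataRepositoryIgniteCacheServer), and the 1 GiB cap is an assumed value chosen only to match the region's name.

import org.apache.ignite.Ignite;
import org.apache.ignite.Ignition;
import org.apache.ignite.configuration.DataPageEvictionMode;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;

public class ResizedRegionSketch {
    public static void main(String[] args) {
        // Values other than maxSize mirror the failing node's dump:
        // name=1G_Region, initSize=100 MiB, RANDOM_2_LRU eviction at 0.6 fill.
        DataRegionConfiguration region = new DataRegionConfiguration()
            .setName("1G_Region")
            .setInitialSize(100L * 1024 * 1024)
            // Raised from the 400 MiB cap that triggered IgniteOutOfMemoryException;
            // 1 GiB is an assumed target, size it to the real working set.
            .setMaxSize(1024L * 1024 * 1024)
            .setPageEvictionMode(DataPageEvictionMode.RANDOM_2_LRU)
            .setEvictionThreshold(0.6);

        IgniteConfiguration cfg = new IgniteConfiguration()
            .setIgniteInstanceName("TemenosGrid")
            .setDataStorageConfiguration(
                new DataStorageConfiguration().setDataRegionConfigurations(region));

        // Start a server node with the enlarged region; caches assigned to
        // 1G_Region can now grow toward 1 GiB off-heap before eviction kicks in.
        try (Ignite ignite = Ignition.start(cfg)) {
            System.out.println("Node started with resized 1G_Region");
        }
    }
}

Note that page eviction in a non-persistent region can only reclaim whole data pages; if the working set genuinely exceeds the cap, or entries never expire, only a larger maxSize, expiry policies on the caches in that region, or enabling persistence will keep the failure handler from halting the JVM as it did here.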