2024-12-07 17:51:28,051 main DEBUG Apache Log4j Core 2.17.2 initializing configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-12-07 17:51:28,062 main DEBUG Took 0.009307 seconds to load 1 plugins from package org.apache.hadoop.hbase.logging
2024-12-07 17:51:28,062 main DEBUG PluginManager 'Core' found 129 plugins
2024-12-07 17:51:28,062 main DEBUG PluginManager 'Level' found 0 plugins
2024-12-07 17:51:28,063 main DEBUG PluginManager 'Lookup' found 16 plugins
2024-12-07 17:51:28,064 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,070 main DEBUG PluginManager 'TypeConverter' found 26 plugins
2024-12-07 17:51:28,081 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.util.MBeans", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,082 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,083 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.logging.TestJul2Slf4j", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,083 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,083 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.zookeeper", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,083 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,084 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,084 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,085 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,085 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,086 main DEBUG LoggerConfig$Builder(additivity="false", level="WARN", levelAndRefs="null", name="org.apache.directory", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,086 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,086 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.ipc.FailedServers", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,087 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,087 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsConfig", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,087 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,088 main DEBUG LoggerConfig$Builder(additivity="null", level="INFO", levelAndRefs="null", name="org.apache.hadoop.hbase.ScheduledChore", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,088 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,088 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.regionserver.RSRpcServices", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,088 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,089 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,089 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,089 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,089 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-07 17:51:28,090 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hbase.thirdparty.io.netty.channel", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,090 main DEBUG Building Plugin[name=root, class=org.apache.logging.log4j.core.config.LoggerConfig$RootLogger].
2024-12-07 17:51:28,092 main DEBUG LoggerConfig$RootLogger$Builder(additivity="null", level="null", levelAndRefs="INFO,Console", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-07 17:51:28,093 main DEBUG Building Plugin[name=loggers, class=org.apache.logging.log4j.core.config.LoggersPlugin].
2024-12-07 17:51:28,094 main DEBUG createLoggers(={org.apache.hadoop.metrics2.util.MBeans, org.apache.hadoop.hbase.logging.TestJul2Slf4j, org.apache.zookeeper, org.apache.hadoop.metrics2.impl.MetricsSinkAdapter, org.apache.hadoop.metrics2.impl.MetricsSystemImpl, org.apache.directory, org.apache.hadoop.hbase.ipc.FailedServers, org.apache.hadoop.metrics2.impl.MetricsConfig, org.apache.hadoop.hbase.ScheduledChore, org.apache.hadoop.hbase.regionserver.RSRpcServices, org.apache.hadoop, org.apache.hadoop.hbase, org.apache.hbase.thirdparty.io.netty.channel, root})
2024-12-07 17:51:28,095 main DEBUG Building Plugin[name=layout, class=org.apache.logging.log4j.core.layout.PatternLayout].
2024-12-07 17:51:28,096 main DEBUG PatternLayout$Builder(pattern="%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n", PatternSelector=null, Configuration(PropertiesConfig), Replace=null, charset="null", alwaysWriteExceptions="null", disableAnsi="null", noConsoleNoAnsi="null", header="null", footer="null")
2024-12-07 17:51:28,096 main DEBUG PluginManager 'Converter' found 47 plugins
2024-12-07 17:51:28,103 main DEBUG Building Plugin[name=appender, class=org.apache.hadoop.hbase.logging.HBaseTestAppender].
2024-12-07 17:51:28,105 main DEBUG HBaseTestAppender$Builder(target="SYSTEM_ERR", maxSize="1G", bufferedIo="null", bufferSize="null", immediateFlush="null", ignoreExceptions="null", PatternLayout(%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n), name="Console", Configuration(PropertiesConfig), Filter=null, ={})
2024-12-07 17:51:28,107 main DEBUG Starting HBaseTestOutputStreamManager SYSTEM_ERR
2024-12-07 17:51:28,107 main DEBUG Building Plugin[name=appenders, class=org.apache.logging.log4j.core.config.AppendersPlugin].
2024-12-07 17:51:28,107 main DEBUG createAppenders(={Console})
2024-12-07 17:51:28,108 main DEBUG Configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f initialized
2024-12-07 17:51:28,108 main DEBUG Starting configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-12-07 17:51:28,108 main DEBUG Started configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f OK.
2024-12-07 17:51:28,109 main DEBUG Shutting down OutputStreamManager SYSTEM_OUT.false.false-1
2024-12-07 17:51:28,109 main DEBUG OutputStream closed
2024-12-07 17:51:28,109 main DEBUG Shut down OutputStreamManager SYSTEM_OUT.false.false-1, all resources released: true
2024-12-07 17:51:28,109 main DEBUG Appender DefaultConsole-1 stopped with status true
2024-12-07 17:51:28,110 main DEBUG Stopped org.apache.logging.log4j.core.config.DefaultConfiguration@61001b64 OK
2024-12-07 17:51:28,173 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6
2024-12-07 17:51:28,176 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=StatusLogger
2024-12-07 17:51:28,177 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=ContextSelector
2024-12-07 17:51:28,178 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=
2024-12-07 17:51:28,178 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.directory
2024-12-07 17:51:28,179 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
2024-12-07 17:51:28,179 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.zookeeper
2024-12-07 17:51:28,179 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.logging.TestJul2Slf4j
2024-12-07 17:51:28,180 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSystemImpl
2024-12-07 17:51:28,180 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.util.MBeans
2024-12-07 17:51:28,180 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase
2024-12-07 17:51:28,181 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop
2024-12-07 17:51:28,181 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ipc.FailedServers
2024-12-07 17:51:28,181 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.regionserver.RSRpcServices
2024-12-07 17:51:28,181 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsConfig
2024-12-07 17:51:28,182 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hbase.thirdparty.io.netty.channel
2024-12-07 17:51:28,182 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ScheduledChore
2024-12-07 17:51:28,183 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Appenders,name=Console
2024-12-07 17:51:28,185 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-12-07 17:51:28,185 main DEBUG Reconfiguration complete for context[name=1dbd16a6] at URI jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-logging/target/hbase-logging-4.0.0-alpha-1-SNAPSHOT-tests.jar!/log4j2.properties (org.apache.logging.log4j.core.LoggerContext@40db2a24) with optional ClassLoader: null
2024-12-07 17:51:28,185 main DEBUG Shutdown hook enabled. Registering a new one.
2024-12-07 17:51:28,186 main DEBUG LoggerContext[name=1dbd16a6, org.apache.logging.log4j.core.LoggerContext@40db2a24] started OK.
2024-12-07T17:51:28,383 DEBUG [main {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07
2024-12-07 17:51:28,386 main DEBUG AsyncLogger.ThreadNameStrategy=UNCACHED (user specified null, default is UNCACHED)
2024-12-07 17:51:28,386 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-12-07T17:51:28,394 INFO [main {}] hbase.HBaseClassTestRule(94): Test class org.apache.hadoop.hbase.regionserver.wal.TestFSHLog timeout: 13 mins
2024-12-07T17:51:28,413 INFO [Time-limited test {}] hbase.HBaseZKTestingUtil(84): Created new mini-cluster data directory: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412, deleteOnExit=true
2024-12-07T17:51:28,414 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting test.cache.data to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/test.cache.data in system properties and HBase conf
2024-12-07T17:51:28,415 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.tmp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.tmp.dir in system properties and HBase conf
2024-12-07T17:51:28,415 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.log.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir in system properties and HBase conf
2024-12-07T17:51:28,416 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.local.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/mapreduce.cluster.local.dir in system properties and HBase conf
2024-12-07T17:51:28,416 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.temp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/mapreduce.cluster.temp.dir in system properties and HBase conf
2024-12-07T17:51:28,417 INFO [Time-limited test {}] hbase.HBaseTestingUtil(738): read short circuit is OFF
2024-12-07T17:51:28,493 WARN [Time-limited test {}] util.NativeCodeLoader(60): Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2024-12-07T17:51:28,570 DEBUG [Time-limited test {}] fs.HFileSystem(310): The file system is not a DistributedFileSystem. Skipping on block location reordering
2024-12-07T17:51:28,573 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-labels.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/yarn.node-labels.fs-store.root-dir in system properties and HBase conf
2024-12-07T17:51:28,573 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-attribute.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/yarn.node-attribute.fs-store.root-dir in system properties and HBase conf
2024-12-07T17:51:28,573 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.log-dirs to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/yarn.nodemanager.log-dirs in system properties and HBase conf
2024-12-07T17:51:28,574 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-12-07T17:51:28,574 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.active-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/yarn.timeline-service.entity-group-fs-store.active-dir in system properties and HBase conf
2024-12-07T17:51:28,574 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.done-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/yarn.timeline-service.entity-group-fs-store.done-dir in system properties and HBase conf
2024-12-07T17:51:28,575 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-12-07T17:51:28,575 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/dfs.journalnode.edits.dir in system properties and HBase conf
2024-12-07T17:51:28,576 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.datanode.shared.file.descriptor.paths to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/dfs.datanode.shared.file.descriptor.paths in system properties and HBase conf
2024-12-07T17:51:28,576 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting nfs.dump.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/nfs.dump.dir in system properties and HBase conf
2024-12-07T17:51:28,576 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting java.io.tmpdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/java.io.tmpdir in system properties and HBase conf
2024-12-07T17:51:28,576 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/dfs.journalnode.edits.dir in system properties and HBase conf
2024-12-07T17:51:28,577 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.provided.aliasmap.inmemory.leveldb.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/dfs.provided.aliasmap.inmemory.leveldb.dir in system properties and HBase conf
2024-12-07T17:51:28,577 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting fs.s3a.committer.staging.tmp.path to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/fs.s3a.committer.staging.tmp.path in system properties and HBase conf
2024-12-07T17:51:29,000 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-12-07T17:51:29,448 WARN [Time-limited test {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
2024-12-07T17:51:29,511 INFO [Time-limited test {}] log.Log(170): Logging initialized @2067ms to org.eclipse.jetty.util.log.Slf4jLog
2024-12-07T17:51:29,571 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-07T17:51:29,632 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-07T17:51:29,652 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-07T17:51:29,652 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-07T17:51:29,653 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-12-07T17:51:29,664 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-07T17:51:29,666 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@746f7db{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir/,AVAILABLE}
2024-12-07T17:51:29,667 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@61b73bb3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-07T17:51:29,835 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@5599def{hdfs,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/java.io.tmpdir/jetty-localhost-36603-hadoop-hdfs-3_4_1-tests_jar-_-any-3759908163279358429/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-12-07T17:51:29,844 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@493ba8a1{HTTP/1.1, (http/1.1)}{localhost:36603}
2024-12-07T17:51:29,845 INFO [Time-limited test {}] server.Server(415): Started @2402ms
2024-12-07T17:51:29,866 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-12-07T17:51:30,298 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-07T17:51:30,304 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-07T17:51:30,305 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-07T17:51:30,305 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-07T17:51:30,305 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-12-07T17:51:30,306 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@208945{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir/,AVAILABLE}
2024-12-07T17:51:30,306 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@43a917ce{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-07T17:51:30,398 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@411b19f7{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/java.io.tmpdir/jetty-localhost-34759-hadoop-hdfs-3_4_1-tests_jar-_-any-13271589711756425243/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-07T17:51:30,399 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@64bb503e{HTTP/1.1, (http/1.1)}{localhost:34759}
2024-12-07T17:51:30,399 INFO [Time-limited test {}] server.Server(415): Started @2956ms
2024-12-07T17:51:30,442 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-12-07T17:51:30,536 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-07T17:51:30,542 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-07T17:51:30,544 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-07T17:51:30,544 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-07T17:51:30,544 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-12-07T17:51:30,545 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@443ad5c2{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir/,AVAILABLE}
2024-12-07T17:51:30,546 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@243038a3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-07T17:51:30,645 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@28637041{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/java.io.tmpdir/jetty-localhost-46575-hadoop-hdfs-3_4_1-tests_jar-_-any-16594498197670752186/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-07T17:51:30,646 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@5e18b7fd{HTTP/1.1, (http/1.1)}{localhost:46575}
2024-12-07T17:51:30,646 INFO [Time-limited test {}] server.Server(415): Started @3203ms
2024-12-07T17:51:30,648 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-12-07T17:51:30,679 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-07T17:51:30,682 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-07T17:51:30,684 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-07T17:51:30,684 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-07T17:51:30,684 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-12-07T17:51:30,685 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@4c49fcd3{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir/,AVAILABLE}
2024-12-07T17:51:30,685 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@47ddd06a{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-07T17:51:30,776 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@4b2b884e{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/java.io.tmpdir/jetty-localhost-45063-hadoop-hdfs-3_4_1-tests_jar-_-any-1194649370799648090/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-07T17:51:30,776 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@fa4aa4c{HTTP/1.1, (http/1.1)}{localhost:45063}
2024-12-07T17:51:30,777 INFO [Time-limited test {}] server.Server(415): Started @3333ms
2024-12-07T17:51:30,778 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-12-07T17:51:31,668 WARN [Thread-122 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data3/current/BP-151443317-172.17.0.2-1733593889072/current, will proceed with Du for space computation calculation,
2024-12-07T17:51:31,668 WARN [Thread-123 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data4/current/BP-151443317-172.17.0.2-1733593889072/current, will proceed with Du for space computation calculation,
2024-12-07T17:51:31,668 WARN [Thread-125 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data2/current/BP-151443317-172.17.0.2-1733593889072/current, will proceed with Du for space computation calculation,
2024-12-07T17:51:31,668 WARN [Thread-124 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data1/current/BP-151443317-172.17.0.2-1733593889072/current, will proceed with Du for space computation calculation,
2024-12-07T17:51:31,698 WARN [Thread-81 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-07T17:51:31,698 WARN [Thread-58 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-07T17:51:31,714 WARN [Thread-142 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data5/current/BP-151443317-172.17.0.2-1733593889072/current, will proceed with Du for space computation calculation,
2024-12-07T17:51:31,715 WARN [Thread-143 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data6/current/BP-151443317-172.17.0.2-1733593889072/current, will proceed with Du for space computation calculation,
2024-12-07T17:51:31,733 WARN [Thread-103 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-07T17:51:31,745 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xf3e06695161a631 with lease ID 0x65c7890b0b90c663: Processing first storage report for DS-ff468f6c-05fa-48cf-ab7a-0ad555511de2 from datanode DatanodeRegistration(127.0.0.1:39393, datanodeUuid=fc42c86a-70e9-447f-87e8-a4721a0abec3, infoPort=32833, infoSecurePort=0, ipcPort=33361, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072)
2024-12-07T17:51:31,746 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xf3e06695161a631 with lease ID 0x65c7890b0b90c663: from storage DS-ff468f6c-05fa-48cf-ab7a-0ad555511de2 node DatanodeRegistration(127.0.0.1:39393, datanodeUuid=fc42c86a-70e9-447f-87e8-a4721a0abec3, infoPort=32833, infoSecurePort=0, ipcPort=33361, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
2024-12-07T17:51:31,746 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x920ca2d22a2e042e with lease ID 0x65c7890b0b90c661: Processing first storage report for DS-02ae2ff1-ec0e-45b3-8f03-11b97394b189 from datanode DatanodeRegistration(127.0.0.1:36845, datanodeUuid=0283b3f6-7dae-4e65-912c-3a2518969050, infoPort=38451, infoSecurePort=0, ipcPort=43807, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072)
2024-12-07T17:51:31,746 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x920ca2d22a2e042e with lease ID 0x65c7890b0b90c661: from storage DS-02ae2ff1-ec0e-45b3-8f03-11b97394b189 node DatanodeRegistration(127.0.0.1:36845, datanodeUuid=0283b3f6-7dae-4e65-912c-3a2518969050, infoPort=38451, infoSecurePort=0, ipcPort=43807, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-07T17:51:31,746 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xdef3a96f816651c5 with lease ID 0x65c7890b0b90c662: Processing first storage report for DS-c7dc589b-d465-42a5-a262-7236c2907e5a from datanode DatanodeRegistration(127.0.0.1:44441, datanodeUuid=eb45ebf0-fe9d-4e21-b581-49b6bcb1794f, infoPort=40139, infoSecurePort=0, ipcPort=35431, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072)
2024-12-07T17:51:31,746 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xdef3a96f816651c5 with lease ID 0x65c7890b0b90c662: from storage DS-c7dc589b-d465-42a5-a262-7236c2907e5a node DatanodeRegistration(127.0.0.1:44441, datanodeUuid=eb45ebf0-fe9d-4e21-b581-49b6bcb1794f, infoPort=40139, infoSecurePort=0, ipcPort=35431, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-07T17:51:31,747 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xf3e06695161a631 with lease ID 0x65c7890b0b90c663: Processing first storage report for DS-17c3f915-c49d-49ad-bf71-2bf9a87b6997 from datanode DatanodeRegistration(127.0.0.1:39393, datanodeUuid=fc42c86a-70e9-447f-87e8-a4721a0abec3, infoPort=32833, infoSecurePort=0, ipcPort=33361, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072)
2024-12-07T17:51:31,747 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xf3e06695161a631 with lease ID 0x65c7890b0b90c663: from storage DS-17c3f915-c49d-49ad-bf71-2bf9a87b6997 node DatanodeRegistration(127.0.0.1:39393, datanodeUuid=fc42c86a-70e9-447f-87e8-a4721a0abec3, infoPort=32833, infoSecurePort=0, ipcPort=33361, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072), blocks: 0, hasStaleStorage: false, processing time: 1 msecs, invalidatedBlocks: 0
2024-12-07T17:51:31,747 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x920ca2d22a2e042e with lease ID 0x65c7890b0b90c661: Processing first storage report for DS-67534d12-128d-4720-ae53-791095255043 from datanode DatanodeRegistration(127.0.0.1:36845, datanodeUuid=0283b3f6-7dae-4e65-912c-3a2518969050, infoPort=38451, infoSecurePort=0, ipcPort=43807, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072)
2024-12-07T17:51:31,747 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x920ca2d22a2e042e with lease ID 0x65c7890b0b90c661: from storage DS-67534d12-128d-4720-ae53-791095255043 node DatanodeRegistration(127.0.0.1:36845, datanodeUuid=0283b3f6-7dae-4e65-912c-3a2518969050, infoPort=38451, infoSecurePort=0, ipcPort=43807, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-07T17:51:31,747 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xdef3a96f816651c5 with lease ID 0x65c7890b0b90c662: Processing first storage report for DS-0227fffc-9400-46b6-9a10-d30c32aaf700 from datanode DatanodeRegistration(127.0.0.1:44441, datanodeUuid=eb45ebf0-fe9d-4e21-b581-49b6bcb1794f, infoPort=40139, infoSecurePort=0, ipcPort=35431, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072)
2024-12-07T17:51:31,747 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xdef3a96f816651c5 with lease ID 0x65c7890b0b90c662: from storage DS-0227fffc-9400-46b6-9a10-d30c32aaf700 node DatanodeRegistration(127.0.0.1:44441, datanodeUuid=eb45ebf0-fe9d-4e21-b581-49b6bcb1794f, infoPort=40139, infoSecurePort=0, ipcPort=35431, storageInfo=lv=-57;cid=testClusterID;nsid=1184411875;c=1733593889072), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-07T17:51:31,788 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(631): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07
2024-12-07T17:51:31,799 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=157, OpenFileDescriptor=391, MaxFileDescriptor=1048576, SystemLoadAverage=79, ProcessCount=11, AvailableMemoryMB=20406
2024-12-07T17:51:31,816 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-07T17:51:31,819 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-07T17:51:31,999 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741825_1001 (size=7)
2024-12-07T17:51:32,000 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741825_1001 (size=7)
2024-12-07T17:51:32,000 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741825_1001 (size=7)
2024-12-07T17:51:32,406 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8
2024-12-07T17:51:32,406 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-07T17:51:32,408 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-07T17:51:32,418 DEBUG [Time-limited test {}] util.ClassSize(228): Using Unsafe to estimate memory layout
2024-12-07T17:51:32,436 INFO [Time-limited test {}] metrics.MetricRegistriesLoader(60): Loaded MetricRegistries class org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl
2024-12-07T17:51:32,438 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-07T17:51:32,447 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/testSyncRunnerIndexOverflow, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/oldWALs, maxLogs=1760
2024-12-07T17:51:32,488 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593892480
2024-12-07T17:51:32,539 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/testSyncRunnerIndexOverflow/wal.1733593892480
2024-12-07T17:51:32,575 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:40139:40139)]
2024-12-07T17:51:32,626 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:32,627 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:32,627 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:32,627 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:32,627 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:32,632 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741826_1002 (size=1293)
2024-12-07T17:51:32,632 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741826_1002 (size=1293)
2024-12-07T17:51:32,632 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741826_1002 (size=1293)
2024-12-07T17:51:32,640 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/oldWALs
2024-12-07T17:51:32,642 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733593892480)
2024-12-07T17:51:32,652 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=164 (was 157) Potentially hanging thread: sync.2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: weak-ref-cleaner-strictcontextstorage java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:155) java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:176) app//io.opentelemetry.context.StrictContextStorage$PendingScopes.run(StrictContextStorage.java:269) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: sync.0 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: sync.4 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: sync.1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) 
java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: LeaseRenewer:jenkins@localhost:33389 java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:441) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$800(LeaseRenewer.java:77) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:336) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: sync.3 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) - Thread LEAK? -, OpenFileDescriptor=403 (was 391) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=79 (was 79), ProcessCount=11 (was 11), AvailableMemoryMB=20347 (was 20406) 2024-12-07T17:51:32,658 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=164, OpenFileDescriptor=403, MaxFileDescriptor=1048576, SystemLoadAverage=79, ProcessCount=11, AvailableMemoryMB=20347 2024-12-07T17:51:32,683 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741827_1003 (size=7) 2024-12-07T17:51:32,686 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741827_1003 (size=7) 2024-12-07T17:51:32,686 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741827_1003 (size=7) 2024-12-07T17:51:32,688 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8 2024-12-07T17:51:32,689 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:32,692 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:32,704 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-07T17:51:32,704 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/testUnflushedSeqIdTracking, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/oldWALs, maxLogs=1760 2024-12-07T17:51:32,707 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593892707 2024-12-07T17:51:32,719 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/testUnflushedSeqIdTracking/wal.1733593892707 2024-12-07T17:51:32,724 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:40139:40139)] 2024-12-07T17:51:32,729 INFO [Time-limited test {}] regionserver.ChunkCreator(472): data poolSizePercentage is less than 0. So not using pool 2024-12-07T17:51:32,729 INFO [Time-limited test {}] regionserver.ChunkCreator(472): index poolSizePercentage is less than 0. 
So not using pool 2024-12-07T17:51:32,751 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 50a44a87351f7f47590db106066cc77f, NAME => 'testUnflushedSeqIdTracking,,1733593892727.50a44a87351f7f47590db106066cc77f.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testUnflushedSeqIdTracking', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07 2024-12-07T17:51:32,774 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741829_1005 (size=61) 2024-12-07T17:51:32,774 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741829_1005 (size=61) 2024-12-07T17:51:32,775 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741829_1005 (size=61) 2024-12-07T17:51:32,779 INFO [Time-limited test {}] throttle.StoreHotnessProtector(112): StoreHotnessProtector is disabled. Set hbase.region.store.parallel.put.limit > 0 to enable, which may help mitigate load under heavy write pressure. 2024-12-07T17:51:32,784 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testUnflushedSeqIdTracking,,1733593892727.50a44a87351f7f47590db106066cc77f.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-07T17:51:32,825 INFO [StoreOpener-50a44a87351f7f47590db106066cc77f-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 50a44a87351f7f47590db106066cc77f 2024-12-07T17:51:32,851 INFO [StoreOpener-50a44a87351f7f47590db106066cc77f-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 50a44a87351f7f47590db106066cc77f columnFamilyName b 2024-12-07T17:51:32,856 DEBUG [StoreOpener-50a44a87351f7f47590db106066cc77f-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-07T17:51:32,860 INFO [StoreOpener-50a44a87351f7f47590db106066cc77f-1 {}] regionserver.HStore(327): Store=50a44a87351f7f47590db106066cc77f/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-07T17:51:32,862 DEBUG [Time-limited 
test {}] regionserver.HRegion(1038): replaying wal for 50a44a87351f7f47590db106066cc77f 2024-12-07T17:51:32,865 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testUnflushedSeqIdTracking/50a44a87351f7f47590db106066cc77f 2024-12-07T17:51:32,867 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testUnflushedSeqIdTracking/50a44a87351f7f47590db106066cc77f 2024-12-07T17:51:32,868 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/08c89df9-5c75-9a3f-837f-f8a3ad443a6a/data/default/testUnflushedSeqIdTracking/50a44a87351f7f47590db106066cc77f 2024-12-07T17:51:32,872 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 50a44a87351f7f47590db106066cc77f 2024-12-07T17:51:32,873 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 50a44a87351f7f47590db106066cc77f 2024-12-07T17:51:32,881 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 50a44a87351f7f47590db106066cc77f 2024-12-07T17:51:32,886 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:33389/user/jenkins/test-data/08c89df9-5c75-9a3f-837f-f8a3ad443a6a/data/default/testUnflushedSeqIdTracking/50a44a87351f7f47590db106066cc77f/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-07T17:51:32,888 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 50a44a87351f7f47590db106066cc77f; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=61828946, jitterRate=-0.07867690920829773}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-12-07T17:51:32,902 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 50a44a87351f7f47590db106066cc77f: Writing region info on filesystem at 1733593892803Initializing all the Stores at 1733593892806 (+3 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733593892806Cleaning up temporary data from old regions at 1733593892873 (+67 ms)Region opened successfully at 1733593892899 (+26 ms) 2024-12-07T17:51:35,937 INFO [pool-60-thread-2 {}] regionserver.HRegion(2902): Flushing 50a44a87351f7f47590db106066cc77f 1/1 column families, dataSize=24 B heapSize=352 B 2024-12-07T17:51:39,028 DEBUG [pool-60-thread-2 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/testUnflushedSeqIdTracking/50a44a87351f7f47590db106066cc77f/.tmp/b/5b80e4dd8e424ae880b0d473b930b711 is 28, key is b/b:b/1733593892925/Put/seqid=0 2024-12-07T17:51:39,041 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741830_1006 (size=4945) 2024-12-07T17:51:39,041 INFO [Block report processor {}] 
blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741830_1006 (size=4945) 2024-12-07T17:51:39,042 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741830_1006 (size=4945) 2024-12-07T17:51:39,043 INFO [pool-60-thread-2 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=4 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/testUnflushedSeqIdTracking/50a44a87351f7f47590db106066cc77f/.tmp/b/5b80e4dd8e424ae880b0d473b930b711 2024-12-07T17:51:39,119 DEBUG [pool-60-thread-2 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/testUnflushedSeqIdTracking/50a44a87351f7f47590db106066cc77f/.tmp/b/5b80e4dd8e424ae880b0d473b930b711 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/testUnflushedSeqIdTracking/50a44a87351f7f47590db106066cc77f/b/5b80e4dd8e424ae880b0d473b930b711 2024-12-07T17:51:39,132 INFO [pool-60-thread-2 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/testUnflushedSeqIdTracking/50a44a87351f7f47590db106066cc77f/b/5b80e4dd8e424ae880b0d473b930b711, entries=1, sequenceid=4, filesize=4.8 K 2024-12-07T17:51:39,142 INFO [pool-60-thread-2 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 50a44a87351f7f47590db106066cc77f in 3208ms, sequenceid=4, compaction requested=false 2024-12-07T17:51:39,143 DEBUG [pool-60-thread-2 {}] regionserver.HRegion(2603): Flush status journal for 50a44a87351f7f47590db106066cc77f: 2024-12-07T17:51:39,143 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(193): Flush result:FLUSHED_NO_COMPACTION_NEEDED 2024-12-07T17:51:39,143 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(194): Flush succeeded:true 2024-12-07T17:51:39,144 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 50a44a87351f7f47590db106066cc77f, disabling compactions & flushes 2024-12-07T17:51:39,144 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testUnflushedSeqIdTracking,,1733593892727.50a44a87351f7f47590db106066cc77f. 2024-12-07T17:51:39,144 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testUnflushedSeqIdTracking,,1733593892727.50a44a87351f7f47590db106066cc77f. 2024-12-07T17:51:39,144 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testUnflushedSeqIdTracking,,1733593892727.50a44a87351f7f47590db106066cc77f. after waiting 0 ms 2024-12-07T17:51:39,145 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testUnflushedSeqIdTracking,,1733593892727.50a44a87351f7f47590db106066cc77f. 2024-12-07T17:51:39,147 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testUnflushedSeqIdTracking,,1733593892727.50a44a87351f7f47590db106066cc77f. 
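The flush recorded above is a two-phase commit: the new HFile is first written out completely under the region's .tmp directory (.tmp/b/5b80e4dd...), and only then moved into the live column family directory, so a crash mid-flush can never leave a partially written store file visible to readers. A minimal sketch of the commit step, assuming a generic Hadoop FileSystem; the class and helper names are illustrative, not the HStore internals:

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class FlushCommitSketch {
        // Move a fully written HFile from <region>/.tmp/<family>/ into
        // <region>/<family>/. On HDFS the rename is atomic within one
        // filesystem, which is what makes the commit safe.
        public static Path commitFlushedFile(FileSystem fs, Path regionDir,
                String family, Path tmpFile) throws IOException {
            Path dest = new Path(new Path(regionDir, family), tmpFile.getName());
            if (!fs.rename(tmpFile, dest)) {
                throw new IOException("Failed to commit " + tmpFile + " as " + dest);
            }
            return dest;
        }
    }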
2024-12-07T17:51:39,147 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 50a44a87351f7f47590db106066cc77f: Waiting for close lock at 1733593899144Disabling compacts and flushes for region at 1733593899144Disabling writes for close at 1733593899145 (+1 ms)Writing region close event to WAL at 1733593899146 (+1 ms)Closed at 1733593899146 2024-12-07T17:51:39,148 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,148 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,148 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,148 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,149 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,152 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741828_1004 (size=875) 2024-12-07T17:51:39,152 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741828_1004 (size=875) 2024-12-07T17:51:39,152 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741828_1004 (size=875) 2024-12-07T17:51:39,156 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/oldWALs 2024-12-07T17:51:39,156 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733593892707) 2024-12-07T17:51:39,164 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=180 (was 164) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:58292 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: HBase-Metrics2-1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182) 
java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: pool-60-thread-2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data5 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: pool-60-thread-1 
java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: org.apache.hadoop.hdfs.PeerCache@106f1a79 java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hdfs.PeerCache.run(PeerCache.java:253) app//org.apache.hadoop.hdfs.PeerCache.access$000(PeerCache.java:46) app//org.apache.hadoop.hdfs.PeerCache$1.run(PeerCache.java:124) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:47632 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) 
java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Monitor thread for TaskMonitor java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hbase.monitoring.TaskMonitor$MonitorRunnable.run(TaskMonitor.java:325) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data6 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=417 (was 403) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=72 (was 79), ProcessCount=11 (was 11), AvailableMemoryMB=20309 (was 20347) 2024-12-07T17:51:39,172 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALComparator Thread=180, OpenFileDescriptor=417, MaxFileDescriptor=1048576, SystemLoadAverage=72, ProcessCount=11, AvailableMemoryMB=20309 2024-12-07T17:51:39,186 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741831_1007 (size=7) 2024-12-07T17:51:39,187 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741831_1007 (size=7) 2024-12-07T17:51:39,187 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741831_1007 (size=7) 2024-12-07T17:51:39,189 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8 2024-12-07T17:51:39,189 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:39,191 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:39,197 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
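The ResourceChecker lines bracket every test with before/after counts of threads, open file descriptors, system load, and available memory, and print the stacks of potentially hanging threads whenever the thread count grows. A rough sketch of that before/after accounting, using only the JDK's ThreadMXBean; it mirrors the idea, not the org.apache.hadoop.hbase.ResourceChecker implementation:

    import java.lang.management.ManagementFactory;
    import java.lang.management.ThreadMXBean;

    public class LeakCheckSketch {
        private static final ThreadMXBean THREADS =
                ManagementFactory.getThreadMXBean();

        // Snapshot the live thread count around a test body and flag a
        // possible leak when the count has grown afterwards.
        public static void run(String testName, Runnable test) {
            int before = THREADS.getThreadCount();
            test.run();
            int after = THREADS.getThreadCount();
            if (after > before) {
                System.out.printf("after: %s Thread=%d (was %d) - Thread LEAK?%n",
                        testName, after, before);
            }
        }
    }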
2024-12-07T17:51:39,198 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/91947427-8e36-f862-73bd-f66dd114430f/testWALComparator, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/91947427-8e36-f862-73bd-f66dd114430f/oldWALs, maxLogs=1760 2024-12-07T17:51:39,199 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899199 2024-12-07T17:51:39,209 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/91947427-8e36-f862-73bd-f66dd114430f/testWALComparator/wal.1733593899199 2024-12-07T17:51:39,211 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:40139:40139)] 2024-12-07T17:51:39,214 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(215): Log obtained is: FSHLog wal:(num 1733593899199) 2024-12-07T17:51:39,218 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-07T17:51:39,218 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=.meta, logDir=hdfs://localhost:33389/user/jenkins/test-data/91947427-8e36-f862-73bd-f66dd114430f/testWALComparator, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/91947427-8e36-f862-73bd-f66dd114430f/oldWALs, maxLogs=1760 2024-12-07T17:51:39,220 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899219.meta 2024-12-07T17:51:39,229 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/91947427-8e36-f862-73bd-f66dd114430f/testWALComparator/wal.1733593899219.meta 2024-12-07T17:51:39,234 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:40139:40139)] 2024-12-07T17:51:39,239 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,239 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,239 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,239 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,240 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,243 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741832_1008 (size=93) 2024-12-07T17:51:39,244 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741832_1008 (size=93) 2024-12-07T17:51:39,244 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741832_1008 (size=93) 2024-12-07T17:51:39,248 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/91947427-8e36-f862-73bd-f66dd114430f/oldWALs 2024-12-07T17:51:39,248 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733593899199) 2024-12-07T17:51:39,249 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,249 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,249 INFO 
[sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,250 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,250 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,253 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741833_1009 (size=93) 2024-12-07T17:51:39,253 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741833_1009 (size=93) 2024-12-07T17:51:39,253 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741833_1009 (size=93) 2024-12-07T17:51:39,256 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/91947427-8e36-f862-73bd-f66dd114430f/oldWALs 2024-12-07T17:51:39,257 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:.meta(num 1733593899219) 2024-12-07T17:51:39,264 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALComparator Thread=190 (was 180) - Thread LEAK? -, OpenFileDescriptor=423 (was 417) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=72 (was 72), ProcessCount=11 (was 11), AvailableMemoryMB=20302 (was 20309) 2024-12-07T17:51:39,272 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=190, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=72, ProcessCount=11, AvailableMemoryMB=20301 2024-12-07T17:51:39,287 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741834_1010 (size=7) 2024-12-07T17:51:39,288 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741834_1010 (size=7) 2024-12-07T17:51:39,288 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741834_1010 (size=7) 2024-12-07T17:51:39,289 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8 2024-12-07T17:51:39,290 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:39,291 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:39,294 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(383): testFindMemStoresEligibleForFlush 2024-12-07T17:51:39,313 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
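testWALComparator, which finished above, exercises the ordering of WAL files: each name ends in its creation timestamp (wal.1733593899199, or wal.1733593899219.meta for the meta WAL), and WALs are compared by that numeric suffix. An illustrative sketch of such a comparator; the real AbstractFSWAL logic also accounts for the configured prefix and suffix rather than doing bare string surgery:

    import java.util.Comparator;

    public class WalNameComparatorSketch {
        // Extract the trailing timestamp from names like "wal.1733593899199"
        // or "wal.1733593899219.meta".
        static long walTimestamp(String name) {
            String n = name.endsWith(".meta")
                    ? name.substring(0, name.length() - ".meta".length())
                    : name;
            return Long.parseLong(n.substring(n.lastIndexOf('.') + 1));
        }

        // Orders WAL file names oldest-first by creation timestamp.
        public static final Comparator<String> BY_TIMESTAMP =
                Comparator.comparingLong(WalNameComparatorSketch::walTimestamp);
    }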
2024-12-07T17:51:39,313 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs, maxLogs=1 2024-12-07T17:51:39,314 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899314 2024-12-07T17:51:39,324 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899314 2024-12-07T17:51:39,327 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:39,332 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899332 2024-12-07T17:51:39,340 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,341 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,341 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,341 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,341 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,341 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899314 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899332 2024-12-07T17:51:39,342 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451)] 2024-12-07T17:51:39,343 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899314 is not closed yet, will try archiving it next time 2024-12-07T17:51:39,344 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741835_1011 (size=283) 2024-12-07T17:51:39,345 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741835_1011 (size=283) 2024-12-07T17:51:39,345 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899344 2024-12-07T17:51:39,345 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741835_1011 (size=283) 2024-12-07T17:51:39,353 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,354 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,354 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,354 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,354 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,355 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899332 
with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899344 2024-12-07T17:51:39,356 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:39,356 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899332 is not closed yet, will try archiving it next time 2024-12-07T17:51:39,357 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 971bc54ba03fe8766e375e19da9c5fdb[cf1] 2024-12-07T17:51:39,358 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741836_1012 (size=283) 2024-12-07T17:51:39,359 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741836_1012 (size=283) 2024-12-07T17:51:39,359 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741836_1012 (size=283) 2024-12-07T17:51:39,360 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-07T17:51:39,360 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 971bc54ba03fe8766e375e19da9c5fdb[cf1] 2024-12-07T17:51:39,362 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 971bc54ba03fe8766e375e19da9c5fdb[cf1] 2024-12-07T17:51:39,362 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899362 2024-12-07T17:51:39,374 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,374 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,375 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,375 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,375 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,375 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899344 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899362 2024-12-07T17:51:39,376 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:39,376 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899344 is not closed yet, will try archiving it next time 2024-12-07T17:51:39,377 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899314 to hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs/wal.1733593899314 
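The recurring "Too many WALs; count=2, max=1" lines show the roll policy at work: with maxLogs=1, every roll pushes the live WAL count over the limit, so the WAL asks for a (partial) flush of exactly the regions and stores whose unflushed edits pin the oldest file (here 971bc54ba03fe8766e375e19da9c5fdb[cf1]); once those edits are flushed, the old files can be archived to oldWALs. A schematic of the selection step, assuming a plain map of oldest unflushed sequence ids per region; the shape is illustrative, not the AbstractFSWAL bookkeeping:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    public class TooManyWalsSketch {
        // A region still holding unflushed edits at or below the oldest
        // WAL's highest sequence id is what keeps that WAL from being
        // archived, so it is the one asked to flush.
        public static List<String> regionsToFlush(
                Map<String, Long> oldestUnflushedSeqIdByRegion,
                long oldestWalMaxSeqId) {
            List<String> flush = new ArrayList<>();
            for (Map.Entry<String, Long> e
                    : oldestUnflushedSeqIdByRegion.entrySet()) {
                if (e.getValue() <= oldestWalMaxSeqId) {
                    flush.add(e.getKey());
                }
            }
            return flush;
        }
    }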
2024-12-07T17:51:39,378 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741837_1013 (size=283) 2024-12-07T17:51:39,378 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-07T17:51:39,378 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899378 2024-12-07T17:51:39,378 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741837_1013 (size=283) 2024-12-07T17:51:39,379 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899332 to hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs/wal.1733593899332 2024-12-07T17:51:39,380 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741837_1013 (size=283) 2024-12-07T17:51:39,381 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899344 to hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs/wal.1733593899344 2024-12-07T17:51:39,387 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,387 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,387 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,387 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,387 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,387 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899362 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899378 2024-12-07T17:51:39,388 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:38451:38451)] 2024-12-07T17:51:39,388 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899362 is not closed yet, will try archiving it next time 2024-12-07T17:51:39,389 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-07T17:51:39,390 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741838_1014 (size=93) 2024-12-07T17:51:39,390 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741838_1014 (size=93) 2024-12-07T17:51:39,390 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741838_1014 (size=93) 2024-12-07T17:51:39,391 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899362 to 
hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs/wal.1733593899362 2024-12-07T17:51:39,499 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899499 2024-12-07T17:51:39,511 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,511 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,511 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,511 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,512 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,512 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899378 with entries=4, filesize=465 B; new WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899499 2024-12-07T17:51:39,513 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:40139:40139)] 2024-12-07T17:51:39,513 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899378 is not closed yet, will try archiving it next time 2024-12-07T17:51:39,514 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-07T17:51:39,516 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741839_1015 (size=473) 2024-12-07T17:51:39,516 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741839_1015 (size=473) 2024-12-07T17:51:39,516 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899516 2024-12-07T17:51:39,517 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741839_1015 (size=473) 2024-12-07T17:51:39,529 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,529 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,529 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,529 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,529 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,530 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899499 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899516 2024-12-07T17:51:39,530 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:40139:40139)] 2024-12-07T17:51:39,530 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899499 is not closed yet, will try archiving it next time 2024-12-07T17:51:39,531 INFO [Time-limited test {}] 
wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 971bc54ba03fe8766e375e19da9c5fdb[cf1],bd732dca7138ef4e2b682cc40753b271[cf1] 2024-12-07T17:51:39,531 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 971bc54ba03fe8766e375e19da9c5fdb[cf1],bd732dca7138ef4e2b682cc40753b271[cf1] 2024-12-07T17:51:39,531 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899531 2024-12-07T17:51:39,532 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741840_1016 (size=283) 2024-12-07T17:51:39,533 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741840_1016 (size=283) 2024-12-07T17:51:39,533 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741840_1016 (size=283) 2024-12-07T17:51:39,534 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899378 to hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs/wal.1733593899378 2024-12-07T17:51:39,535 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899499 to hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs/wal.1733593899499 2024-12-07T17:51:39,539 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,539 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,540 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,540 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,540 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,540 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899516 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899531 2024-12-07T17:51:39,541 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:40139:40139)] 2024-12-07T17:51:39,541 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899516 is not closed yet, will try archiving it next time 2024-12-07T17:51:39,542 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-07T17:51:39,543 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741841_1017 (size=93) 2024-12-07T17:51:39,543 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741841_1017 (size=93) 2024-12-07T17:51:39,544 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is 
added to blk_1073741841_1017 (size=93) 2024-12-07T17:51:39,544 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899516 to hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs/wal.1733593899516 2024-12-07T17:51:39,647 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899646 2024-12-07T17:51:39,660 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,660 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,661 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,661 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,661 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,661 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899531 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899646 2024-12-07T17:51:39,663 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:39,663 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899531 is not closed yet, will try archiving it next time 2024-12-07T17:51:39,663 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-07T17:51:39,664 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899664 2024-12-07T17:51:39,664 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741842_1018 (size=283) 2024-12-07T17:51:39,665 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741842_1018 (size=283) 2024-12-07T17:51:39,665 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741842_1018 (size=283) 2024-12-07T17:51:39,666 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899531 to hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs/wal.1733593899531 2024-12-07T17:51:39,673 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,673 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,673 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,673 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,673 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,673 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899646 with entries=0, filesize=85 B; new WAL 
/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899664 2024-12-07T17:51:39,676 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741843_1019 (size=93) 2024-12-07T17:51:39,677 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741843_1019 (size=93) 2024-12-07T17:51:39,677 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741843_1019 (size=93) 2024-12-07T17:51:39,678 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899646 to hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs/wal.1733593899646 2024-12-07T17:51:39,683 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451)] 2024-12-07T17:51:39,687 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899687 2024-12-07T17:51:39,695 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,696 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,696 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,696 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,696 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,696 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899664 with entries=6, filesize=709 B; new WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899687 2024-12-07T17:51:39,697 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451)] 2024-12-07T17:51:39,697 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899664 is not closed yet, will try archiving it next time 2024-12-07T17:51:39,699 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741844_1020 (size=717) 2024-12-07T17:51:39,699 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741844_1020 (size=717) 2024-12-07T17:51:39,699 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899699 2024-12-07T17:51:39,700 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741844_1020 (size=717) 2024-12-07T17:51:39,707 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,707 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,707 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,707 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,707 
INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,708 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899687 with entries=2, filesize=293 B; new WAL /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899699 2024-12-07T17:51:39,708 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:39,709 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:33389/user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/testFindMemStoresEligibleForFlush/wal.1733593899687 is not closed yet, will try archiving it next time 2024-12-07T17:51:39,709 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 25fe8c8b74a660ae58135d61e8c374bc[cf1,cf3,cf2] 2024-12-07T17:51:39,709 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-07T17:51:39,709 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 25fe8c8b74a660ae58135d61e8c374bc[cf3,cf2] 2024-12-07T17:51:39,710 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741845_1021 (size=301) 2024-12-07T17:51:39,710 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741845_1021 (size=301) 2024-12-07T17:51:39,710 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741845_1021 (size=301) 2024-12-07T17:51:39,712 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,712 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,712 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,712 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,712 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,715 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741846_1022 (size=93) 2024-12-07T17:51:39,715 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741846_1022 (size=93) 2024-12-07T17:51:39,715 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741846_1022 (size=93) 2024-12-07T17:51:39,721 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 3 WAL file(s) to /user/jenkins/test-data/d80c6afd-4258-a787-e158-74cbd1c28ddf/oldWALs 2024-12-07T17:51:39,721 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733593899699) 2024-12-07T17:51:39,728 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=195 (was 190) - Thread LEAK? -, OpenFileDescriptor=423 (was 423), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=75 (was 72) - SystemLoadAverage LEAK? 
-, ProcessCount=11 (was 11), AvailableMemoryMB=20287 (was 20301) 2024-12-07T17:51:39,735 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=195, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=75, ProcessCount=11, AvailableMemoryMB=20286 2024-12-07T17:51:39,748 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741847_1023 (size=7) 2024-12-07T17:51:39,748 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741847_1023 (size=7) 2024-12-07T17:51:39,748 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741847_1023 (size=7) 2024-12-07T17:51:39,750 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8 2024-12-07T17:51:39,751 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:39,753 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:39,759 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-07T17:51:39,760 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/69d6667f-c6d1-b89c-18b4-6738e5ddabf1/testRollWriterForClosedWAL, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/69d6667f-c6d1-b89c-18b4-6738e5ddabf1/testRollWriterForClosedWAL, maxLogs=1760 2024-12-07T17:51:39,761 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899761 2024-12-07T17:51:39,768 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/69d6667f-c6d1-b89c-18b4-6738e5ddabf1/testRollWriterForClosedWAL/wal.1733593899761 2024-12-07T17:51:39,773 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451)] 2024-12-07T17:51:39,774 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,774 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,775 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,775 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,775 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:39,780 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741848_1024 (size=93) 2024-12-07T17:51:39,781 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741848_1024 (size=93) 2024-12-07T17:51:39,781 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741848_1024 (size=93) 
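testRollWriterForClosedWAL, wrapped up just below, checks that a roll requested on a WAL that has already been closed does not spin up a new writer. A minimal sketch of that guard, assuming a simple closed flag; the field and the return convention are illustrative, and some HBase versions throw a WALClosedException here instead of returning quietly:

    import java.util.concurrent.atomic.AtomicBoolean;

    public class RollClosedWalSketch {
        private final AtomicBoolean closed = new AtomicBoolean(false);

        public void close() {
            closed.set(true);
        }

        // Rolling a closed WAL must not create a new wal.<timestamp> file.
        public byte[][] rollWriter() {
            if (closed.get()) {
                return null; // nothing to roll; no new writer is created
            }
            // ... create the next wal.<timestamp> file and swap writers ...
            return new byte[0][];
        }
    }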
2024-12-07T17:51:39,784 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/69d6667f-c6d1-b89c-18b4-6738e5ddabf1/testRollWriterForClosedWAL 2024-12-07T17:51:39,784 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733593899761) 2024-12-07T17:51:39,791 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=200 (was 195) - Thread LEAK? -, OpenFileDescriptor=423 (was 423), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=75 (was 75), ProcessCount=11 (was 11), AvailableMemoryMB=20286 (was 20286) 2024-12-07T17:51:39,798 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=200, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=75, ProcessCount=11, AvailableMemoryMB=20286 2024-12-07T17:51:39,811 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741849_1025 (size=7) 2024-12-07T17:51:39,811 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741849_1025 (size=7) 2024-12-07T17:51:39,812 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741849_1025 (size=7) 2024-12-07T17:51:39,813 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8 2024-12-07T17:51:39,814 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:39,816 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:39,823 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
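testMaxFlushedSequenceIdGoBackwards, whose setup continues below, guards the invariant that the flushed sequence id reported for a region only ever moves forward, even when flushes race or complete out of order. The invariant reduces to a monotonic maximum; a sketch with an AtomicLong, illustrative of the property rather than the actual region-server bookkeeping:

    import java.util.concurrent.atomic.AtomicLong;

    public class MonotonicSeqIdSketch {
        private final AtomicLong maxFlushedSeqId = new AtomicLong(-1);

        // Record a completed flush; values that would move the maximum
        // backwards are ignored.
        public long onFlushCompleted(long flushedSeqId) {
            return maxFlushedSeqId.accumulateAndGet(flushedSeqId, Math::max);
        }
    }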
2024-12-07T17:51:39,823 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/testMaxFlushedSequenceIdGoBackwards, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/oldWALs, maxLogs=1760 2024-12-07T17:51:39,824 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593899824 2024-12-07T17:51:39,831 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/testMaxFlushedSequenceIdGoBackwards/wal.1733593899824 2024-12-07T17:51:39,834 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:39,835 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 8a081bdbcac7d2909073b273cbb1449b, NAME => 'table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07 2024-12-07T17:51:39,845 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741851_1027 (size=40) 2024-12-07T17:51:39,846 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741851_1027 (size=40) 2024-12-07T17:51:39,846 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741851_1027 (size=40) 2024-12-07T17:51:39,847 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-07T17:51:39,849 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:39,851 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 
9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 8a081bdbcac7d2909073b273cbb1449b columnFamilyName a 2024-12-07T17:51:39,851 DEBUG [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-07T17:51:39,852 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] regionserver.HStore(327): Store=8a081bdbcac7d2909073b273cbb1449b/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-07T17:51:39,852 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:39,854 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 8a081bdbcac7d2909073b273cbb1449b columnFamilyName b 2024-12-07T17:51:39,854 DEBUG [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-07T17:51:39,855 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] regionserver.HStore(327): Store=8a081bdbcac7d2909073b273cbb1449b/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-07T17:51:39,855 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:39,857 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/table/8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:39,857 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/table/8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:39,858 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/6a79e697-8b04-ff2b-d1cd-f91e2bfb2e54/data/default/table/8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:39,859 DEBUG [Time-limited test {}] regionserver.HRegion(1048): 
stopping wal replay for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:39,859 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:39,861 DEBUG [Time-limited test {}] regionserver.FlushLargeStoresPolicy(65): No hbase.hregion.percolumnfamilyflush.size.lower.bound set in table table descriptor;using region.getMemStoreFlushHeapSize/# of families (64.0 M)) instead. 2024-12-07T17:51:39,863 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:39,866 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:33389/user/jenkins/test-data/6a79e697-8b04-ff2b-d1cd-f91e2bfb2e54/data/default/table/8a081bdbcac7d2909073b273cbb1449b/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-07T17:51:39,867 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 8a081bdbcac7d2909073b273cbb1449b; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=58760672, jitterRate=-0.12439775466918945}}}, FlushLargeStoresPolicy{flushSizeLowerBound=67108864} 2024-12-07T17:51:39,869 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 8a081bdbcac7d2909073b273cbb1449b: Writing region info on filesystem at 1733593899847Initializing all the Stores at 1733593899848 (+1 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733593899848Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733593899849 (+1 ms)Cleaning up temporary data from old regions at 1733593899859 (+10 ms)Region opened successfully at 1733593899869 (+10 ms) 2024-12-07T17:51:39,870 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 8a081bdbcac7d2909073b273cbb1449b, disabling compactions & flushes 2024-12-07T17:51:39,870 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. 2024-12-07T17:51:39,870 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. 2024-12-07T17:51:39,870 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. after waiting 0 ms 2024-12-07T17:51:39,870 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. 2024-12-07T17:51:39,870 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. 
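The FlushLargeStoresPolicy line above derives its per-family lower bound when hbase.hregion.percolumnfamilyflush.size.lower.bound is unset: the region's memstore flush size divided by the number of column families. A worked version of the arithmetic behind the "(64.0 M)" figure and the flushSizeLowerBound=67108864 printed in the open journal, assuming the default 128 MB flush size and the two families (a, b) of this table:

    public class FlushLowerBoundSketch {
      public static void main(String[] args) {
        long memstoreFlushSize = 134_217_728L; // hbase.hregion.memstore.flush.size, 128 MB
        int columnFamilies = 2;                // families 'a' and 'b' above
        long lowerBound = memstoreFlushSize / columnFamilies;
        System.out.println(lowerBound);        // 67108864, i.e. the 64.0 M in the log
      }
    }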
2024-12-07T17:51:39,871 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 8a081bdbcac7d2909073b273cbb1449b: Waiting for close lock at 1733593899870Disabling compacts and flushes for region at 1733593899870Disabling writes for close at 1733593899870Writing region close event to WAL at 1733593899870Closed at 1733593899870 2024-12-07T17:51:40,241 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 8a081bdbcac7d2909073b273cbb1449b, NAME => 'table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b.', STARTKEY => '', ENDKEY => ''} 2024-12-07T17:51:40,258 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,259 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-07T17:51:40,260 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,261 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,264 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,265 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 8a081bdbcac7d2909073b273cbb1449b columnFamilyName a 2024-12-07T17:51:40,265 DEBUG [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-07T17:51:40,266 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] regionserver.HStore(327): Store=8a081bdbcac7d2909073b273cbb1449b/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-07T17:51:40,266 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,268 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, 
offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 8a081bdbcac7d2909073b273cbb1449b columnFamilyName b 2024-12-07T17:51:40,268 DEBUG [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-07T17:51:40,269 INFO [StoreOpener-8a081bdbcac7d2909073b273cbb1449b-1 {}] regionserver.HStore(327): Store=8a081bdbcac7d2909073b273cbb1449b/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-07T17:51:40,269 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,270 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/table/8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,271 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/table/8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,273 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/6a79e697-8b04-ff2b-d1cd-f91e2bfb2e54/data/default/table/8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,274 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,274 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,277 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,278 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 8a081bdbcac7d2909073b273cbb1449b; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=60064295, jitterRate=-0.10497225821018219}}}, org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL$FlushSpecificStoresPolicy@2aa6376b 2024-12-07T17:51:40,278 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for 8a081bdbcac7d2909073b273cbb1449b 2024-12-07T17:51:40,280 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 8a081bdbcac7d2909073b273cbb1449b: Running coprocessor pre-open hook at 1733593900261Writing region info on filesystem at 1733593900261Initializing all the Stores at 1733593900263 (+2 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', 
BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733593900263Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733593900263Cleaning up temporary data from old regions at 1733593900274 (+11 ms)Running coprocessor post-open hooks at 1733593900278 (+4 ms)Region opened successfully at 1733593900280 (+2 ms) 2024-12-07T17:51:43,299 INFO [pool-78-thread-1 {}] regionserver.HRegion(2902): Flushing 8a081bdbcac7d2909073b273cbb1449b 2/2 column families, dataSize=96 B heapSize=896 B 2024-12-07T17:51:45,777 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties 2024-12-07T17:51:46,326 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/a/c991df7dcce342348d7a7546cbd3a6e9 is 28, key is a/a:a/1733593900287/Put/seqid=0 2024-12-07T17:51:46,335 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741852_1028 (size=4945) 2024-12-07T17:51:46,335 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741852_1028 (size=4945) 2024-12-07T17:51:46,335 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741852_1028 (size=4945) 2024-12-07T17:51:46,335 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/a/c991df7dcce342348d7a7546cbd3a6e9 2024-12-07T17:51:46,358 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/b/d5317cd77350435cb92a426ca5a471ff is 28, key is a/b:b/1733593900287/Put/seqid=0 2024-12-07T17:51:46,366 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741853_1029 (size=4945) 2024-12-07T17:51:46,366 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741853_1029 (size=4945) 2024-12-07T17:51:46,366 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741853_1029 (size=4945) 2024-12-07T17:51:46,367 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), 
to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/b/d5317cd77350435cb92a426ca5a471ff 2024-12-07T17:51:46,376 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/a/c991df7dcce342348d7a7546cbd3a6e9 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/a/c991df7dcce342348d7a7546cbd3a6e9 2024-12-07T17:51:46,384 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/a/c991df7dcce342348d7a7546cbd3a6e9, entries=1, sequenceid=6, filesize=4.8 K 2024-12-07T17:51:46,386 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/b/d5317cd77350435cb92a426ca5a471ff as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/b/d5317cd77350435cb92a426ca5a471ff 2024-12-07T17:51:46,395 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/b/d5317cd77350435cb92a426ca5a471ff, entries=1, sequenceid=6, filesize=4.8 K 2024-12-07T17:51:46,397 INFO [pool-78-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~96 B/96, heapSize ~864 B/864, currentSize=0 B/0 for 8a081bdbcac7d2909073b273cbb1449b in 3098ms, sequenceid=6, compaction requested=false 2024-12-07T17:51:46,397 DEBUG [pool-78-thread-1 {}] regionserver.HRegion(2603): Flush status journal for 8a081bdbcac7d2909073b273cbb1449b: 2024-12-07T17:51:46,397 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(676): Flush result:FLUSHED_NO_COMPACTION_NEEDED 2024-12-07T17:51:46,397 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(677): Flush succeeded:true 2024-12-07T17:51:46,402 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 8a081bdbcac7d2909073b273cbb1449b 1/2 column families, dataSize=48 B heapSize=704 B; a={dataSize=24 B, heapSize=352 B, offHeapSize=0 B} 2024-12-07T17:51:46,409 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/a/1caa743b2da64e76bbd9b6f9faf97883 is 28, key is a/a:a/1733593900287/Put/seqid=0 2024-12-07T17:51:46,417 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741854_1030 (size=4945) 2024-12-07T17:51:46,417 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to 
blk_1073741854_1030 (size=4945) 2024-12-07T17:51:46,417 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741854_1030 (size=4945) 2024-12-07T17:51:46,418 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=10 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/a/1caa743b2da64e76bbd9b6f9faf97883 2024-12-07T17:51:46,428 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/a/1caa743b2da64e76bbd9b6f9faf97883 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/a/1caa743b2da64e76bbd9b6f9faf97883 2024-12-07T17:51:46,438 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/a/1caa743b2da64e76bbd9b6f9faf97883, entries=1, sequenceid=10, filesize=4.8 K 2024-12-07T17:51:46,440 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=24 B/24 for 8a081bdbcac7d2909073b273cbb1449b in 38ms, sequenceid=10, compaction requested=false 2024-12-07T17:51:46,440 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for 8a081bdbcac7d2909073b273cbb1449b: 2024-12-07T17:51:46,442 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 8a081bdbcac7d2909073b273cbb1449b, disabling compactions & flushes 2024-12-07T17:51:46,442 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. 2024-12-07T17:51:46,442 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. 2024-12-07T17:51:46,442 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. after waiting 0 ms 2024-12-07T17:51:46,442 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. 
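The Committing/Added pairs above show the two-phase flush publish: the flusher writes the new HFile under the region's .tmp directory, then moves it into the column-family directory so a reader never observes a partially written file. A minimal sketch of that commit step with the Hadoop FileSystem API (paths shortened; the hash-named file below is the one from the log):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class FlushCommitSketch {
      static void commit(Path regionDir) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        Path tmp = new Path(regionDir, ".tmp/a/1caa743b2da64e76bbd9b6f9faf97883");
        Path dst = new Path(regionDir, "a/1caa743b2da64e76bbd9b6f9faf97883");
        // A rename within one filesystem is atomic enough that readers observe
        // either no file or the complete file, never a torn write.
        if (!fs.rename(tmp, dst)) {
          throw new IOException("Failed to commit flushed file " + tmp);
        }
      }
    }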
2024-12-07T17:51:46,442 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 8a081bdbcac7d2909073b273cbb1449b 2/2 column families, dataSize=24 B heapSize=608 B 2024-12-07T17:51:46,449 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/b/bc7ea363402a49249174879fdfb85e0c is 28, key is a/b:b/1733593900287/Put/seqid=0 2024-12-07T17:51:46,457 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741855_1031 (size=4945) 2024-12-07T17:51:46,457 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741855_1031 (size=4945) 2024-12-07T17:51:46,457 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741855_1031 (size=4945) 2024-12-07T17:51:46,458 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=13 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/b/bc7ea363402a49249174879fdfb85e0c 2024-12-07T17:51:46,468 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/.tmp/b/bc7ea363402a49249174879fdfb85e0c as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/b/bc7ea363402a49249174879fdfb85e0c 2024-12-07T17:51:46,480 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/8a081bdbcac7d2909073b273cbb1449b/b/bc7ea363402a49249174879fdfb85e0c, entries=1, sequenceid=13, filesize=4.8 K 2024-12-07T17:51:46,482 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 8a081bdbcac7d2909073b273cbb1449b in 40ms, sequenceid=13, compaction requested=false 2024-12-07T17:51:46,489 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:33389/user/jenkins/test-data/6a79e697-8b04-ff2b-d1cd-f91e2bfb2e54/data/default/table/8a081bdbcac7d2909073b273cbb1449b/recovered.edits/16.seqid, newMaxSeqId=16, maxSeqId=1 2024-12-07T17:51:46,490 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. 
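"compaction requested=false" above is decided by the compaction policy whose parameters are echoed repeatedly in these logs (minFilesToCompact:3, maxFilesToCompact:10, ratio 1.200000). The heart of the ratio test used by ExploringCompactionPolicy is that no candidate file may exceed ratio times the combined size of the other candidates; a compact sketch of that check (an illustration of the rule, not the policy's full selection loop):

    public class FilesInRatioSketch {
      static boolean filesInRatio(long[] fileSizes, double ratio) {
        long total = 0;
        for (long size : fileSizes) {
          total += size;
        }
        for (long size : fileSizes) {
          // Reject the selection if any single file dwarfs the rest.
          if (size > (total - size) * ratio) {
            return false;
          }
        }
        return true;
      }

      public static void main(String[] args) {
        System.out.println(filesInRatio(new long[] {4945, 4945, 4945}, 1.2));   // true
        System.out.println(filesInRatio(new long[] {4945, 4945, 500_000}, 1.2)); // false
      }
    }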
2024-12-07T17:51:46,490 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 8a081bdbcac7d2909073b273cbb1449b: Waiting for close lock at 1733593906442Running coprocessor pre-close hooks at 1733593906442Disabling compacts and flushes for region at 1733593906442Disabling writes for close at 1733593906442Obtaining lock to block concurrent updates at 1733593906442Preparing flush snapshotting stores in 8a081bdbcac7d2909073b273cbb1449b at 1733593906442Finished memstore snapshotting table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b., syncing WAL and waiting on mvcc, flushsize=dataSize=24, getHeapSize=576, getOffHeapSize=0, getCellsCount=1 at 1733593906443 (+1 ms)Flushing stores of table,,1733593899835.8a081bdbcac7d2909073b273cbb1449b. at 1733593906444 (+1 ms)Flushing 8a081bdbcac7d2909073b273cbb1449b/b: creating writer at 1733593906445 (+1 ms)Flushing 8a081bdbcac7d2909073b273cbb1449b/b: appending metadata at 1733593906449 (+4 ms)Flushing 8a081bdbcac7d2909073b273cbb1449b/b: closing flushed file at 1733593906449Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@54fc0017: reopening flushed file at 1733593906467 (+18 ms)Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 8a081bdbcac7d2909073b273cbb1449b in 40ms, sequenceid=13, compaction requested=false at 1733593906482 (+15 ms)Writing region close event to WAL at 1733593906483 (+1 ms)Running coprocessor post-close hooks at 1733593906490 (+7 ms)Closed at 1733593906490 2024-12-07T17:51:46,491 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:46,491 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:46,491 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:46,491 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:46,491 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:46,494 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741850_1026 (size=2357) 2024-12-07T17:51:46,494 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741850_1026 (size=2357) 2024-12-07T17:51:46,494 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741850_1026 (size=2357) 2024-12-07T17:51:46,497 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/oldWALs 2024-12-07T17:51:46,497 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733593899824) 2024-12-07T17:51:46,503 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=208 (was 200) Potentially hanging thread: Timer for 'HBase' metrics system java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.util.TimerThread.mainLoop(Timer.java:563) java.base@17.0.11/java.util.TimerThread.run(Timer.java:516) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:42180 [Waiting for operation #7] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) 
java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data3 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data4 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:36470 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) 
app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:40570 [Waiting for operation #9] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=425 (was 423) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=68 (was 75), ProcessCount=11 (was 11), AvailableMemoryMB=20244 (was 20286) 2024-12-07T17:51:46,510 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=208, OpenFileDescriptor=425, MaxFileDescriptor=1048576, SystemLoadAverage=68, ProcessCount=11, AvailableMemoryMB=20243 2024-12-07T17:51:46,522 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741856_1032 (size=7) 2024-12-07T17:51:46,522 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741856_1032 (size=7) 2024-12-07T17:51:46,523 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741856_1032 (size=7) 2024-12-07T17:51:46,524 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8 2024-12-07T17:51:46,524 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:46,526 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:46,536 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(244): No decryptEncryptedDataEncryptionKey method in DFSClient, should be hadoop version with HDFS-12396 java.lang.NoSuchMethodException: org.apache.hadoop.hdfs.DFSClient.decryptEncryptedDataEncryptionKey(org.apache.hadoop.fs.FileEncryptionInfo) at java.lang.Class.getDeclaredMethod(Class.java:2675) ~[?:?] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelperWithoutHDFS12396(FanOutOneBlockAsyncDFSOutputSaslHelper.java:183) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelper(FanOutOneBlockAsyncDFSOutputSaslHelper.java:242) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.<clinit>(FanOutOneBlockAsyncDFSOutputSaslHelper.java:253) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at java.lang.Class.forName0(Native Method) ~[?:?] at java.lang.Class.forName(Class.java:375) ~[?:?] at org.apache.hadoop.hbase.wal.AsyncFSWALProvider.load(AsyncFSWALProvider.java:150) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.getProviderClass(WALFactory.java:174) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:262) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:214) ~[classes/:?] 
at org.apache.hadoop.hbase.HBaseTestingUtil.createWal(HBaseTestingUtil.java:2160) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2205) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2169) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFlushSequenceIdIsGreaterThanAllEditsInHFile(AbstractTestFSWAL.java:425) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
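The NoSuchMethodException traced above is expected rather than a failure: FanOutOneBlockAsyncDFSOutputSaslHelper probes by reflection for the DFSClient method added in HDFS-12396 and falls back to an older code path when it is absent, which is why the miss is logged at DEBUG. A sketch of that capability-probe pattern, simplified from the class names in the trace:

    import java.lang.reflect.Method;

    public class Hdfs12396ProbeSketch {
      static Method findDecryptMethod() {
        try {
          Class<?> dfsClient = Class.forName("org.apache.hadoop.hdfs.DFSClient");
          Class<?> feInfo = Class.forName("org.apache.hadoop.fs.FileEncryptionInfo");
          // Present only on Hadoop versions that include HDFS-12396.
          return dfsClient.getDeclaredMethod("decryptEncryptedDataEncryptionKey", feInfo);
        } catch (ReflectiveOperationException e) {
          // Expected on older Hadoop: the caller switches to the pre-HDFS-12396 helper.
          return null;
        }
      }
    }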
2024-12-07T17:51:46,540 INFO [Time-limited test {}] wal.WALFactory(196): Instantiating WALProvider of type class org.apache.hadoop.hbase.wal.AsyncFSWALProvider 2024-12-07T17:51:46,545 DEBUG [Time-limited test {}] channel.MultithreadEventLoopGroup(44): -Dio.netty.eventLoopThreads: 16 2024-12-07T17:51:46,558 DEBUG [Time-limited test {}] nio.NioEventLoop(110): -Dio.netty.noKeySetOptimization: false 2024-12-07T17:51:46,558 DEBUG [Time-limited test {}] nio.NioEventLoop(111): -Dio.netty.selectorAutoRebuildThreshold: 512 2024-12-07T17:51:46,570 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor defaultMonitorName 2024-12-07T17:51:46,573 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-07T17:51:46,573 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=hregion-51555920, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/WALs/hregion-51555920, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/oldWALs, maxLogs=1760 2024-12-07T17:51:46,588 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputHelper(524): When create output stream for /user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/WALs/hregion-51555920/hregion-51555920.1733593906574, exclude list is [], retry=0 2024-12-07T17:51:46,598 DEBUG [Time-limited test {}] channel.DefaultChannelId(84): -Dio.netty.processId: 26153 (auto-detected) 2024-12-07T17:51:46,600 DEBUG [Time-limited test {}] channel.DefaultChannelId(106): -Dio.netty.machineId: 02:42:ac:ff:fe:11:00:02 (auto-detected) 2024-12-07T17:51:46,618 DEBUG [AsyncFSWAL-1-2 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:36845,DS-02ae2ff1-ec0e-45b3-8f03-11b97394b189,DISK] 2024-12-07T17:51:46,618 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:44441,DS-c7dc589b-d465-42a5-a262-7236c2907e5a,DISK] 2024-12-07T17:51:46,618 DEBUG [AsyncFSWAL-1-3 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:39393,DS-ff468f6c-05fa-48cf-ab7a-0ad555511de2,DISK] 2024-12-07T17:51:46,621 DEBUG [AsyncFSWAL-1-3 {}] asyncfs.ProtobufDecoder(117): Hadoop 3.3 and above shades protobuf. 
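"Instantiating WALProvider of type class ...AsyncFSWALProvider" above is the outcome of provider resolution in WALFactory. In recent HBase versions the provider is selected with the hbase.wal.provider key; a hedged sketch (key and values as commonly documented, worth checking against the HBase version in use):

    import org.apache.hadoop.conf.Configuration;

    public class WalProviderSketch {
      public static Configuration chooseProvider() {
        Configuration conf = new Configuration();
        // "asyncfs" selects AsyncFSWALProvider as seen above;
        // "filesystem" selects the classic FSHLog-based provider.
        conf.set("hbase.wal.provider", "asyncfs");
        return conf;
      }
    }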
2024-12-07T17:51:46,649 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/WALs/hregion-51555920/hregion-51555920.1733593906574 2024-12-07T17:51:46,650 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new AsyncFSWAL writer with pipeline: [(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:46,650 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => a94522fbd337ae7ddc0e0963689a18e4, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testFlushSequenceIdIsGreaterThanAllEditsInHFile', {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc 2024-12-07T17:51:46,660 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741858_1034 (size=82) 2024-12-07T17:51:46,660 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741858_1034 (size=82) 2024-12-07T17:51:46,660 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741858_1034 (size=82) 2024-12-07T17:51:46,661 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-07T17:51:46,663 INFO [StoreOpener-a94522fbd337ae7ddc0e0963689a18e4-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region a94522fbd337ae7ddc0e0963689a18e4 2024-12-07T17:51:46,665 INFO [StoreOpener-a94522fbd337ae7ddc0e0963689a18e4-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region a94522fbd337ae7ddc0e0963689a18e4 columnFamilyName f 2024-12-07T17:51:46,665 DEBUG [StoreOpener-a94522fbd337ae7ddc0e0963689a18e4-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-07T17:51:46,666 INFO [StoreOpener-a94522fbd337ae7ddc0e0963689a18e4-1 {}] regionserver.HStore(327): Store=a94522fbd337ae7ddc0e0963689a18e4/f, 
memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-07T17:51:46,666 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for a94522fbd337ae7ddc0e0963689a18e4 2024-12-07T17:51:46,667 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4 2024-12-07T17:51:46,668 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4 2024-12-07T17:51:46,668 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4 2024-12-07T17:51:46,669 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for a94522fbd337ae7ddc0e0963689a18e4 2024-12-07T17:51:46,669 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for a94522fbd337ae7ddc0e0963689a18e4 2024-12-07T17:51:46,671 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for a94522fbd337ae7ddc0e0963689a18e4 2024-12-07T17:51:46,674 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-07T17:51:46,675 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened a94522fbd337ae7ddc0e0963689a18e4; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=69822819, jitterRate=0.040441080927848816}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-12-07T17:51:46,680 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for a94522fbd337ae7ddc0e0963689a18e4: Writing region info on filesystem at 1733593906661Initializing all the Stores at 1733593906663 (+2 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733593906663Cleaning up temporary data from old regions at 1733593906669 (+6 ms)Region opened successfully at 1733593906680 (+11 ms) 2024-12-07T17:51:46,680 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing a94522fbd337ae7ddc0e0963689a18e4, disabling compactions & flushes 2024-12-07T17:51:46,680 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4. 2024-12-07T17:51:46,680 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4. 
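The desiredMaxFileSize/jitterRate pairs above come from ConstantSizeRegionSplitPolicy, which perturbs the configured maximum file size by a small random factor so regions on one server do not all decide to split at the same instant. The printed numbers are consistent with a 64 MB base size; a worked check (the base size is inferred from the logs, and rounding details may differ from the actual implementation):

    public class SplitJitterSketch {
      public static void main(String[] args) {
        long maxFileSize = 67_108_864L;           // inferred 64 MB test setting
        double jitterRate = 0.040441080927848816; // value printed above
        long desired = maxFileSize + Math.round(maxFileSize * jitterRate);
        System.out.println(desired);              // 69822819, matching the log
      }
    }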
2024-12-07T17:51:46,680 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4. after waiting 0 ms 2024-12-07T17:51:46,681 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4. 2024-12-07T17:51:46,681 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4. 2024-12-07T17:51:46,681 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for a94522fbd337ae7ddc0e0963689a18e4: Waiting for close lock at 1733593906680Disabling compacts and flushes for region at 1733593906680Disabling writes for close at 1733593906680Writing region close event to WAL at 1733593906681 (+1 ms)Closed at 1733593906681 2024-12-07T17:51:46,688 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741857_1033 (size=93) 2024-12-07T17:51:46,688 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741857_1033 (size=93) 2024-12-07T17:51:46,688 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741857_1033 (size=93) 2024-12-07T17:51:46,691 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/oldWALs 2024-12-07T17:51:46,691 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: AsyncFSWAL hregion-51555920:(num 1733593906574) 2024-12-07T17:51:46,694 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
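The region-creation entries in this run print the full table descriptor inline, e.g. the single family 'f' with VERSIONS => '1' and BLOOMFILTER => 'ROW' for the region created above. For reference, a minimal sketch of building an equivalent descriptor with the HBase 2+ client builder API (a simplified illustration; the test harness constructs its regions through HBaseTestingUtil instead):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TableDescriptorSketch {
      public static TableDescriptor build() {
        return TableDescriptorBuilder
            .newBuilder(TableName.valueOf("testFlushSequenceIdIsGreaterThanAllEditsInHFile"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder
                .newBuilder(Bytes.toBytes("f"))
                .setMaxVersions(1) // VERSIONS => '1' in the log
                .build())
            .build();
      }
    }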
2024-12-07T17:51:46,694 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/testFlushSequenceIdIsGreaterThanAllEditsInHFile, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/testFlushSequenceIdIsGreaterThanAllEditsInHFile, maxLogs=1760
2024-12-07T17:51:46,695 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593906695
2024-12-07T17:51:46,702 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/testFlushSequenceIdIsGreaterThanAllEditsInHFile/wal.1733593906695
2024-12-07T17:51:46,705 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new writer with pipeline: [(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833),(127.0.0.1/127.0.0.1:40139:40139)]
2024-12-07T17:51:46,706 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-07T17:51:46,707 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => a94522fbd337ae7ddc0e0963689a18e4, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4.', STARTKEY => '', ENDKEY => ''}
2024-12-07T17:51:46,708 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-07T17:51:46,708 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for a94522fbd337ae7ddc0e0963689a18e4
2024-12-07T17:51:46,708 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for a94522fbd337ae7ddc0e0963689a18e4
2024-12-07T17:51:46,710 INFO [StoreOpener-a94522fbd337ae7ddc0e0963689a18e4-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region a94522fbd337ae7ddc0e0963689a18e4
2024-12-07T17:51:46,712 INFO [StoreOpener-a94522fbd337ae7ddc0e0963689a18e4-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region a94522fbd337ae7ddc0e0963689a18e4 columnFamilyName f
2024-12-07T17:51:46,712 DEBUG [StoreOpener-a94522fbd337ae7ddc0e0963689a18e4-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-07T17:51:46,713 INFO [StoreOpener-a94522fbd337ae7ddc0e0963689a18e4-1 {}] regionserver.HStore(327): Store=a94522fbd337ae7ddc0e0963689a18e4/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-07T17:51:46,713 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for a94522fbd337ae7ddc0e0963689a18e4
2024-12-07T17:51:46,714 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4
2024-12-07T17:51:46,714 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4
2024-12-07T17:51:46,716 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4
2024-12-07T17:51:46,717 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for a94522fbd337ae7ddc0e0963689a18e4
2024-12-07T17:51:46,717 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for a94522fbd337ae7ddc0e0963689a18e4
2024-12-07T17:51:46,719 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for a94522fbd337ae7ddc0e0963689a18e4
2024-12-07T17:51:46,720 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened a94522fbd337ae7ddc0e0963689a18e4; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=70700125, jitterRate=0.05351395905017853}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-07T17:51:46,722 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for a94522fbd337ae7ddc0e0963689a18e4: Writing region info on filesystem at 1733593906708Initializing all the Stores at 1733593906710 (+2 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733593906710Cleaning up temporary data from old regions at 1733593906717 (+7 ms)Region opened successfully at 1733593906722 (+5 ms)
2024-12-07T17:51:46,736 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.log.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir so I do NOT create it in target/test-data/a561e03b-c918-d3ce-829c-d9c740b144dc
2024-12-07T17:51:46,736 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.log.dir property value differs in configuration and system: Configuration=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/../logs while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir Erasing configuration value by system value.
2024-12-07T17:51:46,736 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.tmp.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.tmp.dir so I do NOT create it in target/test-data/a561e03b-c918-d3ce-829c-d9c740b144dc
2024-12-07T17:51:46,736 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.tmp.dir property value differs in configuration and system: Configuration=/tmp/hadoop-jenkins while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.tmp.dir Erasing configuration value by system value.
2024-12-07T17:51:46,736 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/a561e03b-c918-d3ce-829c-d9c740b144dc
2024-12-07T17:51:46,763 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing a94522fbd337ae7ddc0e0963689a18e4 1/1 column families, dataSize=1.14 KB heapSize=2.13 KB
2024-12-07T17:51:46,863 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:46,964 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:47,065 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:47,166 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:47,267 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:47,367 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:47,468 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:47,569 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:47,670 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:47,771 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:47,872 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:47,940 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4/.tmp/f/055d2ff9c03b40ba823eee5b4d562eb2 is 121, key is testFlushSequenceIdIsGreaterThanAllEditsInHFile/f:x0/1733593906736/Put/seqid=0
2024-12-07T17:51:47,947 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741860_1036 (size=6333)
2024-12-07T17:51:47,947 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741860_1036 (size=6333)
2024-12-07T17:51:47,947 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741860_1036 (size=6333)
2024-12-07T17:51:47,948 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=1.14 KB at sequenceid=23 (bloomFilter=true), to=hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4/.tmp/f/055d2ff9c03b40ba823eee5b4d562eb2
2024-12-07T17:51:47,961 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4/.tmp/f/055d2ff9c03b40ba823eee5b4d562eb2 as hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4/f/055d2ff9c03b40ba823eee5b4d562eb2
2024-12-07T17:51:47,972 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/a94522fbd337ae7ddc0e0963689a18e4/f/055d2ff9c03b40ba823eee5b4d562eb2, entries=10, sequenceid=23, filesize=6.2 K
2024-12-07T17:51:48,073 DEBUG [FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-07T17:51:48,074 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~1.14 KB/1170, heapSize ~2.11 KB/2160, currentSize=0 B/0 for a94522fbd337ae7ddc0e0963689a18e4 in 1311ms, sequenceid=23, compaction requested=false
2024-12-07T17:51:48,074 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for a94522fbd337ae7ddc0e0963689a18e4:
2024-12-07T17:51:48,074 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing a94522fbd337ae7ddc0e0963689a18e4, disabling compactions & flushes
2024-12-07T17:51:48,074 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4.
2024-12-07T17:51:48,074 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4.
2024-12-07T17:51:48,074 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4. after waiting 0 ms
2024-12-07T17:51:48,074 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4.
2024-12-07T17:51:48,076 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733593906529.a94522fbd337ae7ddc0e0963689a18e4.
2024-12-07T17:51:48,076 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for a94522fbd337ae7ddc0e0963689a18e4: Waiting for close lock at 1733593908074Disabling compacts and flushes for region at 1733593908074Disabling writes for close at 1733593908074Writing region close event to WAL at 1733593908076 (+2 ms)Closed at 1733593908076
2024-12-07T17:51:48,076 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:48,077 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:48,077 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:48,077 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:48,077 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:48,080 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741859_1035 (size=16547)
2024-12-07T17:51:48,080 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741859_1035 (size=16547)
2024-12-07T17:51:48,081 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741859_1035 (size=16547)
2024-12-07T17:51:48,084 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/4ea7d11b-8eab-88f5-8daf-4f733a69725e/testFlushSequenceIdIsGreaterThanAllEditsInHFile
2024-12-07T17:51:48,084 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: wal:(num 1733593906695)
2024-12-07T17:51:48,092 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=216 (was 208)
Potentially hanging thread: AsyncFSWAL-1-1
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
    app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_590986707_22 at /127.0.0.1:42180 [Waiting for operation #8]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: AsyncFSWAL-1-3
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
    app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: AsyncFSWAL-1-2
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
    app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:36470 [Waiting for operation #4]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_590986707_22 at /127.0.0.1:40570 [Waiting for operation #11]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
- Thread LEAK? -, OpenFileDescriptor=457 (was 425) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=68 (was 68), ProcessCount=11 (was 11), AvailableMemoryMB=20209 (was 20243)
2024-12-07T17:51:48,099 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=216, OpenFileDescriptor=457, MaxFileDescriptor=1048576, SystemLoadAverage=68, ProcessCount=11, AvailableMemoryMB=20209
2024-12-07T17:51:48,108 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741861_1037 (size=7)
2024-12-07T17:51:48,109 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741861_1037 (size=7)
2024-12-07T17:51:48,109 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741861_1037 (size=7)
2024-12-07T17:51:48,110 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8
2024-12-07T17:51:48,110 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-07T17:51:48,112 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-07T17:51:48,116 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-07T17:51:48,116 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/oldWALs, maxLogs=1760
2024-12-07T17:51:48,117 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593908117
2024-12-07T17:51:48,124 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908117
2024-12-07T17:51:48,131 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833)]
2024-12-07T17:51:48,135 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593908135
2024-12-07T17:51:48,143 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593908136
2024-12-07T17:51:48,147 WARN [Time-limited test {}] wal.AbstractProtobufLogWriter(199): Init output failed, path=hdfs://localhost:33389/user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908136
java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?]
    at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?]
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.create(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy43.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?]
    ... 41 more
2024-12-07T17:51:48,149 DEBUG [Time-limited test {}] wal.FSHLogProvider(93): Error instantiating log writer.
java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?]
    at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?]
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.create(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy43.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] ... (the preceding five reflective-proxy frames repeat verbatim through the remaining wrapped create calls) ... at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 41 more 2024-12-07T17:51:48,159 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=243 (was 216) Potentially hanging thread: PacketResponder: BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:36845, 127.0.0.1:39393] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: FSHLog-0-hdfs://localhost:33389/user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e-prefix:default java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: ResponseProcessor for block BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: PacketResponder: BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:40656 [Receiving block BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) 
app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: ResponseProcessor for block BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: PacketResponder: BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:44441] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) 
app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908135 block BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: PacketResponder: BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:39393] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:36562 [Receiving block BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) 
app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:36845, 127.0.0.1:44441] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:42258 [Receiving block BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) 
app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:36548 [Receiving block BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:40650 [Receiving block BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) 
app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:42264 [Receiving block BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataStreamer for file 
/user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908117 block BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) - Thread LEAK? -, OpenFileDescriptor=483 (was 457) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=68 (was 68), ProcessCount=11 (was 11), AvailableMemoryMB=20204 (was 20209) 2024-12-07T17:51:48,167 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=243, OpenFileDescriptor=483, MaxFileDescriptor=1048576, SystemLoadAverage=68, ProcessCount=11, AvailableMemoryMB=20204 2024-12-07T17:51:48,180 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741864_1040 (size=7) 2024-12-07T17:51:48,180 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741864_1040 (size=7) 2024-12-07T17:51:48,180 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741864_1040 (size=7) 2024-12-07T17:51:48,182 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8 2024-12-07T17:51:48,182 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:48,183 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:48,188 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
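[Annotation] The long RemoteException trace above belongs to testFailedToCreateWALIfParentRenamed: the parent directory .../testFailedToCreateWALIfParentRenamed has been renamed out from under the WAL writer, so the NameNode's verifyParentDir check rejects the non-recursive create with "Parent directory doesn't exist", and the "Potentially hanging thread" entries that follow are just the ResourceChecker diffing live threads before and after the test. A minimal, self-contained sketch of that failure mode against the Hadoop FileSystem API (paths are hypothetical; this is not the actual TestFSHLog code):

```java
import java.io.FileNotFoundException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ParentRenamedCreateSketch {
  public static void main(String[] args) throws Exception {
    // Assumes fs.defaultFS in the Configuration points at a running HDFS.
    FileSystem fs = FileSystem.get(new Configuration());
    Path parent = new Path("/tmp/walParent");      // hypothetical directory
    Path moved  = new Path("/tmp/walParentMoved");
    fs.mkdirs(parent);
    fs.rename(parent, moved);                      // simulate the parent being renamed away
    try {
      // createNonRecursive refuses to create missing parents, which is what
      // WAL writers rely on; on HDFS the NameNode answers with the
      // "Parent directory doesn't exist" error seen in the trace above.
      fs.createNonRecursive(new Path(parent, "wal.1"), false, 4096, (short) 3, 64L << 20, null)
        .close();
    } catch (FileNotFoundException expected) {
      System.out.println("create failed as expected: " + expected.getMessage());
    } finally {
      fs.delete(moved, true);
    }
  }
}
```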
2024-12-07T17:51:48,188 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/705393e6-d6d5-3c64-a96f-fc5e04a5b1f3/testWALCoprocessorLoaded, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/705393e6-d6d5-3c64-a96f-fc5e04a5b1f3/oldWALs, maxLogs=1760 2024-12-07T17:51:48,189 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593908189 2024-12-07T17:51:48,227 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/705393e6-d6d5-3c64-a96f-fc5e04a5b1f3/testWALCoprocessorLoaded/wal.1733593908189 2024-12-07T17:51:48,229 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:48,231 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,231 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,232 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,232 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,232 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,237 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741865_1041 (size=93) 2024-12-07T17:51:48,237 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741865_1041 (size=93) 2024-12-07T17:51:48,238 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741865_1041 (size=93) 2024-12-07T17:51:48,242 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/705393e6-d6d5-3c64-a96f-fc5e04a5b1f3/oldWALs 2024-12-07T17:51:48,242 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733593908189) 2024-12-07T17:51:48,251 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=248 (was 243) - Thread LEAK? -, OpenFileDescriptor=495 (was 483) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=68 (was 68), ProcessCount=11 (was 11), AvailableMemoryMB=20199 (was 20204) 2024-12-07T17:51:48,258 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=248, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=68, ProcessCount=11, AvailableMemoryMB=20198 2024-12-07T17:51:48,268 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741866_1042 (size=7) 2024-12-07T17:51:48,269 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741866_1042 (size=7) 2024-12-07T17:51:48,269 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741866_1042 (size=7) 2024-12-07T17:51:48,275 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8 2024-12-07T17:51:48,275 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:48,277 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:48,282 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-07T17:51:48,282 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/60073843-150d-bb38-c4b3-5b26929abadc/testSyncNoAppend, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/60073843-150d-bb38-c4b3-5b26929abadc/testSyncNoAppend, maxLogs=1760 2024-12-07T17:51:48,283 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593908283 2024-12-07T17:51:48,290 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/60073843-150d-bb38-c4b3-5b26929abadc/testSyncNoAppend/wal.1733593908283 2024-12-07T17:51:48,292 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:48,293 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,293 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,293 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,293 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,294 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,296 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741867_1043 (size=93) 2024-12-07T17:51:48,296 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741867_1043 (size=93) 2024-12-07T17:51:48,296 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to 
blk_1073741867_1043 (size=93) 2024-12-07T17:51:48,298 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/60073843-150d-bb38-c4b3-5b26929abadc/testSyncNoAppend 2024-12-07T17:51:48,298 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733593908283) 2024-12-07T17:51:48,306 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=253 (was 248) - Thread LEAK? -, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=68 (was 68), ProcessCount=11 (was 11), AvailableMemoryMB=20195 (was 20198) 2024-12-07T17:51:48,315 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=253, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=68, ProcessCount=11, AvailableMemoryMB=20194 2024-12-07T17:51:48,327 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741868_1044 (size=7) 2024-12-07T17:51:48,328 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741868_1044 (size=7) 2024-12-07T17:51:48,328 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741868_1044 (size=7) 2024-12-07T17:51:48,329 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8 2024-12-07T17:51:48,329 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:48,331 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:48,336 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
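[Annotation] The testSyncNoAppend block above drives the WAL's sync path without ever writing an edit: sync() must still return promptly rather than block waiting for a sequence number that will never be appended, after which the sync runners are interrupted and the WAL is closed. A toy analogue of that contract, using a hypothetical in-memory WAL rather than the real FSHLog classes:

```java
import java.util.concurrent.atomic.AtomicLong;

// Hypothetical miniature WAL, only to illustrate the sync-without-append
// contract exercised by testSyncNoAppend; the real FSHLog is far more involved.
final class MiniWal {
  private final AtomicLong appended = new AtomicLong(); // highest sequence handed out
  private final AtomicLong synced   = new AtomicLong(); // highest sequence made durable

  long append(byte[] edit) {
    // Recording the edit bytes is elided; only the sequence matters here.
    return appended.incrementAndGet();
  }

  void sync() {
    // With no outstanding appends this degenerates to a no-op that must
    // still complete instead of hanging on an absent future sequence.
    synced.set(appended.get());
  }

  public static void main(String[] args) {
    MiniWal wal = new MiniWal();
    wal.sync(); // no append yet: must not hang
    System.out.println("synced with no appends: ok");
  }
}
```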
2024-12-07T17:51:48,336 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/42607835-8367-b655-d405-6f4fe80d0186/testWriteEntryCanBeNull, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/42607835-8367-b655-d405-6f4fe80d0186/testWriteEntryCanBeNull, maxLogs=1760 2024-12-07T17:51:48,337 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593908337 2024-12-07T17:51:48,347 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/42607835-8367-b655-d405-6f4fe80d0186/testWriteEntryCanBeNull/wal.1733593908337 2024-12-07T17:51:48,348 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:48,349 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,349 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,350 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,350 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,350 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-07T17:51:48,353 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741869_1045 (size=93) 2024-12-07T17:51:48,353 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741869_1045 (size=93) 2024-12-07T17:51:48,353 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741869_1045 (size=93) 2024-12-07T17:51:48,356 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/42607835-8367-b655-d405-6f4fe80d0186/testWriteEntryCanBeNull 2024-12-07T17:51:48,356 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733593908337) 2024-12-07T17:51:48,368 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=258 (was 253) - Thread LEAK? 
-, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=68 (was 68), ProcessCount=11 (was 11), AvailableMemoryMB=20190 (was 20194) 2024-12-07T17:51:48,377 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=258, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=68, ProcessCount=11, AvailableMemoryMB=20189 2024-12-07T17:51:48,390 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741870_1046 (size=7) 2024-12-07T17:51:48,390 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741870_1046 (size=7) 2024-12-07T17:51:48,390 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741870_1046 (size=7) 2024-12-07T17:51:48,391 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc with version=8 2024-12-07T17:51:48,392 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:48,393 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-07T17:51:48,399 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
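[Annotation] The next test, testUnflushedSeqIdTrackingWithAsyncWal, revolves around the WAL remembering the earliest sequence id each region has appended but not yet flushed; that minimum is what decides when an old WAL file can be moved to oldWALs, as happens at the end of the block below. A rough sketch of that bookkeeping (a hypothetical class, not HBase's SequenceIdAccounting, which differs in detail):

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Hypothetical tracker illustrating unflushed-sequence-id bookkeeping.
final class UnflushedSeqIdTracker {
  private final ConcurrentMap<String, Long> earliestUnflushed = new ConcurrentHashMap<>();

  // On append: remember the first unflushed sequence id per region.
  void onAppend(String encodedRegionName, long seqId) {
    earliestUnflushed.putIfAbsent(encodedRegionName, seqId);
  }

  // After a successful memstore flush: edits up to flushedSeqId are durable in HFiles.
  void onFlush(String encodedRegionName, long flushedSeqId) {
    earliestUnflushed.computeIfPresent(encodedRegionName,
        (region, first) -> first <= flushedSeqId ? null : first); // null removes the entry
  }

  // A WAL file whose highest sequence id is below this value holds no needed edits.
  long lowestUnflushedSeqId() {
    return earliestUnflushed.values().stream()
        .mapToLong(Long::longValue).min().orElse(Long.MAX_VALUE);
  }

  public static void main(String[] args) {
    UnflushedSeqIdTracker t = new UnflushedSeqIdTracker();
    t.onAppend("eb59d98570d2d23170674a6ed46d2f17", 2);
    System.out.println(t.lowestUnflushedSeqId()); // 2: the WAL cannot be archived yet
    t.onFlush("eb59d98570d2d23170674a6ed46d2f17", 6);
    System.out.println(t.lowestUnflushedSeqId()); // MAX_VALUE: everything flushed
  }
}
```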
2024-12-07T17:51:48,399 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/testUnflushedSeqIdTrackingWithAsyncWal, archiveDir=hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/oldWALs, maxLogs=1760 2024-12-07T17:51:48,400 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733593908400 2024-12-07T17:51:48,407 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/testUnflushedSeqIdTrackingWithAsyncWal/wal.1733593908400 2024-12-07T17:51:48,411 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40139:40139),(127.0.0.1/127.0.0.1:38451:38451),(127.0.0.1/127.0.0.1:32833:32833)] 2024-12-07T17:51:48,414 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => eb59d98570d2d23170674a6ed46d2f17, NAME => 'table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07 2024-12-07T17:51:48,426 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741872_1048 (size=40) 2024-12-07T17:51:48,426 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741872_1048 (size=40) 2024-12-07T17:51:48,427 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741872_1048 (size=40) 2024-12-07T17:51:48,427 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-07T17:51:48,429 INFO [StoreOpener-eb59d98570d2d23170674a6ed46d2f17-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,432 INFO [StoreOpener-eb59d98570d2d23170674a6ed46d2f17-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 
eb59d98570d2d23170674a6ed46d2f17 columnFamilyName b 2024-12-07T17:51:48,432 DEBUG [StoreOpener-eb59d98570d2d23170674a6ed46d2f17-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-07T17:51:48,432 INFO [StoreOpener-eb59d98570d2d23170674a6ed46d2f17-1 {}] regionserver.HStore(327): Store=eb59d98570d2d23170674a6ed46d2f17/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-07T17:51:48,433 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,434 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/table/eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,435 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/table/eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,435 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/e29ca86f-d83d-302a-aa30-d25b8ee786e8/data/default/table/eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,436 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,436 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,438 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,442 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:33389/user/jenkins/test-data/e29ca86f-d83d-302a-aa30-d25b8ee786e8/data/default/table/eb59d98570d2d23170674a6ed46d2f17/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-07T17:51:48,443 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened eb59d98570d2d23170674a6ed46d2f17; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=64350907, jitterRate=-0.0410967618227005}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-12-07T17:51:48,446 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for eb59d98570d2d23170674a6ed46d2f17: Writing region info on filesystem at 1733593908427Initializing all the Stores at 1733593908429 (+2 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733593908429Cleaning up temporary data from old regions at 1733593908436 (+7 ms)Region opened successfully at 1733593908445 (+9 ms) 2024-12-07T17:51:48,446 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing eb59d98570d2d23170674a6ed46d2f17, disabling compactions & flushes 2024-12-07T17:51:48,446 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region 
table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. 2024-12-07T17:51:48,446 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. 2024-12-07T17:51:48,446 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. after waiting 0 ms 2024-12-07T17:51:48,446 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. 2024-12-07T17:51:48,447 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. 2024-12-07T17:51:48,448 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for eb59d98570d2d23170674a6ed46d2f17: Waiting for close lock at 1733593908446Disabling compacts and flushes for region at 1733593908446Disabling writes for close at 1733593908446Writing region close event to WAL at 1733593908447 (+1 ms)Closed at 1733593908447 2024-12-07T17:51:48,449 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => eb59d98570d2d23170674a6ed46d2f17, NAME => 'table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17.', STARTKEY => '', ENDKEY => ''} 2024-12-07T17:51:48,450 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,450 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-07T17:51:48,450 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,450 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,453 INFO [StoreOpener-eb59d98570d2d23170674a6ed46d2f17-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,455 INFO [StoreOpener-eb59d98570d2d23170674a6ed46d2f17-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region eb59d98570d2d23170674a6ed46d2f17 columnFamilyName b 2024-12-07T17:51:48,455 DEBUG [StoreOpener-eb59d98570d2d23170674a6ed46d2f17-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-07T17:51:48,456 INFO [StoreOpener-eb59d98570d2d23170674a6ed46d2f17-1 {}] regionserver.HStore(327): 
Store=eb59d98570d2d23170674a6ed46d2f17/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-07T17:51:48,456 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,457 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/table/eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,458 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/data/default/table/eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,459 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:33389/user/jenkins/test-data/e29ca86f-d83d-302a-aa30-d25b8ee786e8/data/default/table/eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,460 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,460 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,463 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,464 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened eb59d98570d2d23170674a6ed46d2f17; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=66345387, jitterRate=-0.011376693844795227}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-12-07T17:51:48,464 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for eb59d98570d2d23170674a6ed46d2f17 2024-12-07T17:51:48,466 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for eb59d98570d2d23170674a6ed46d2f17: Running coprocessor pre-open hook at 1733593908450Writing region info on filesystem at 1733593908450Initializing all the Stores at 1733593908452 (+2 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733593908452Cleaning up temporary data from old regions at 1733593908460 (+8 ms)Running coprocessor post-open hooks at 1733593908464 (+4 ms)Region opened successfully at 1733593908466 (+2 ms) 2024-12-07T17:51:49,025 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor 2024-12-07T17:51:49,026 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor Metrics about HBase WALObservers 2024-12-07T17:51:49,027 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=TableRequests_Namespace_default_table_table 2024-12-07T17:51:49,027 INFO 
[HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=TableRequests_Namespace_default_table_table Metrics about Tables on a single HBase RegionServer 2024-12-07T17:51:51,474 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1722): Closing eb59d98570d2d23170674a6ed46d2f17, disabling compactions & flushes 2024-12-07T17:51:51,475 INFO [pool-106-thread-1 {}] regionserver.HRegion(1755): Closing region table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. 2024-12-07T17:51:51,475 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. 2024-12-07T17:51:51,475 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1843): Acquired close lock on table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. after waiting 0 ms 2024-12-07T17:51:51,475 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1853): Updates disabled for region table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. 2024-12-07T17:51:51,477 INFO [pool-106-thread-1 {}] regionserver.HRegion(2902): Flushing eb59d98570d2d23170674a6ed46d2f17 1/1 column families, dataSize=48 B heapSize=448 B 2024-12-07T17:51:51,997 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties 2024-12-07T17:51:54,503 DEBUG [pool-106-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/eb59d98570d2d23170674a6ed46d2f17/.tmp/b/d32a68973fb34f5b8963e1ee6a271504 is 28, key is b/b:b/1733593908469/Put/seqid=0 2024-12-07T17:51:54,509 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741873_1049 (size=4945) 2024-12-07T17:51:54,509 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741873_1049 (size=4945) 2024-12-07T17:51:54,509 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741873_1049 (size=4945) 2024-12-07T17:51:54,510 INFO [pool-106-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/eb59d98570d2d23170674a6ed46d2f17/.tmp/b/d32a68973fb34f5b8963e1ee6a271504 2024-12-07T17:51:54,519 DEBUG [pool-106-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/eb59d98570d2d23170674a6ed46d2f17/.tmp/b/d32a68973fb34f5b8963e1ee6a271504 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/eb59d98570d2d23170674a6ed46d2f17/b/d32a68973fb34f5b8963e1ee6a271504 2024-12-07T17:51:54,528 INFO [pool-106-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/eb59d98570d2d23170674a6ed46d2f17/b/d32a68973fb34f5b8963e1ee6a271504, entries=1, sequenceid=6, filesize=4.8 K 
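[Annotation] The flush records above show the two-phase commit the store flusher uses: DefaultStoreFlusher writes the memstore snapshot to a file under .tmp/, and HRegionFileSystem then moves it into the column family directory ("Committing ... as ..."), so a crash mid-flush never leaves a half-written HFile visible to readers. A bare-bones sketch of that tmp-then-rename pattern on the Hadoop FileSystem API (layout and contents are hypothetical):

```java
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class TmpThenRenameFlushSketch {
  public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    Path tmp   = new Path("/table/region/.tmp/b/flushfile"); // hypothetical paths
    Path store = new Path("/table/region/b/flushfile");

    // Phase 1: write the flushed cells somewhere readers never look.
    try (FSDataOutputStream out = fs.create(tmp, true)) {
      out.writeBytes("...hfile bytes elided...");
    }

    // Phase 2: a single rename publishes the complete file; on HDFS a rename
    // is one metadata operation on the NameNode, so readers see either the
    // old state or the whole new file, never a partial one.
    fs.mkdirs(store.getParent());
    if (!fs.rename(tmp, store)) {
      throw new IOException("failed to commit flushed file " + tmp);
    }
    System.out.println("committed " + store);
  }
}
```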
2024-12-07T17:51:54,530 INFO [pool-106-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for eb59d98570d2d23170674a6ed46d2f17 in 3054ms, sequenceid=6, compaction requested=false
2024-12-07T17:51:54,536 DEBUG [pool-106-thread-1 {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:33389/user/jenkins/test-data/e29ca86f-d83d-302a-aa30-d25b8ee786e8/data/default/table/eb59d98570d2d23170674a6ed46d2f17/recovered.edits/9.seqid, newMaxSeqId=9, maxSeqId=1
2024-12-07T17:51:54,537 INFO [pool-106-thread-1 {}] regionserver.HRegion(1973): Closed table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17.
2024-12-07T17:51:54,537 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1676): Region close journal for eb59d98570d2d23170674a6ed46d2f17: Waiting for close lock at 1733593911474Running coprocessor pre-close hooks at 1733593911474Disabling compacts and flushes for region at 1733593911474Disabling writes for close at 1733593911475 (+1 ms)Obtaining lock to block concurrent updates at 1733593911477 (+2 ms)Preparing flush snapshotting stores in eb59d98570d2d23170674a6ed46d2f17 at 1733593911477Finished memstore snapshotting table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17., syncing WAL and waiting on mvcc, flushsize=dataSize=48, getHeapSize=432, getOffHeapSize=0, getCellsCount=2 at 1733593911479 (+2 ms)Flushing stores of table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. at 1733593914478 (+2999 ms)Flushing eb59d98570d2d23170674a6ed46d2f17/b: creating writer at 1733593914478Flushing eb59d98570d2d23170674a6ed46d2f17/b: appending metadata at 1733593914502 (+24 ms)Flushing eb59d98570d2d23170674a6ed46d2f17/b: closing flushed file at 1733593914502Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@74d68a00: reopening flushed file at 1733593914518 (+16 ms)Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for eb59d98570d2d23170674a6ed46d2f17 in 3054ms, sequenceid=6, compaction requested=false at 1733593914530 (+12 ms)Writing region close event to WAL at 1733593914531 (+1 ms)Running coprocessor post-close hooks at 1733593914536 (+5 ms)Closed at 1733593914536
2024-12-07T17:51:54,537 INFO [pool-106-thread-1 {}] wal.AbstractTestFSWAL(620): Close result:{[B@348bda42=[/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/data/default/table/eb59d98570d2d23170674a6ed46d2f17/b/d32a68973fb34f5b8963e1ee6a271504]}
2024-12-07T17:51:54,537 WARN [Time-limited test {}] regionserver.HRegion(1707): Region table,,1733593908414.eb59d98570d2d23170674a6ed46d2f17. already closed
2024-12-07T17:51:54,537 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for eb59d98570d2d23170674a6ed46d2f17: Waiting for close lock at 1733593914537
2024-12-07T17:51:54,538 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:54,538 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:54,538 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:54,538 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:54,538 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-07T17:51:54,541 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39393 is added to blk_1073741871_1047 (size=1206)
2024-12-07T17:51:54,541 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:36845 is added to blk_1073741871_1047 (size=1206)
2024-12-07T17:51:54,541 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:44441 is added to blk_1073741871_1047 (size=1206)
2024-12-07T17:51:54,543 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/a8187796-c219-99e6-ba37-6fceff85c3cc/oldWALs
2024-12-07T17:51:54,543 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733593908400)
2024-12-07T17:51:54,551 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=262 (was 258)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:49778 [Waiting for operation #3]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:40176 [Waiting for operation #3]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
- Thread LEAK? -, OpenFileDescriptor=503 (was 495) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=63 (was 68), ProcessCount=11 (was 11), AvailableMemoryMB=20165 (was 20189)
2024-12-07T17:51:54,551 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1019): Shutting down minicluster
2024-12-07T17:51:54,554 WARN [PacketResponder: BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:36845, 127.0.0.1:44441] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-151443317-172.17.0.2-1733593889072:1073741863
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-07T17:51:54,554 WARN [ResponseProcessor for block BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039
java.io.EOFException: Unexpected EOF while trying to read response from server
    at org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:529) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-07T17:51:54,555 WARN [DataStreamer for file /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908135 block BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039 {}] hdfs.DataStreamer(1731): Error Recovery for BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039 in pipeline [DatanodeInfoWithStorage[127.0.0.1:39393,DS-ff468f6c-05fa-48cf-ab7a-0ad555511de2,DISK], DatanodeInfoWithStorage[127.0.0.1:36845,DS-02ae2ff1-ec0e-45b3-8f03-11b97394b189,DISK], DatanodeInfoWithStorage[127.0.0.1:44441,DS-c7dc589b-d465-42a5-a262-7236c2907e5a,DISK]]: datanode 0(DatanodeInfoWithStorage[127.0.0.1:39393,DS-ff468f6c-05fa-48cf-ab7a-0ad555511de2,DISK]) is bad.
2024-12-07T17:51:54,560 WARN [DataStreamer for file /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908135 block BP-151443317-172.17.0.2-1733593889072:blk_1073741863_1039 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908135 (inode 16549) Holder DFSClient_NONMAPREDUCE_-1488954630_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-07T17:51:54,561 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908135 with renewLeaseKey: DEFAULT_16549
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908135 (inode 16549) Holder DFSClient_NONMAPREDUCE_-1488954630_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-07T17:51:54,563 WARN [PacketResponder: BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038, type=LAST_IN_PIPELINE {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-151443317-172.17.0.2-1733593889072:1073741862
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-07T17:51:54,564 WARN [ResponseProcessor for block BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038
java.io.IOException: Bad response ERROR for BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038 from datanode DatanodeInfoWithStorage[127.0.0.1:39393,DS-ff468f6c-05fa-48cf-ab7a-0ad555511de2,DISK]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-07T17:51:54,564 WARN [DataStreamer for file /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908117 block BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038 {}] hdfs.DataStreamer(1731): Error Recovery for BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038 in pipeline [DatanodeInfoWithStorage[127.0.0.1:44441,DS-c7dc589b-d465-42a5-a262-7236c2907e5a,DISK], DatanodeInfoWithStorage[127.0.0.1:36845,DS-02ae2ff1-ec0e-45b3-8f03-11b97394b189,DISK], DatanodeInfoWithStorage[127.0.0.1:39393,DS-ff468f6c-05fa-48cf-ab7a-0ad555511de2,DISK]]: datanode 2(DatanodeInfoWithStorage[127.0.0.1:39393,DS-ff468f6c-05fa-48cf-ab7a-0ad555511de2,DISK]) is bad.
2024-12-07T17:51:54,564 WARN [PacketResponder: BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:36845, 127.0.0.1:39393] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-07T17:51:54,565 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:42258 [Receiving block BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038] {}] datanode.BlockReceiver(316): Block 1073741862 has not released the reserved bytes. Releasing 2097067 bytes as part of close.
2024-12-07T17:51:54,565 WARN [PacketResponder: BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:39393] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-07T17:51:54,565 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_-1488954630_22 at /127.0.0.1:40650 [Receiving block BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038] {}] datanode.BlockReceiver(316): Block 1073741862 has not released the reserved bytes. Releasing 2097067 bytes as part of close.
2024-12-07T17:51:54,566 WARN [DataStreamer for file /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908117 block BP-151443317-172.17.0.2-1733593889072:blk_1073741862_1038 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908117 (inode 16548) Holder DFSClient_NONMAPREDUCE_-1488954630_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
	at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-07T17:51:54,566 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908117 with renewLeaseKey: DEFAULT_16548
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/dfc8b300-a60c-a7a0-d153-ba3bbe154a2e/testFailedToCreateWALIfParentRenamed/wal.1733593908117 (inode 16548) Holder DFSClient_NONMAPREDUCE_-1488954630_22 does not have any open files.
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
	at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
	at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
	at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
	... (the preceding 5 frames, GeneratedMethodAccessor79.invoke through $Proxy46.getAdditionalDatanode, repeat verbatim dozens of times; duplicates elided) ...
	at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
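Note: the traces above show the HDFS client's DataStreamer attempting write-pipeline recovery (ClientProtocol#getAdditionalDatanode) for a WAL file whose parent directory the test renamed, so the NameNode no longer holds a lease for that inode and each retry fails with the same RemoteException. A minimal sketch of how a test might suppress this recovery path, assuming the standard HDFS client replace-datanode-on-failure settings (the helper class itself is hypothetical and not part of this run):

    import org.apache.hadoop.conf.Configuration;

    public class ReplaceDatanodePolicySketch {
      // Hypothetical helper: builds a Configuration that stops the DFSClient
      // from requesting a replacement datanode when a pipeline node fails,
      // so recovery fails fast instead of retrying against a deleted inode.
      public static Configuration walTestConf() {
        Configuration conf = new Configuration();
        // Standard HDFS client key: disable datanode replacement entirely.
        conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.enable", false);
        // Companion policy knob; NEVER keeps writing to the surviving nodes.
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        return conf;
      }
    }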
2024-12-07T17:51:54,626 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@4b2b884e{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-07T17:51:54,629 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@fa4aa4c{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-07T17:51:54,629 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-07T17:51:54,629 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@47ddd06a{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-07T17:51:54,629 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@4c49fcd3{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir/,STOPPED}
2024-12-07T17:51:54,633 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-07T17:51:54,633 WARN [BP-151443317-172.17.0.2-1733593889072 heartbeating to localhost/127.0.0.1:33389 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-07T17:51:54,633 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-07T17:51:54,633 WARN [BP-151443317-172.17.0.2-1733593889072 heartbeating to localhost/127.0.0.1:33389 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-151443317-172.17.0.2-1733593889072 (Datanode Uuid fc42c86a-70e9-447f-87e8-a4721a0abec3) service to localhost/127.0.0.1:33389
2024-12-07T17:51:54,635 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data5/current/BP-151443317-172.17.0.2-1733593889072 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-07T17:51:54,635 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data6/current/BP-151443317-172.17.0.2-1733593889072 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-07T17:51:54,636 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-07T17:51:54,638 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@28637041{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-07T17:51:54,638 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@5e18b7fd{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-07T17:51:54,638 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-07T17:51:54,639 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@243038a3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-07T17:51:54,639 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@443ad5c2{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir/,STOPPED}
2024-12-07T17:51:54,640 WARN [BP-151443317-172.17.0.2-1733593889072 heartbeating to localhost/127.0.0.1:33389 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-07T17:51:54,640 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-07T17:51:54,640 WARN [BP-151443317-172.17.0.2-1733593889072 heartbeating to localhost/127.0.0.1:33389 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-151443317-172.17.0.2-1733593889072 (Datanode Uuid 0283b3f6-7dae-4e65-912c-3a2518969050) service to localhost/127.0.0.1:33389
2024-12-07T17:51:54,640 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-07T17:51:54,640 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data3/current/BP-151443317-172.17.0.2-1733593889072 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-07T17:51:54,641 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data4/current/BP-151443317-172.17.0.2-1733593889072 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-07T17:51:54,641 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-07T17:51:54,647 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@411b19f7{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-07T17:51:54,647 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@64bb503e{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-07T17:51:54,647 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-07T17:51:54,647 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@43a917ce{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-07T17:51:54,647 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@208945{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir/,STOPPED}
2024-12-07T17:51:54,649 WARN [BP-151443317-172.17.0.2-1733593889072 heartbeating to localhost/127.0.0.1:33389 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-07T17:51:54,649 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-07T17:51:54,649 WARN [BP-151443317-172.17.0.2-1733593889072 heartbeating to localhost/127.0.0.1:33389 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-151443317-172.17.0.2-1733593889072 (Datanode Uuid eb45ebf0-fe9d-4e21-b581-49b6bcb1794f) service to localhost/127.0.0.1:33389
2024-12-07T17:51:54,649 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-07T17:51:54,649 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data1/current/BP-151443317-172.17.0.2-1733593889072 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-07T17:51:54,649 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/cluster_71d07d26-65b2-a2aa-fbbe-663557bbb412/data/data2/current/BP-151443317-172.17.0.2-1733593889072 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-07T17:51:54,650 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-07T17:51:54,657 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@5599def{hdfs,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-12-07T17:51:54,658 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@493ba8a1{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-07T17:51:54,658 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-07T17:51:54,658 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@61b73bb3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-07T17:51:54,659 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@746f7db{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/3f0d30e5-f538-c7f8-b07f-1a67a5e67a07/hadoop.log.dir/,STOPPED}
2024-12-07T17:51:54,699 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1026): Minicluster is down
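Note: "Minicluster is down" closes the teardown that produced the lines above: three DataNodes (data dirs data1 through data6), each stopping its Jetty web app, block pool service, and refreshUsed thread, followed by the NameNode's hdfs web app. A minimal sketch of the equivalent mini-cluster lifecycle, assuming plain Hadoop test APIs (the class name and file path below are illustrative, not from this run; HBase's HBaseTestingUtil wraps this same machinery):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class MiniClusterLifecycleSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Start an in-process NameNode plus three DataNodes, matching the
        // three datanode shutdowns seen in the log above.
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
            .numDataNodes(3)
            .build();
        try {
          FileSystem fs = cluster.getFileSystem();
          // Write a file so the DataStreamer write pipeline is exercised.
          fs.create(new Path("/example/wal.0")).close();
        } finally {
          // Stops the Jetty contexts, block pool services, and refreshUsed
          // threads; the INFO/WARN teardown lines above come from this step.
          cluster.shutdown();
        }
      }
    }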