2024-12-08 00:51:10,237 main DEBUG Apache Log4j Core 2.17.2 initializing configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-12-08 00:51:10,247 main DEBUG Took 0.008828 seconds to load 1 plugins from package org.apache.hadoop.hbase.logging
2024-12-08 00:51:10,247 main DEBUG PluginManager 'Core' found 129 plugins
2024-12-08 00:51:10,248 main DEBUG PluginManager 'Level' found 0 plugins
2024-12-08 00:51:10,249 main DEBUG PluginManager 'Lookup' found 16 plugins
2024-12-08 00:51:10,249 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,255 main DEBUG PluginManager 'TypeConverter' found 26 plugins
2024-12-08 00:51:10,265 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.util.MBeans", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,266 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,267 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.logging.TestJul2Slf4j", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,267 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,268 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.zookeeper", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,268 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,269 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,269 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,269 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,269 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,270 main DEBUG LoggerConfig$Builder(additivity="false", level="WARN", levelAndRefs="null", name="org.apache.directory", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,270 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,271 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.ipc.FailedServers", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,271 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,271 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsConfig", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,272 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,272 main DEBUG LoggerConfig$Builder(additivity="null", level="INFO", levelAndRefs="null", name="org.apache.hadoop.hbase.ScheduledChore", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,272 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,273 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.regionserver.RSRpcServices", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,273 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,273 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,273 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,274 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,274 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-08 00:51:10,274 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hbase.thirdparty.io.netty.channel", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,274 main DEBUG Building Plugin[name=root, class=org.apache.logging.log4j.core.config.LoggerConfig$RootLogger].
2024-12-08 00:51:10,276 main DEBUG LoggerConfig$RootLogger$Builder(additivity="null", level="null", levelAndRefs="INFO,Console", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-08 00:51:10,277 main DEBUG Building Plugin[name=loggers, class=org.apache.logging.log4j.core.config.LoggersPlugin].
2024-12-08 00:51:10,279 main DEBUG createLoggers(={org.apache.hadoop.metrics2.util.MBeans, org.apache.hadoop.hbase.logging.TestJul2Slf4j, org.apache.zookeeper, org.apache.hadoop.metrics2.impl.MetricsSinkAdapter, org.apache.hadoop.metrics2.impl.MetricsSystemImpl, org.apache.directory, org.apache.hadoop.hbase.ipc.FailedServers, org.apache.hadoop.metrics2.impl.MetricsConfig, org.apache.hadoop.hbase.ScheduledChore, org.apache.hadoop.hbase.regionserver.RSRpcServices, org.apache.hadoop, org.apache.hadoop.hbase, org.apache.hbase.thirdparty.io.netty.channel, root})
2024-12-08 00:51:10,279 main DEBUG Building Plugin[name=layout, class=org.apache.logging.log4j.core.layout.PatternLayout].
2024-12-08 00:51:10,280 main DEBUG PatternLayout$Builder(pattern="%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n", PatternSelector=null, Configuration(PropertiesConfig), Replace=null, charset="null", alwaysWriteExceptions="null", disableAnsi="null", noConsoleNoAnsi="null", header="null", footer="null")
2024-12-08 00:51:10,280 main DEBUG PluginManager 'Converter' found 47 plugins
2024-12-08 00:51:10,287 main DEBUG Building Plugin[name=appender, class=org.apache.hadoop.hbase.logging.HBaseTestAppender].
2024-12-08 00:51:10,289 main DEBUG HBaseTestAppender$Builder(target="SYSTEM_ERR", maxSize="1G", bufferedIo="null", bufferSize="null", immediateFlush="null", ignoreExceptions="null", PatternLayout(%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n), name="Console", Configuration(PropertiesConfig), Filter=null, ={})
2024-12-08 00:51:10,291 main DEBUG Starting HBaseTestOutputStreamManager SYSTEM_ERR
2024-12-08 00:51:10,291 main DEBUG Building Plugin[name=appenders, class=org.apache.logging.log4j.core.config.AppendersPlugin].
2024-12-08 00:51:10,291 main DEBUG createAppenders(={Console})
2024-12-08 00:51:10,292 main DEBUG Configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f initialized
2024-12-08 00:51:10,292 main DEBUG Starting configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-12-08 00:51:10,292 main DEBUG Started configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f OK.
2024-12-08 00:51:10,293 main DEBUG Shutting down OutputStreamManager SYSTEM_OUT.false.false-1
2024-12-08 00:51:10,293 main DEBUG OutputStream closed
2024-12-08 00:51:10,293 main DEBUG Shut down OutputStreamManager SYSTEM_OUT.false.false-1, all resources released: true
2024-12-08 00:51:10,293 main DEBUG Appender DefaultConsole-1 stopped with status true
2024-12-08 00:51:10,294 main DEBUG Stopped org.apache.logging.log4j.core.config.DefaultConfiguration@61001b64 OK
2024-12-08 00:51:10,355 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6
2024-12-08 00:51:10,357 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=StatusLogger
2024-12-08 00:51:10,358 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=ContextSelector
2024-12-08 00:51:10,359 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=
2024-12-08 00:51:10,359 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.directory
2024-12-08 00:51:10,360 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
2024-12-08 00:51:10,360 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.zookeeper
2024-12-08 00:51:10,360 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.logging.TestJul2Slf4j
2024-12-08 00:51:10,360 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSystemImpl
2024-12-08 00:51:10,361 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.util.MBeans
2024-12-08 00:51:10,361 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase
2024-12-08 00:51:10,361 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop
2024-12-08 00:51:10,361 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ipc.FailedServers
2024-12-08 00:51:10,362 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.regionserver.RSRpcServices
2024-12-08 00:51:10,362 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsConfig
2024-12-08 00:51:10,362 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hbase.thirdparty.io.netty.channel
2024-12-08 00:51:10,362 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ScheduledChore
2024-12-08 00:51:10,363 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Appenders,name=Console
2024-12-08 00:51:10,365 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-12-08 00:51:10,365 main DEBUG Reconfiguration complete for context[name=1dbd16a6] at URI jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-logging/target/hbase-logging-4.0.0-alpha-1-SNAPSHOT-tests.jar!/log4j2.properties (org.apache.logging.log4j.core.LoggerContext@40db2a24) with optional ClassLoader: null
2024-12-08 00:51:10,366 main DEBUG Shutdown hook enabled. Registering a new one.
2024-12-08 00:51:10,366 main DEBUG LoggerContext[name=1dbd16a6, org.apache.logging.log4j.core.LoggerContext@40db2a24] started OK.
2024-12-08T00:51:10,584 DEBUG [main {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34
2024-12-08 00:51:10,587 main DEBUG AsyncLogger.ThreadNameStrategy=UNCACHED (user specified null, default is UNCACHED)
2024-12-08 00:51:10,587 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-12-08T00:51:10,595 INFO [main {}] hbase.HBaseClassTestRule(94): Test class org.apache.hadoop.hbase.regionserver.wal.TestFSHLog timeout: 13 mins
2024-12-08T00:51:10,613 INFO [Time-limited test {}] hbase.HBaseZKTestingUtil(84): Created new mini-cluster data directory: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe, deleteOnExit=true
2024-12-08T00:51:10,614 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting test.cache.data to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/test.cache.data in system properties and HBase conf
2024-12-08T00:51:10,615 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.tmp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.tmp.dir in system properties and HBase conf
2024-12-08T00:51:10,615 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.log.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir in system properties and HBase conf
2024-12-08T00:51:10,616 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.local.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/mapreduce.cluster.local.dir in system properties and HBase conf
2024-12-08T00:51:10,616 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.temp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/mapreduce.cluster.temp.dir in system properties and HBase conf
2024-12-08T00:51:10,616 INFO [Time-limited test {}] hbase.HBaseTestingUtil(738): read short circuit is OFF
2024-12-08T00:51:10,696 WARN [Time-limited test {}] util.NativeCodeLoader(60): Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2024-12-08T00:51:10,779 DEBUG [Time-limited test {}] fs.HFileSystem(310): The file system is not a DistributedFileSystem. Skipping on block location reordering
2024-12-08T00:51:10,782 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-labels.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/yarn.node-labels.fs-store.root-dir in system properties and HBase conf
2024-12-08T00:51:10,783 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-attribute.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/yarn.node-attribute.fs-store.root-dir in system properties and HBase conf
2024-12-08T00:51:10,783 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.log-dirs to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/yarn.nodemanager.log-dirs in system properties and HBase conf
2024-12-08T00:51:10,783 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-12-08T00:51:10,784 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.active-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/yarn.timeline-service.entity-group-fs-store.active-dir in system properties and HBase conf
2024-12-08T00:51:10,784 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.done-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/yarn.timeline-service.entity-group-fs-store.done-dir in system properties and HBase conf
2024-12-08T00:51:10,784 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-12-08T00:51:10,785 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/dfs.journalnode.edits.dir in system properties and HBase conf
2024-12-08T00:51:10,785 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.datanode.shared.file.descriptor.paths to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/dfs.datanode.shared.file.descriptor.paths in system properties and HBase conf
2024-12-08T00:51:10,785 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting nfs.dump.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/nfs.dump.dir in system properties and HBase conf
2024-12-08T00:51:10,786 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting java.io.tmpdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/java.io.tmpdir in system properties and HBase conf
2024-12-08T00:51:10,786 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/dfs.journalnode.edits.dir in system properties and HBase conf
2024-12-08T00:51:10,786 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.provided.aliasmap.inmemory.leveldb.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/dfs.provided.aliasmap.inmemory.leveldb.dir in system properties and HBase conf
2024-12-08T00:51:10,787 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting fs.s3a.committer.staging.tmp.path to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/fs.s3a.committer.staging.tmp.path in system properties and HBase conf
2024-12-08T00:51:11,195 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-12-08T00:51:11,672 WARN [Time-limited test {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
2024-12-08T00:51:11,737 INFO [Time-limited test {}] log.Log(170): Logging initialized @2109ms to org.eclipse.jetty.util.log.Slf4jLog
2024-12-08T00:51:11,802 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-08T00:51:11,859 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-08T00:51:11,882 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-08T00:51:11,882 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-08T00:51:11,884 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-12-08T00:51:11,898 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-08T00:51:11,901 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@746f7db{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir/,AVAILABLE}
2024-12-08T00:51:11,902 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@61b73bb3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-08T00:51:12,062 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@5599def{hdfs,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/java.io.tmpdir/jetty-localhost-38455-hadoop-hdfs-3_4_1-tests_jar-_-any-17875161250140478692/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-12-08T00:51:12,068 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@493ba8a1{HTTP/1.1, (http/1.1)}{localhost:38455}
2024-12-08T00:51:12,069 INFO [Time-limited test {}] server.Server(415): Started @2442ms
2024-12-08T00:51:12,094 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-12-08T00:51:12,559 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-08T00:51:12,564 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-08T00:51:12,565 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-08T00:51:12,565 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-08T00:51:12,566 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-12-08T00:51:12,566 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@208945{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir/,AVAILABLE}
2024-12-08T00:51:12,567 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@43a917ce{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-08T00:51:12,658 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@411b19f7{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/java.io.tmpdir/jetty-localhost-41033-hadoop-hdfs-3_4_1-tests_jar-_-any-11494266386931004192/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-08T00:51:12,659 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@64bb503e{HTTP/1.1, (http/1.1)}{localhost:41033}
2024-12-08T00:51:12,659 INFO [Time-limited test {}] server.Server(415): Started @3033ms
2024-12-08T00:51:12,703 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-12-08T00:51:12,796 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-08T00:51:12,801 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-08T00:51:12,802 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-08T00:51:12,802 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-08T00:51:12,802 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-12-08T00:51:12,802 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@443ad5c2{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir/,AVAILABLE}
2024-12-08T00:51:12,803 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@243038a3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-08T00:51:12,897 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@28637041{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/java.io.tmpdir/jetty-localhost-42079-hadoop-hdfs-3_4_1-tests_jar-_-any-12110458631969510931/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-08T00:51:12,898 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@5e18b7fd{HTTP/1.1, (http/1.1)}{localhost:42079}
2024-12-08T00:51:12,898 INFO [Time-limited test {}] server.Server(415): Started @3271ms
2024-12-08T00:51:12,900 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-12-08T00:51:12,931 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-08T00:51:12,935 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-08T00:51:12,936 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-08T00:51:12,936 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-08T00:51:12,936 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-12-08T00:51:12,937 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@4c49fcd3{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir/,AVAILABLE}
2024-12-08T00:51:12,938 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@47ddd06a{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-08T00:51:13,030 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@4b2b884e{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/java.io.tmpdir/jetty-localhost-37805-hadoop-hdfs-3_4_1-tests_jar-_-any-10962218833825545651/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-08T00:51:13,030 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@fa4aa4c{HTTP/1.1, (http/1.1)}{localhost:37805}
2024-12-08T00:51:13,031 INFO [Time-limited test {}] server.Server(415): Started @3404ms
2024-12-08T00:51:13,032 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-12-08T00:51:13,922 WARN [Thread-122 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data3/current/BP-1227637935-172.17.0.2-1733619071266/current, will proceed with Du for space computation calculation,
2024-12-08T00:51:13,922 WARN [Thread-124 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data1/current/BP-1227637935-172.17.0.2-1733619071266/current, will proceed with Du for space computation calculation,
2024-12-08T00:51:13,922 WARN [Thread-125 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data2/current/BP-1227637935-172.17.0.2-1733619071266/current, will proceed with Du for space computation calculation,
2024-12-08T00:51:13,922 WARN [Thread-123 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data4/current/BP-1227637935-172.17.0.2-1733619071266/current, will proceed with Du for space computation calculation,
2024-12-08T00:51:13,955 WARN [Thread-58 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-08T00:51:13,955 WARN [Thread-81 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-08T00:51:13,973 WARN [Thread-142 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data5/current/BP-1227637935-172.17.0.2-1733619071266/current, will proceed with Du for space computation calculation,
2024-12-08T00:51:13,974 WARN [Thread-143 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data6/current/BP-1227637935-172.17.0.2-1733619071266/current, will proceed with Du for space computation calculation,
2024-12-08T00:51:13,997 WARN [Thread-103 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-08T00:51:14,006 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x3e0bfbd2bcc20074 with lease ID 0xcb239720be4978a0: Processing first storage report for DS-474d2394-7f95-48cf-aa86-c41e2400ee6e from datanode DatanodeRegistration(127.0.0.1:38945, datanodeUuid=924dfcb1-3b2d-43f3-beb9-fc2331d99411, infoPort=43783, infoSecurePort=0, ipcPort=43897, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266)
2024-12-08T00:51:14,008 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x3e0bfbd2bcc20074 with lease ID 0xcb239720be4978a0: from storage DS-474d2394-7f95-48cf-aa86-c41e2400ee6e node DatanodeRegistration(127.0.0.1:38945, datanodeUuid=924dfcb1-3b2d-43f3-beb9-fc2331d99411, infoPort=43783, infoSecurePort=0, ipcPort=43897, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
2024-12-08T00:51:14,008 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x365461aa1f017b9c with lease ID 0xcb239720be49789f: Processing first storage report for DS-ad358436-12a8-4e7a-bdde-c750d5647305 from datanode DatanodeRegistration(127.0.0.1:45199, datanodeUuid=1100f5f9-6c1f-4acd-9a7c-844d060c3822, infoPort=36555, infoSecurePort=0, ipcPort=41797, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266)
2024-12-08T00:51:14,008 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x365461aa1f017b9c with lease ID 0xcb239720be49789f: from storage DS-ad358436-12a8-4e7a-bdde-c750d5647305 node DatanodeRegistration(127.0.0.1:45199, datanodeUuid=1100f5f9-6c1f-4acd-9a7c-844d060c3822, infoPort=36555, infoSecurePort=0, ipcPort=41797, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-08T00:51:14,009 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x5ff005ee8d72350f with lease ID 0xcb239720be49789e: Processing first storage report for DS-6b32010d-891b-447b-a8a6-5a84dde2e565 from datanode DatanodeRegistration(127.0.0.1:39559, datanodeUuid=929cefbf-6d2a-4045-b8e1-081784df30d3, infoPort=40445, infoSecurePort=0, ipcPort=38391, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266)
2024-12-08T00:51:14,009 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x5ff005ee8d72350f with lease ID 0xcb239720be49789e: from storage DS-6b32010d-891b-447b-a8a6-5a84dde2e565 node DatanodeRegistration(127.0.0.1:39559, datanodeUuid=929cefbf-6d2a-4045-b8e1-081784df30d3, infoPort=40445, infoSecurePort=0, ipcPort=38391, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-08T00:51:14,009 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x3e0bfbd2bcc20074 with lease ID 0xcb239720be4978a0: Processing first storage report for DS-7a3dbd75-f800-42f2-ba9f-d6a91cac77bc from datanode DatanodeRegistration(127.0.0.1:38945, datanodeUuid=924dfcb1-3b2d-43f3-beb9-fc2331d99411, infoPort=43783, infoSecurePort=0, ipcPort=43897, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266)
2024-12-08T00:51:14,009 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x3e0bfbd2bcc20074 with lease ID 0xcb239720be4978a0: from storage DS-7a3dbd75-f800-42f2-ba9f-d6a91cac77bc node DatanodeRegistration(127.0.0.1:38945, datanodeUuid=924dfcb1-3b2d-43f3-beb9-fc2331d99411, infoPort=43783, infoSecurePort=0, ipcPort=43897, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-08T00:51:14,010 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x365461aa1f017b9c with lease ID 0xcb239720be49789f: Processing first storage report for DS-0d419b7e-c622-4362-9bc4-01274f031047 from datanode DatanodeRegistration(127.0.0.1:45199, datanodeUuid=1100f5f9-6c1f-4acd-9a7c-844d060c3822, infoPort=36555, infoSecurePort=0, ipcPort=41797, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266)
2024-12-08T00:51:14,010 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x365461aa1f017b9c with lease ID 0xcb239720be49789f: from storage DS-0d419b7e-c622-4362-9bc4-01274f031047 node DatanodeRegistration(127.0.0.1:45199, datanodeUuid=1100f5f9-6c1f-4acd-9a7c-844d060c3822, infoPort=36555, infoSecurePort=0, ipcPort=41797, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-08T00:51:14,010 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x5ff005ee8d72350f with lease ID 0xcb239720be49789e: Processing first storage report for DS-2b612a28-ebda-451c-ad65-dba90421b0bc from datanode DatanodeRegistration(127.0.0.1:39559, datanodeUuid=929cefbf-6d2a-4045-b8e1-081784df30d3, infoPort=40445, infoSecurePort=0, ipcPort=38391, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266)
2024-12-08T00:51:14,010 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x5ff005ee8d72350f with lease ID 0xcb239720be49789e: from storage DS-2b612a28-ebda-451c-ad65-dba90421b0bc node DatanodeRegistration(127.0.0.1:39559, datanodeUuid=929cefbf-6d2a-4045-b8e1-081784df30d3, infoPort=40445, infoSecurePort=0, ipcPort=38391, storageInfo=lv=-57;cid=testClusterID;nsid=1970813805;c=1733619071266), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-08T00:51:14,110 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(631): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34
2024-12-08T00:51:14,122 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=156, OpenFileDescriptor=393, MaxFileDescriptor=1048576, SystemLoadAverage=111, ProcessCount=11, AvailableMemoryMB=18123
2024-12-08T00:51:14,138 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:14,141 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:14,329 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741825_1001 (size=7)
2024-12-08T00:51:14,330 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741825_1001 (size=7)
2024-12-08T00:51:14,330 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741825_1001 (size=7)
2024-12-08T00:51:14,738 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8
2024-12-08T00:51:14,739 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:14,743 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:14,754 DEBUG [Time-limited test {}] util.ClassSize(228): Using Unsafe to estimate memory layout
2024-12-08T00:51:14,771 INFO [Time-limited test {}] metrics.MetricRegistriesLoader(60): Loaded MetricRegistries class org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl
2024-12-08T00:51:14,773 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-08T00:51:14,783 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/testSyncRunnerIndexOverflow, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/oldWALs, maxLogs=1760
2024-12-08T00:51:14,826 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619074818
2024-12-08T00:51:14,874 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/testSyncRunnerIndexOverflow/wal.1733619074818
2024-12-08T00:51:14,915 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:36555:36555)]
2024-12-08T00:51:14,967 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:14,967 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:14,967 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:14,968 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:14,968 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:14,972 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741826_1002 (size=1293)
2024-12-08T00:51:14,973 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741826_1002 (size=1293)
2024-12-08T00:51:14,974 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741826_1002 (size=1293)
2024-12-08T00:51:14,981 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/oldWALs
2024-12-08T00:51:14,984 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733619074818)
2024-12-08T00:51:14,992 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=163 (was 156) Potentially hanging thread: sync.3 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: sync.0 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: LeaseRenewer:jenkins@localhost:41187 java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:441) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$800(LeaseRenewer.java:77) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:336) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: sync.1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: sync.2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) 
java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: weak-ref-cleaner-strictcontextstorage java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:155) java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:176) app//io.opentelemetry.context.StrictContextStorage$PendingScopes.run(StrictContextStorage.java:269) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: sync.4 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) - Thread LEAK? -, OpenFileDescriptor=403 (was 393) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=118 (was 111) - SystemLoadAverage LEAK? 
-, ProcessCount=11 (was 11), AvailableMemoryMB=18075 (was 18123) 2024-12-08T00:51:14,999 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=163, OpenFileDescriptor=403, MaxFileDescriptor=1048576, SystemLoadAverage=118, ProcessCount=11, AvailableMemoryMB=18075 2024-12-08T00:51:15,022 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741827_1003 (size=7) 2024-12-08T00:51:15,022 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741827_1003 (size=7) 2024-12-08T00:51:15,023 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741827_1003 (size=7) 2024-12-08T00:51:15,025 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8 2024-12-08T00:51:15,026 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-08T00:51:15,028 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-08T00:51:15,037 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-08T00:51:15,038 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/testUnflushedSeqIdTracking, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/oldWALs, maxLogs=1760 2024-12-08T00:51:15,039 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619075039 2024-12-08T00:51:15,050 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/testUnflushedSeqIdTracking/wal.1733619075039 2024-12-08T00:51:15,052 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:36555:36555)] 2024-12-08T00:51:15,054 INFO [Time-limited test {}] regionserver.ChunkCreator(472): data poolSizePercentage is less than 0. So not using pool 2024-12-08T00:51:15,054 INFO [Time-limited test {}] regionserver.ChunkCreator(472): index poolSizePercentage is less than 0. 
So not using pool
2024-12-08T00:51:15,073 INFO  [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 02013c5868946cd0d599b2308d1e4c25, NAME => 'testUnflushedSeqIdTracking,,1733619075053.02013c5868946cd0d599b2308d1e4c25.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testUnflushedSeqIdTracking', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34
2024-12-08T00:51:15,092 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741829_1005 (size=61)
2024-12-08T00:51:15,093 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741829_1005 (size=61)
2024-12-08T00:51:15,093 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741829_1005 (size=61)
2024-12-08T00:51:15,097 INFO  [Time-limited test {}] throttle.StoreHotnessProtector(112): StoreHotnessProtector is disabled. Set hbase.region.store.parallel.put.limit > 0 to enable, which may help mitigate load under heavy write pressure.
2024-12-08T00:51:15,100 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testUnflushedSeqIdTracking,,1733619075053.02013c5868946cd0d599b2308d1e4c25.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-08T00:51:15,141 INFO  [StoreOpener-02013c5868946cd0d599b2308d1e4c25-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 02013c5868946cd0d599b2308d1e4c25
2024-12-08T00:51:15,168 INFO  [StoreOpener-02013c5868946cd0d599b2308d1e4c25-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 02013c5868946cd0d599b2308d1e4c25 columnFamilyName b
2024-12-08T00:51:15,173 DEBUG [StoreOpener-02013c5868946cd0d599b2308d1e4c25-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-08T00:51:15,177 INFO  [StoreOpener-02013c5868946cd0d599b2308d1e4c25-1 {}] regionserver.HStore(327): Store=02013c5868946cd0d599b2308d1e4c25/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-08T00:51:15,179 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 02013c5868946cd0d599b2308d1e4c25
2024-12-08T00:51:15,183 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testUnflushedSeqIdTracking/02013c5868946cd0d599b2308d1e4c25
2024-12-08T00:51:15,184 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testUnflushedSeqIdTracking/02013c5868946cd0d599b2308d1e4c25
2024-12-08T00:51:15,185 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/871f50b7-83e3-45a4-d00f-19fcc993e2be/data/default/testUnflushedSeqIdTracking/02013c5868946cd0d599b2308d1e4c25
2024-12-08T00:51:15,188 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 02013c5868946cd0d599b2308d1e4c25
2024-12-08T00:51:15,189 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 02013c5868946cd0d599b2308d1e4c25
2024-12-08T00:51:15,197 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 02013c5868946cd0d599b2308d1e4c25
2024-12-08T00:51:15,202 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:41187/user/jenkins/test-data/871f50b7-83e3-45a4-d00f-19fcc993e2be/data/default/testUnflushedSeqIdTracking/02013c5868946cd0d599b2308d1e4c25/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-12-08T00:51:15,204 INFO  [Time-limited test {}] regionserver.HRegion(1114): Opened 02013c5868946cd0d599b2308d1e4c25; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=64551017, jitterRate=-0.038114890456199646}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-08T00:51:15,217 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 02013c5868946cd0d599b2308d1e4c25: Writing region info on filesystem at 1733619075117Initializing all the Stores at 1733619075119 (+2 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733619075120 (+1 ms)Cleaning up temporary data from old regions at 1733619075189 (+69 ms)Region opened successfully at 1733619075215 (+26 ms)
2024-12-08T00:51:18,249 INFO  [pool-60-thread-2 {}] regionserver.HRegion(2902): Flushing 02013c5868946cd0d599b2308d1e4c25 1/1 column families, dataSize=24 B heapSize=352 B
2024-12-08T00:51:21,329 DEBUG [pool-60-thread-2 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/testUnflushedSeqIdTracking/02013c5868946cd0d599b2308d1e4c25/.tmp/b/37a9c5e006604345af23d5b452292f19 is 28, key is b/b:b/1733619075237/Put/seqid=0
2024-12-08T00:51:21,342 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741830_1006 (size=4945)
2024-12-08T00:51:21,343 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741830_1006 (size=4945)
2024-12-08T00:51:21,343 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741830_1006 (size=4945)
2024-12-08T00:51:21,344 INFO  [pool-60-thread-2 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=4 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/testUnflushedSeqIdTracking/02013c5868946cd0d599b2308d1e4c25/.tmp/b/37a9c5e006604345af23d5b452292f19
2024-12-08T00:51:21,420 DEBUG [pool-60-thread-2 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/testUnflushedSeqIdTracking/02013c5868946cd0d599b2308d1e4c25/.tmp/b/37a9c5e006604345af23d5b452292f19 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/testUnflushedSeqIdTracking/02013c5868946cd0d599b2308d1e4c25/b/37a9c5e006604345af23d5b452292f19
2024-12-08T00:51:21,431 INFO  [pool-60-thread-2 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/testUnflushedSeqIdTracking/02013c5868946cd0d599b2308d1e4c25/b/37a9c5e006604345af23d5b452292f19, entries=1, sequenceid=4, filesize=4.8 K
2024-12-08T00:51:21,439 INFO  [pool-60-thread-2 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 02013c5868946cd0d599b2308d1e4c25 in 3196ms, sequenceid=4, compaction requested=false
2024-12-08T00:51:21,440 DEBUG [pool-60-thread-2 {}] regionserver.HRegion(2603): Flush status journal for 02013c5868946cd0d599b2308d1e4c25:
2024-12-08T00:51:21,440 INFO  [pool-60-thread-2 {}] wal.TestFSHLog$4(193): Flush result:FLUSHED_NO_COMPACTION_NEEDED
2024-12-08T00:51:21,440 INFO  [pool-60-thread-2 {}] wal.TestFSHLog$4(194): Flush succeeded:true
2024-12-08T00:51:21,441 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 02013c5868946cd0d599b2308d1e4c25, disabling compactions & flushes
2024-12-08T00:51:21,441 INFO  [Time-limited test {}] regionserver.HRegion(1755): Closing region testUnflushedSeqIdTracking,,1733619075053.02013c5868946cd0d599b2308d1e4c25.
2024-12-08T00:51:21,441 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testUnflushedSeqIdTracking,,1733619075053.02013c5868946cd0d599b2308d1e4c25.
2024-12-08T00:51:21,441 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testUnflushedSeqIdTracking,,1733619075053.02013c5868946cd0d599b2308d1e4c25. after waiting 0 ms
2024-12-08T00:51:21,441 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testUnflushedSeqIdTracking,,1733619075053.02013c5868946cd0d599b2308d1e4c25.
2024-12-08T00:51:21,443 INFO  [Time-limited test {}] regionserver.HRegion(1973): Closed testUnflushedSeqIdTracking,,1733619075053.02013c5868946cd0d599b2308d1e4c25.
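[Annotation] The HRegion(7572) record above dumps the table descriptor the test created (family 'b' with VERSIONS=1, BLOOMFILTER=ROW, BLOCKSIZE=65536). As a point of reference only, the following standalone sketch builds an equivalent descriptor with the public HBase 2.x client API; the class name and the println are illustrative, not part of TestFSHLog.

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.regionserver.BloomType;
    import org.apache.hadoop.hbase.util.Bytes;

    public class DescriptorSketch {
      public static void main(String[] args) {
        // Mirrors the logged family settings; unspecified attributes keep their defaults.
        ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder
            .newBuilder(Bytes.toBytes("b"))
            .setMaxVersions(1)                  // VERSIONS => '1'
            .setBloomFilterType(BloomType.ROW)  // BLOOMFILTER => 'ROW'
            .setBlocksize(64 * 1024)            // BLOCKSIZE => '65536 B (64KB)'
            .build();
        TableDescriptor td = TableDescriptorBuilder
            .newBuilder(TableName.valueOf("testUnflushedSeqIdTracking"))
            .setColumnFamily(cf)
            .build();
        System.out.println(td); // prints a descriptor string like the one in the log
      }
    }
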
2024-12-08T00:51:21,444 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 02013c5868946cd0d599b2308d1e4c25: Waiting for close lock at 1733619081440Disabling compacts and flushes for region at 1733619081441 (+1 ms)Disabling writes for close at 1733619081441Writing region close event to WAL at 1733619081443 (+2 ms)Closed at 1733619081443
2024-12-08T00:51:21,444 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,444 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,445 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,445 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,445 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,448 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741828_1004 (size=875)
2024-12-08T00:51:21,449 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741828_1004 (size=875)
2024-12-08T00:51:21,449 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741828_1004 (size=875)
2024-12-08T00:51:21,452 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/oldWALs
2024-12-08T00:51:21,452 INFO  [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733619075039)
2024-12-08T00:51:21,459 INFO  [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=179 (was 163)
Potentially hanging thread: org.apache.hadoop.hdfs.PeerCache@4397c923 java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hdfs.PeerCache.run(PeerCache.java:253) app//org.apache.hadoop.hdfs.PeerCache.access$000(PeerCache.java:46) app//org.apache.hadoop.hdfs.PeerCache$1.run(PeerCache.java:124) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Monitor thread for TaskMonitor java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hbase.monitoring.TaskMonitor$MonitorRunnable.run(TaskMonitor.java:325) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: HBase-Metrics2-1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182) java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data5 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: pool-60-thread-2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: pool-60-thread-1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:52506 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data6 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:37706 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
- Thread LEAK? -, OpenFileDescriptor=417 (was 403) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=108 (was 118), ProcessCount=11 (was 11), AvailableMemoryMB=18048 (was 18075)
2024-12-08T00:51:21,467 INFO  [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALComparator Thread=179, OpenFileDescriptor=417, MaxFileDescriptor=1048576, SystemLoadAverage=108, ProcessCount=11, AvailableMemoryMB=18048
2024-12-08T00:51:21,481 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741831_1007 (size=7)
2024-12-08T00:51:21,481 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741831_1007 (size=7)
2024-12-08T00:51:21,482 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741831_1007 (size=7)
2024-12-08T00:51:21,484 INFO  [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8
2024-12-08T00:51:21,484 INFO  [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:21,486 INFO  [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:21,495 INFO  [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
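[Annotation] The "Thread=179 (was 163)" and "OpenFileDescriptor=417 (was 403)" counters in the ResourceChecker lines above are before/after resource snapshots taken around each test. A simplified, dependency-free approximation of that bookkeeping is sketched below; the real ResourceChecker tracks several resource kinds and prints the stack dumps shown above, while this sketch only counts live threads.

    public class ThreadCountCheck {
      public static void main(String[] args) {
        int before = Thread.getAllStackTraces().size(); // snapshot before the "test"
        // Simulate a test leaving a thread behind, the situation the checker flags.
        Thread leak = new Thread(() -> {
          try { Thread.sleep(10_000); } catch (InterruptedException ignored) { }
        });
        leak.setDaemon(true);
        leak.start();
        int after = Thread.getAllStackTraces().size();  // snapshot after the "test"
        System.out.printf("Thread=%d (was %d)%s%n",
            after, before, after > before ? " - Thread LEAK? -" : "");
      }
    }
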
2024-12-08T00:51:21,496 INFO  [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/e61ab431-825b-1c81-a601-f5aef6a87783/testWALComparator, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/e61ab431-825b-1c81-a601-f5aef6a87783/oldWALs, maxLogs=1760
2024-12-08T00:51:21,498 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081498
2024-12-08T00:51:21,507 INFO  [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/e61ab431-825b-1c81-a601-f5aef6a87783/testWALComparator/wal.1733619081498
2024-12-08T00:51:21,509 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:36555:36555)]
2024-12-08T00:51:21,510 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(215): Log obtained is: FSHLog wal:(num 1733619081498)
2024-12-08T00:51:21,513 INFO  [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-08T00:51:21,513 INFO  [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=.meta, logDir=hdfs://localhost:41187/user/jenkins/test-data/e61ab431-825b-1c81-a601-f5aef6a87783/testWALComparator, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/e61ab431-825b-1c81-a601-f5aef6a87783/oldWALs, maxLogs=1760
2024-12-08T00:51:21,515 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081515.meta
2024-12-08T00:51:21,523 INFO  [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/e61ab431-825b-1c81-a601-f5aef6a87783/testWALComparator/wal.1733619081515.meta
2024-12-08T00:51:21,525 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:40445:40445)]
2024-12-08T00:51:21,526 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,526 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,526 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,527 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,527 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,530 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741832_1008 (size=93)
2024-12-08T00:51:21,530 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741832_1008 (size=93)
2024-12-08T00:51:21,531 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741832_1008 (size=93)
2024-12-08T00:51:21,535 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/e61ab431-825b-1c81-a601-f5aef6a87783/oldWALs
2024-12-08T00:51:21,535 INFO  [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733619081498)
2024-12-08T00:51:21,536 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,536 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,536 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,536 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,537 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,540 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741833_1009 (size=93)
2024-12-08T00:51:21,540 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741833_1009 (size=93)
2024-12-08T00:51:21,540 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741833_1009 (size=93)
2024-12-08T00:51:21,544 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/e61ab431-825b-1c81-a601-f5aef6a87783/oldWALs
2024-12-08T00:51:21,544 INFO  [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:.meta(num 1733619081515)
2024-12-08T00:51:21,551 INFO  [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALComparator Thread=189 (was 179) - Thread LEAK? -, OpenFileDescriptor=423 (was 417) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=108 (was 108), ProcessCount=11 (was 11), AvailableMemoryMB=18043 (was 18048)
2024-12-08T00:51:21,557 INFO  [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=189, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=108, ProcessCount=11, AvailableMemoryMB=18043
2024-12-08T00:51:21,570 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741834_1010 (size=7)
2024-12-08T00:51:21,571 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741834_1010 (size=7)
2024-12-08T00:51:21,571 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741834_1010 (size=7)
2024-12-08T00:51:21,573 INFO  [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8
2024-12-08T00:51:21,574 INFO  [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:21,576 INFO  [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:21,579 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(383): testFindMemStoresEligibleForFlush
2024-12-08T00:51:21,601 INFO  [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
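[Annotation] The testWALComparator run above exercises the ordering of WAL files by the timestamp embedded in their names (e.g. wal.1733619081498 vs. wal.1733619081515.meta: prefix, then epoch millis, then an optional suffix). The standalone sketch below reproduces that ordering rule for the file names seen in the log; it is an illustration of the naming convention, not the AbstractFSWAL comparator code itself.

    import java.util.Arrays;
    import java.util.Comparator;

    public class WalNameOrder {
      // Extract the epoch-millis component that follows the "wal." prefix.
      static long walTimestamp(String name) {
        String[] parts = name.split("\\.");
        // parts[0] = prefix ("wal"), parts[1] = timestamp, parts[2] = optional suffix (".meta")
        return Long.parseLong(parts[1]);
      }

      public static void main(String[] args) {
        String[] wals = { "wal.1733619081515.meta", "wal.1733619081498" };
        Arrays.sort(wals, Comparator.comparingLong(WalNameOrder::walTimestamp));
        // Oldest first: [wal.1733619081498, wal.1733619081515.meta]
        System.out.println(Arrays.toString(wals));
      }
    }
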
2024-12-08T00:51:21,601 INFO  [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs, maxLogs=1
2024-12-08T00:51:21,602 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081602
2024-12-08T00:51:21,611 INFO  [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081602
2024-12-08T00:51:21,612 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:40445:40445)]
2024-12-08T00:51:21,615 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081615
2024-12-08T00:51:21,626 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,626 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,626 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,626 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,627 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,627 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081602 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081615
2024-12-08T00:51:21,628 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:36555:36555)]
2024-12-08T00:51:21,629 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081602 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,631 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741835_1011 (size=283)
2024-12-08T00:51:21,631 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741835_1011 (size=283)
2024-12-08T00:51:21,632 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741835_1011 (size=283)
2024-12-08T00:51:21,632 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081632
2024-12-08T00:51:21,641 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,642 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,642 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,642 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,642 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,643 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081615 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081632
2024-12-08T00:51:21,644 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:43783:43783)]
2024-12-08T00:51:21,644 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081615 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,645 INFO  [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): dfff0885730bfab9990f5d245683b9ec[cf1]
2024-12-08T00:51:21,646 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741836_1012 (size=283)
2024-12-08T00:51:21,646 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741836_1012 (size=283)
2024-12-08T00:51:21,646 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741836_1012 (size=283)
2024-12-08T00:51:21,646 INFO  [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-08T00:51:21,647 INFO  [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): dfff0885730bfab9990f5d245683b9ec[cf1]
2024-12-08T00:51:21,648 INFO  [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): dfff0885730bfab9990f5d245683b9ec[cf1]
2024-12-08T00:51:21,649 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081649
2024-12-08T00:51:21,657 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,657 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,657 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,657 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,658 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,658 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081632 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081649
2024-12-08T00:51:21,659 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:43783:43783)]
2024-12-08T00:51:21,659 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081632 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,660 INFO  [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-08T00:51:21,660 INFO  [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081602 to hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs/wal.1733619081602
2024-12-08T00:51:21,661 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081660
2024-12-08T00:51:21,661 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741837_1013 (size=283)
2024-12-08T00:51:21,661 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741837_1013 (size=283)
2024-12-08T00:51:21,662 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741837_1013 (size=283)
2024-12-08T00:51:21,663 INFO  [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081615 to hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs/wal.1733619081615
2024-12-08T00:51:21,666 INFO  [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081632 to hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs/wal.1733619081632
2024-12-08T00:51:21,672 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,672 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,673 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,673 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,673 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,673 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081649 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081660
2024-12-08T00:51:21,674 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:36555:36555)]
2024-12-08T00:51:21,674 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081649 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,675 INFO  [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-08T00:51:21,676 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741838_1014 (size=93)
2024-12-08T00:51:21,676 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741838_1014 (size=93)
2024-12-08T00:51:21,676 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741838_1014 (size=93)
2024-12-08T00:51:21,677 INFO  [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081649 to hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs/wal.1733619081649
2024-12-08T00:51:21,784 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081783
2024-12-08T00:51:21,795 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,795 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,796 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,796 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,796 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,796 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081660 with entries=4, filesize=465 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081783
2024-12-08T00:51:21,797 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:40445:40445)]
2024-12-08T00:51:21,797 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081660 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,798 INFO  [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-08T00:51:21,799 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741839_1015 (size=473)
2024-12-08T00:51:21,800 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741839_1015 (size=473)
2024-12-08T00:51:21,800 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741839_1015 (size=473)
2024-12-08T00:51:21,804 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081803
2024-12-08T00:51:21,812 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,812 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,812 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,812 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,812 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,813 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081783 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081803
2024-12-08T00:51:21,814 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:43783:43783)]
2024-12-08T00:51:21,815 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081783 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,815 INFO  [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 217b97e5a3c8d89ba61200c0b4f2cf0e[cf1],dfff0885730bfab9990f5d245683b9ec[cf1]
2024-12-08T00:51:21,815 INFO  [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 217b97e5a3c8d89ba61200c0b4f2cf0e[cf1],dfff0885730bfab9990f5d245683b9ec[cf1]
2024-12-08T00:51:21,815 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741840_1016 (size=283)
2024-12-08T00:51:21,816 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741840_1016 (size=283)
2024-12-08T00:51:21,816 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081815
2024-12-08T00:51:21,816 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741840_1016 (size=283)
2024-12-08T00:51:21,817 INFO  [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081660 to hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs/wal.1733619081660
2024-12-08T00:51:21,819 INFO  [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081783 to hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs/wal.1733619081783
2024-12-08T00:51:21,824 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,825 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,825 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,825 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,825 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,825 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081803 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081815
2024-12-08T00:51:21,826 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:36555:36555)]
2024-12-08T00:51:21,826 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081803 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,826 INFO  [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-08T00:51:21,828 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741841_1017 (size=93)
2024-12-08T00:51:21,828 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741841_1017 (size=93)
2024-12-08T00:51:21,829 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741841_1017 (size=93)
2024-12-08T00:51:21,829 INFO  [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081803 to hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs/wal.1733619081803
2024-12-08T00:51:21,931 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081931
2024-12-08T00:51:21,945 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,946 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,946 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,946 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,946 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,946 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081815 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081931
2024-12-08T00:51:21,947 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:40445:40445)]
2024-12-08T00:51:21,947 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081815 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,948 INFO  [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-08T00:51:21,948 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081948
2024-12-08T00:51:21,949 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741842_1018 (size=283)
2024-12-08T00:51:21,950 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741842_1018 (size=283)
2024-12-08T00:51:21,950 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741842_1018 (size=283)
2024-12-08T00:51:21,951 INFO  [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081815 to hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs/wal.1733619081815
2024-12-08T00:51:21,957 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,957 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,957 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,958 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,958 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,958 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081931 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081948
2024-12-08T00:51:21,959 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:43783:43783)]
2024-12-08T00:51:21,959 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081931 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,961 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741843_1019 (size=93)
2024-12-08T00:51:21,962 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741843_1019 (size=93)
2024-12-08T00:51:21,962 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741843_1019 (size=93)
2024-12-08T00:51:21,963 INFO  [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081931 to hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs/wal.1733619081931
2024-12-08T00:51:21,965 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081965
2024-12-08T00:51:21,975 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,975 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,975 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,975 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,975 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,976 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081948 with entries=6, filesize=709 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081965
2024-12-08T00:51:21,976 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:36555:36555)]
2024-12-08T00:51:21,977 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081948 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,978 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741844_1020 (size=717)
2024-12-08T00:51:21,979 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741844_1020 (size=717)
2024-12-08T00:51:21,979 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741844_1020 (size=717)
2024-12-08T00:51:21,979 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619081979
2024-12-08T00:51:21,987 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,987 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,988 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,988 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,988 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,988 INFO  [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081965 with entries=2, filesize=293 B; new WAL /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081979
2024-12-08T00:51:21,989 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:40445:40445)]
2024-12-08T00:51:21,989 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:41187/user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/testFindMemStoresEligibleForFlush/wal.1733619081965 is not closed yet, will try archiving it next time
2024-12-08T00:51:21,989 INFO  [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): ff6211ac57c2bb75946f4879e3de5155[cf1,cf3,cf2]
2024-12-08T00:51:21,989 INFO  [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-08T00:51:21,990 INFO  [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): ff6211ac57c2bb75946f4879e3de5155[cf3,cf2]
2024-12-08T00:51:21,990 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,990 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,991 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,991 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741845_1021 (size=301)
2024-12-08T00:51:21,991 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,991 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741845_1021 (size=301)
2024-12-08T00:51:21,992 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741845_1021 (size=301)
2024-12-08T00:51:21,992 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:21,994 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741846_1022 (size=93)
2024-12-08T00:51:21,994 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741846_1022 (size=93)
2024-12-08T00:51:21,995 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741846_1022 (size=93)
2024-12-08T00:51:22,000 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 3 WAL file(s) to /user/jenkins/test-data/9c635c7c-dbfb-08b6-6679-c35c71c14e01/oldWALs
2024-12-08T00:51:22,001 INFO  [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733619081979)
2024-12-08T00:51:22,007 INFO  [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=194 (was 189) - Thread LEAK? -, OpenFileDescriptor=423 (was 423), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=108 (was 108), ProcessCount=11 (was 11), AvailableMemoryMB=18033 (was 18043)
2024-12-08T00:51:22,014 INFO  [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=194, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=108, ProcessCount=11, AvailableMemoryMB=18033
2024-12-08T00:51:22,027 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741847_1023 (size=7)
2024-12-08T00:51:22,028 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741847_1023 (size=7)
2024-12-08T00:51:22,028 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741847_1023 (size=7)
2024-12-08T00:51:22,032 INFO  [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8
2024-12-08T00:51:22,032 INFO  [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:22,034 INFO  [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:22,039 INFO  [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
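[Annotation] The "Too many WALs; count=2, max=1; forcing (partial) flush of N region(s)" records in the testFindMemStoresEligibleForFlush run above show the WAL picking the regions whose unflushed edits pin its oldest file: flushing them lets that file be archived to oldWALs. The sketch below is a rough, self-contained model of that selection under the assumption that each live WAL file knows which regions still hold unflushed edits in it; region names are taken from the log, and the real code additionally tracks per-family sequence ids (hence suffixes like [cf1] or [cf3,cf2]).

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;
    import java.util.TreeMap;

    public class TooManyWals {
      // Oldest-first: WAL creation timestamp -> regions with unflushed edits in that WAL.
      static final TreeMap<Long, Set<String>> unflushed = new TreeMap<>();

      static List<String> regionsToFlush(int maxLogs) {
        if (unflushed.size() <= maxLogs) {
          return Collections.emptyList(); // under the limit, nothing to force
        }
        // Flushing the regions pinning the oldest WAL makes it eligible for archiving.
        return new ArrayList<>(unflushed.firstEntry().getValue());
      }

      public static void main(String[] args) {
        unflushed.put(1733619081602L, new HashSet<>(Set.of("dfff0885730bfab9990f5d245683b9ec")));
        unflushed.put(1733619081615L, new HashSet<>(Set.of("dfff0885730bfab9990f5d245683b9ec")));
        // With maxLogs=1 as in the log, the region in the oldest WAL is chosen.
        System.out.println("forcing (partial) flush of: " + regionsToFlush(1));
      }
    }
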
2024-12-08T00:51:22,039 INFO  [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/be25ef0f-ccaa-7405-0e4e-67441c96157a/testRollWriterForClosedWAL, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/be25ef0f-ccaa-7405-0e4e-67441c96157a/testRollWriterForClosedWAL, maxLogs=1760
2024-12-08T00:51:22,040 INFO  [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619082040
2024-12-08T00:51:22,048 INFO  [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/be25ef0f-ccaa-7405-0e4e-67441c96157a/testRollWriterForClosedWAL/wal.1733619082040
2024-12-08T00:51:22,054 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:43783:43783)]
2024-12-08T00:51:22,059 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:22,059 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:22,059 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:22,060 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:22,060 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:22,063 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741848_1024 (size=93)
2024-12-08T00:51:22,063 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741848_1024 (size=93)
2024-12-08T00:51:22,063 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741848_1024 (size=93)
2024-12-08T00:51:22,066 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/be25ef0f-ccaa-7405-0e4e-67441c96157a/testRollWriterForClosedWAL
2024-12-08T00:51:22,066 INFO  [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733619082040)
2024-12-08T00:51:22,073 INFO  [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=199 (was 194) - Thread LEAK? -, OpenFileDescriptor=423 (was 423), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=108 (was 108), ProcessCount=11 (was 11), AvailableMemoryMB=18032 (was 18033)
2024-12-08T00:51:22,080 INFO  [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=199, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=108, ProcessCount=11, AvailableMemoryMB=18032
2024-12-08T00:51:22,094 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741849_1025 (size=7)
2024-12-08T00:51:22,094 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741849_1025 (size=7)
2024-12-08T00:51:22,095 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741849_1025 (size=7)
2024-12-08T00:51:22,096 INFO  [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8
2024-12-08T00:51:22,097 INFO  [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:22,098 INFO  [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:22,106 INFO  [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
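[Annotation] The recurring "WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=..." records (one appears right below, and similar ones above) describe the two conventions every test rig here relies on: log files are named prefix + "." + timestamp + suffix, and a writer is rolled once it reaches rollsize. The sketch below restates just those two rules; the method names are illustrative, not the AbstractFSWAL API.

    public class WalNaming {
      // e.g. wal.1733619082040 (empty suffix) or wal.1733619081515.meta (suffix ".meta")
      static String walFileName(String prefix, long ts, String suffix) {
        return prefix + "." + ts + suffix;
      }

      // Roll once the current file reaches the configured roll size.
      static boolean shouldRoll(long bytesWritten, long rollSize) {
        return bytesWritten >= rollSize;
      }

      public static void main(String[] args) {
        System.out.println(walFileName("wal", 1733619082040L, ""));
        System.out.println(walFileName("wal", 1733619081515L, ".meta"));
        System.out.println(shouldRoll(1_200_000, 1_048_576)); // rollsize=1 MB -> true
      }
    }
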
2024-12-08T00:51:22,106 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/testMaxFlushedSequenceIdGoBackwards, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/oldWALs, maxLogs=1760 2024-12-08T00:51:22,107 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619082107 2024-12-08T00:51:22,114 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/testMaxFlushedSequenceIdGoBackwards/wal.1733619082107 2024-12-08T00:51:22,115 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:36555:36555)] 2024-12-08T00:51:22,117 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => eba24f426c1fbd4ccbf19081baba17e9, NAME => 'table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34 2024-12-08T00:51:22,126 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741851_1027 (size=40) 2024-12-08T00:51:22,127 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741851_1027 (size=40) 2024-12-08T00:51:22,127 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741851_1027 (size=40) 2024-12-08T00:51:22,128 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-08T00:51:22,130 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,132 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 
9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region eba24f426c1fbd4ccbf19081baba17e9 columnFamilyName a 2024-12-08T00:51:22,132 DEBUG [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-08T00:51:22,133 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] regionserver.HStore(327): Store=eba24f426c1fbd4ccbf19081baba17e9/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-08T00:51:22,133 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,135 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region eba24f426c1fbd4ccbf19081baba17e9 columnFamilyName b 2024-12-08T00:51:22,135 DEBUG [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-08T00:51:22,136 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] regionserver.HStore(327): Store=eba24f426c1fbd4ccbf19081baba17e9/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-08T00:51:22,136 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,137 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/table/eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,138 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/table/eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,139 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/0cd75b5b-498f-9431-bebf-333c32729ec0/data/default/table/eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,140 DEBUG [Time-limited test {}] regionserver.HRegion(1048): 
stopping wal replay for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,140 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,142 DEBUG [Time-limited test {}] regionserver.FlushLargeStoresPolicy(65): No hbase.hregion.percolumnfamilyflush.size.lower.bound set in table table descriptor;using region.getMemStoreFlushHeapSize/# of families (64.0 M)) instead. 2024-12-08T00:51:22,144 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,148 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:41187/user/jenkins/test-data/0cd75b5b-498f-9431-bebf-333c32729ec0/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-08T00:51:22,149 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened eba24f426c1fbd4ccbf19081baba17e9; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=62645006, jitterRate=-0.06651666760444641}}}, FlushLargeStoresPolicy{flushSizeLowerBound=67108864} 2024-12-08T00:51:22,151 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for eba24f426c1fbd4ccbf19081baba17e9: Writing region info on filesystem at 1733619082128Initializing all the Stores at 1733619082130 (+2 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733619082130Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733619082130Cleaning up temporary data from old regions at 1733619082140 (+10 ms)Region opened successfully at 1733619082150 (+10 ms) 2024-12-08T00:51:22,151 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing eba24f426c1fbd4ccbf19081baba17e9, disabling compactions & flushes 2024-12-08T00:51:22,151 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. 2024-12-08T00:51:22,151 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. 2024-12-08T00:51:22,151 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. after waiting 0 ms 2024-12-08T00:51:22,151 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. 2024-12-08T00:51:22,152 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. 
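The HRegion(7572) "creating" record above spells out the full descriptor for table 'table': families 'a' and 'b', one version each, ROW bloom filters, 64 KB blocks, everything else at defaults. A minimal client-side reconstruction of that schema, assuming the current TableDescriptorBuilder API:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;

public class TableSchemaSketch {
  public static void main(String[] args) {
    // Family 'a' as logged: VERSIONS=1, BLOOMFILTER=ROW, BLOCKSIZE=65536.
    ColumnFamilyDescriptor a = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("a"))
        .setMaxVersions(1).setBloomFilterType(BloomType.ROW).setBlocksize(65536).build();
    // Family 'b' is identical apart from its name.
    ColumnFamilyDescriptor b = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("b"))
        .setMaxVersions(1).setBloomFilterType(BloomType.ROW).setBlocksize(65536).build();
    TableDescriptor table = TableDescriptorBuilder.newBuilder(TableName.valueOf("table"))
        .setColumnFamily(a).setColumnFamily(b).build();
    System.out.println(table);
  }
}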
2024-12-08T00:51:22,152 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for eba24f426c1fbd4ccbf19081baba17e9: Waiting for close lock at 1733619082151Disabling compacts and flushes for region at 1733619082151Disabling writes for close at 1733619082151Writing region close event to WAL at 1733619082152 (+1 ms)Closed at 1733619082152 2024-12-08T00:51:22,525 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => eba24f426c1fbd4ccbf19081baba17e9, NAME => 'table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9.', STARTKEY => '', ENDKEY => ''} 2024-12-08T00:51:22,542 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,542 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-08T00:51:22,544 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,545 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,548 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,549 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region eba24f426c1fbd4ccbf19081baba17e9 columnFamilyName a 2024-12-08T00:51:22,550 DEBUG [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-08T00:51:22,550 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] regionserver.HStore(327): Store=eba24f426c1fbd4ccbf19081baba17e9/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-08T00:51:22,551 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,553 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, 
maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region eba24f426c1fbd4ccbf19081baba17e9 columnFamilyName b 2024-12-08T00:51:22,553 DEBUG [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-08T00:51:22,554 INFO [StoreOpener-eba24f426c1fbd4ccbf19081baba17e9-1 {}] regionserver.HStore(327): Store=eba24f426c1fbd4ccbf19081baba17e9/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-08T00:51:22,555 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,556 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/table/eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,556 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/table/eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,559 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/0cd75b5b-498f-9431-bebf-333c32729ec0/data/default/table/eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,560 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,560 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,564 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,566 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened eba24f426c1fbd4ccbf19081baba17e9; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=72585849, jitterRate=0.0816134363412857}}}, org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL$FlushSpecificStoresPolicy@12ca6f8c 2024-12-08T00:51:22,566 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for eba24f426c1fbd4ccbf19081baba17e9 2024-12-08T00:51:22,569 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for eba24f426c1fbd4ccbf19081baba17e9: Running coprocessor pre-open hook at 1733619082545Writing region info on filesystem at 1733619082545Initializing all the Stores at 1733619082547 (+2 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', 
REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733619082547Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733619082547Cleaning up temporary data from old regions at 1733619082560 (+13 ms)Running coprocessor post-open hooks at 1733619082566 (+6 ms)Region opened successfully at 1733619082569 (+3 ms) 2024-12-08T00:51:25,587 INFO [pool-78-thread-1 {}] regionserver.HRegion(2902): Flushing eba24f426c1fbd4ccbf19081baba17e9 2/2 column families, dataSize=96 B heapSize=896 B 2024-12-08T00:51:28,057 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties 2024-12-08T00:51:28,618 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/a/b42602135d5c401abb1b28e45bc2ae43 is 28, key is a/a:a/1733619082576/Put/seqid=0 2024-12-08T00:51:28,626 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741852_1028 (size=4945) 2024-12-08T00:51:28,626 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741852_1028 (size=4945) 2024-12-08T00:51:28,627 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741852_1028 (size=4945) 2024-12-08T00:51:28,627 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/a/b42602135d5c401abb1b28e45bc2ae43 2024-12-08T00:51:28,650 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/b/fa469795f6604e3a8dc93fc97cd50ce3 is 28, key is a/b:b/1733619082576/Put/seqid=0 2024-12-08T00:51:28,657 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741853_1029 (size=4945) 2024-12-08T00:51:28,658 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741853_1029 (size=4945) 2024-12-08T00:51:28,658 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741853_1029 (size=4945) 2024-12-08T00:51:28,658 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), 
to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/b/fa469795f6604e3a8dc93fc97cd50ce3 2024-12-08T00:51:28,670 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/a/b42602135d5c401abb1b28e45bc2ae43 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/a/b42602135d5c401abb1b28e45bc2ae43 2024-12-08T00:51:28,679 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/a/b42602135d5c401abb1b28e45bc2ae43, entries=1, sequenceid=6, filesize=4.8 K 2024-12-08T00:51:28,681 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/b/fa469795f6604e3a8dc93fc97cd50ce3 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/b/fa469795f6604e3a8dc93fc97cd50ce3 2024-12-08T00:51:28,691 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/b/fa469795f6604e3a8dc93fc97cd50ce3, entries=1, sequenceid=6, filesize=4.8 K 2024-12-08T00:51:28,693 INFO [pool-78-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~96 B/96, heapSize ~864 B/864, currentSize=0 B/0 for eba24f426c1fbd4ccbf19081baba17e9 in 3106ms, sequenceid=6, compaction requested=false 2024-12-08T00:51:28,694 DEBUG [pool-78-thread-1 {}] regionserver.HRegion(2603): Flush status journal for eba24f426c1fbd4ccbf19081baba17e9: 2024-12-08T00:51:28,694 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(676): Flush result:FLUSHED_NO_COMPACTION_NEEDED 2024-12-08T00:51:28,694 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(677): Flush succeeded:true 2024-12-08T00:51:28,698 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing eba24f426c1fbd4ccbf19081baba17e9 1/2 column families, dataSize=48 B heapSize=704 B; a={dataSize=24 B, heapSize=352 B, offHeapSize=0 B} 2024-12-08T00:51:28,704 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/a/ae6315c014c546f7a880b20bcb54cb9f is 28, key is a/a:a/1733619082576/Put/seqid=0 2024-12-08T00:51:28,711 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741854_1030 (size=4945) 2024-12-08T00:51:28,711 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to 
blk_1073741854_1030 (size=4945) 2024-12-08T00:51:28,712 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741854_1030 (size=4945) 2024-12-08T00:51:28,712 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=10 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/a/ae6315c014c546f7a880b20bcb54cb9f 2024-12-08T00:51:28,721 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/a/ae6315c014c546f7a880b20bcb54cb9f as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/a/ae6315c014c546f7a880b20bcb54cb9f 2024-12-08T00:51:28,729 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/a/ae6315c014c546f7a880b20bcb54cb9f, entries=1, sequenceid=10, filesize=4.8 K 2024-12-08T00:51:28,731 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=24 B/24 for eba24f426c1fbd4ccbf19081baba17e9 in 33ms, sequenceid=10, compaction requested=false 2024-12-08T00:51:28,732 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for eba24f426c1fbd4ccbf19081baba17e9: 2024-12-08T00:51:28,733 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing eba24f426c1fbd4ccbf19081baba17e9, disabling compactions & flushes 2024-12-08T00:51:28,733 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. 2024-12-08T00:51:28,733 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. 2024-12-08T00:51:28,733 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. after waiting 0 ms 2024-12-08T00:51:28,733 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. 
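The flush records above show cells keyed a/a:a and a/b:b, i.e. a single row 'a' with one qualifier per family, flushed to .tmp HFiles and then committed into the store. The test drives HRegion directly; the client-side equivalent below is only a sketch of the same write-then-flush pattern.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class PutAndFlushSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table table = conn.getTable(TableName.valueOf("table"));
         Admin admin = conn.getAdmin()) {
      Put put = new Put(Bytes.toBytes("a"));                                     // row 'a'
      put.addColumn(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); // cell a/a:a
      put.addColumn(Bytes.toBytes("b"), Bytes.toBytes("b"), Bytes.toBytes("b")); // cell a/b:b
      table.put(put);
      admin.flush(TableName.valueOf("table")); // forces the memstore-to-HFile path seen above
    }
  }
}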
2024-12-08T00:51:28,733 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing eba24f426c1fbd4ccbf19081baba17e9 2/2 column families, dataSize=24 B heapSize=608 B 2024-12-08T00:51:28,739 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/b/5ddce547375a48a4b227986fabbb6bed is 28, key is a/b:b/1733619082576/Put/seqid=0 2024-12-08T00:51:28,745 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741855_1031 (size=4945) 2024-12-08T00:51:28,746 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741855_1031 (size=4945) 2024-12-08T00:51:28,746 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741855_1031 (size=4945) 2024-12-08T00:51:28,746 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=13 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/b/5ddce547375a48a4b227986fabbb6bed 2024-12-08T00:51:28,756 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/.tmp/b/5ddce547375a48a4b227986fabbb6bed as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/b/5ddce547375a48a4b227986fabbb6bed 2024-12-08T00:51:28,765 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/b/5ddce547375a48a4b227986fabbb6bed, entries=1, sequenceid=13, filesize=4.8 K 2024-12-08T00:51:28,767 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for eba24f426c1fbd4ccbf19081baba17e9 in 34ms, sequenceid=13, compaction requested=false 2024-12-08T00:51:28,774 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:41187/user/jenkins/test-data/0cd75b5b-498f-9431-bebf-333c32729ec0/data/default/table/eba24f426c1fbd4ccbf19081baba17e9/recovered.edits/16.seqid, newMaxSeqId=16, maxSeqId=1 2024-12-08T00:51:28,774 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. 
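The WALSplitUtil record above persists newMaxSeqId=16 by writing a marker file named recovered.edits/16.seqid; on reopen, the region resumes its sequence ids above that value. The decoding below is illustrative of the file-name convention only, not HBase's own parser.

// Illustrative only: an "<n>.seqid" file name under recovered.edits encodes
// the region's max flushed/closed sequence id in the digits before the dot.
public class SeqIdFileName {
  static long parseSeqId(String fileName) {
    return Long.parseLong(fileName.substring(0, fileName.indexOf('.')));
  }

  public static void main(String[] args) {
    System.out.println(parseSeqId("16.seqid")); // prints 16, as in the log
  }
}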
2024-12-08T00:51:28,775 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for eba24f426c1fbd4ccbf19081baba17e9: Waiting for close lock at 1733619088733Running coprocessor pre-close hooks at 1733619088733Disabling compacts and flushes for region at 1733619088733Disabling writes for close at 1733619088733Obtaining lock to block concurrent updates at 1733619088733Preparing flush snapshotting stores in eba24f426c1fbd4ccbf19081baba17e9 at 1733619088733Finished memstore snapshotting table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9., syncing WAL and waiting on mvcc, flushsize=dataSize=24, getHeapSize=576, getOffHeapSize=0, getCellsCount=1 at 1733619088734 (+1 ms)Flushing stores of table,,1733619082116.eba24f426c1fbd4ccbf19081baba17e9. at 1733619088735 (+1 ms)Flushing eba24f426c1fbd4ccbf19081baba17e9/b: creating writer at 1733619088735Flushing eba24f426c1fbd4ccbf19081baba17e9/b: appending metadata at 1733619088739 (+4 ms)Flushing eba24f426c1fbd4ccbf19081baba17e9/b: closing flushed file at 1733619088739Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@520c2878: reopening flushed file at 1733619088754 (+15 ms)Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for eba24f426c1fbd4ccbf19081baba17e9 in 34ms, sequenceid=13, compaction requested=false at 1733619088767 (+13 ms)Writing region close event to WAL at 1733619088768 (+1 ms)Running coprocessor post-close hooks at 1733619088774 (+6 ms)Closed at 1733619088774 2024-12-08T00:51:28,775 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-08T00:51:28,775 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-08T00:51:28,776 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-08T00:51:28,776 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-08T00:51:28,776 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-08T00:51:28,778 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741850_1026 (size=2357) 2024-12-08T00:51:28,778 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741850_1026 (size=2357) 2024-12-08T00:51:28,778 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741850_1026 (size=2357) 2024-12-08T00:51:28,781 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/oldWALs 2024-12-08T00:51:28,781 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733619082107) 2024-12-08T00:51:28,787 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=207 (was 199) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data3 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) 
java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:52712 [Waiting for operation #5] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Timer for 'HBase' metrics system java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.util.TimerThread.mainLoop(Timer.java:563) java.base@17.0.11/java.util.TimerThread.run(Timer.java:516) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:37940 [Waiting for operation #9] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:58130 [Waiting for operation #5] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) 
java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data4 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=425 (was 423) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=99 (was 108), ProcessCount=11 (was 11), AvailableMemoryMB=17987 (was 18032) 2024-12-08T00:51:28,794 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=207, OpenFileDescriptor=425, MaxFileDescriptor=1048576, SystemLoadAverage=99, ProcessCount=11, AvailableMemoryMB=17986 2024-12-08T00:51:28,806 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741856_1032 (size=7) 2024-12-08T00:51:28,807 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741856_1032 (size=7) 2024-12-08T00:51:28,807 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741856_1032 (size=7) 2024-12-08T00:51:28,809 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8 2024-12-08T00:51:28,809 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-08T00:51:28,812 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-08T00:51:28,821 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(244): No decryptEncryptedDataEncryptionKey method in DFSClient, should be hadoop version with HDFS-12396 java.lang.NoSuchMethodException: org.apache.hadoop.hdfs.DFSClient.decryptEncryptedDataEncryptionKey(org.apache.hadoop.fs.FileEncryptionInfo) at java.lang.Class.getDeclaredMethod(Class.java:2675) ~[?:?] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelperWithoutHDFS12396(FanOutOneBlockAsyncDFSOutputSaslHelper.java:183) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelper(FanOutOneBlockAsyncDFSOutputSaslHelper.java:242) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.<clinit>(FanOutOneBlockAsyncDFSOutputSaslHelper.java:253) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at java.lang.Class.forName0(Native Method) ~[?:?] at java.lang.Class.forName(Class.java:375) ~[?:?] at org.apache.hadoop.hbase.wal.AsyncFSWALProvider.load(AsyncFSWALProvider.java:150) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.getProviderClass(WALFactory.java:174) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:262) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:214) ~[classes/:?]
at org.apache.hadoop.hbase.HBaseTestingUtil.createWal(HBaseTestingUtil.java:2160) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2205) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2169) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFlushSequenceIdIsGreaterThanAllEditsInHFile(AbstractTestFSWAL.java:425) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
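The NoSuchMethodException above is expected noise: the SASL helper probes DFSClient by reflection for decryptEncryptedDataEncryptionKey (added by HDFS-12396) and falls back to the older code path when the method is absent. The pattern in miniature (class and method names here are placeholders, not the real helper):

import java.lang.reflect.Method;

// Capability probe via reflection: look the method up once and pick a code
// path -- the NoSuchMethodException is caught, not a failure.
public class CapabilityProbe {
  static boolean hasMethod(Class<?> cls, String name, Class<?>... params) {
    try {
      Method m = cls.getDeclaredMethod(name, params);
      return m != null;
    } catch (NoSuchMethodException e) {
      return false; // older API: take the pre-HDFS-12396 fallback path
    }
  }

  public static void main(String[] args) {
    System.out.println(hasMethod(String.class, "isBlank")); // true on JDK 11+
  }
}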
2024-12-08T00:51:28,824 INFO [Time-limited test {}] wal.WALFactory(196): Instantiating WALProvider of type class org.apache.hadoop.hbase.wal.AsyncFSWALProvider 2024-12-08T00:51:28,828 DEBUG [Time-limited test {}] channel.MultithreadEventLoopGroup(44): -Dio.netty.eventLoopThreads: 16 2024-12-08T00:51:28,837 DEBUG [Time-limited test {}] nio.NioEventLoop(110): -Dio.netty.noKeySetOptimization: false 2024-12-08T00:51:28,837 DEBUG [Time-limited test {}] nio.NioEventLoop(111): -Dio.netty.selectorAutoRebuildThreshold: 512 2024-12-08T00:51:28,849 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor defaultMonitorName 2024-12-08T00:51:28,852 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-08T00:51:28,852 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=hregion-47738116, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/WALs/hregion-47738116, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/oldWALs, maxLogs=1760 2024-12-08T00:51:28,865 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputHelper(524): When create output stream for /user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/WALs/hregion-47738116/hregion-47738116.1733619088853, exclude list is [], retry=0 2024-12-08T00:51:28,875 DEBUG [Time-limited test {}] channel.DefaultChannelId(84): -Dio.netty.processId: 26110 (auto-detected) 2024-12-08T00:51:28,877 DEBUG [Time-limited test {}] channel.DefaultChannelId(106): -Dio.netty.machineId: 02:42:ac:ff:fe:11:00:02 (auto-detected) 2024-12-08T00:51:28,894 DEBUG [AsyncFSWAL-1-3 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:39559,DS-6b32010d-891b-447b-a8a6-5a84dde2e565,DISK] 2024-12-08T00:51:28,894 DEBUG [AsyncFSWAL-1-2 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:45199,DS-ad358436-12a8-4e7a-bdde-c750d5647305,DISK] 2024-12-08T00:51:28,894 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:38945,DS-474d2394-7f95-48cf-aa86-c41e2400ee6e,DISK] 2024-12-08T00:51:28,897 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.ProtobufDecoder(117): Hadoop 3.3 and above shades protobuf. 
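WALFactory's instantiation of AsyncFSWALProvider above is configuration-driven. Assuming the stock "hbase.wal.provider" key (where "asyncfs" selects the provider seen here and "filesystem" the classic FSHLog used earlier in this run), a sketch of pinning the provider explicitly:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

// Sketch: choose the WAL provider instead of relying on the default.
public class WalProviderSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.wal.provider", "asyncfs"); // AsyncFSWALProvider, as logged above
    System.out.println(conf.get("hbase.wal.provider"));
  }
}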
2024-12-08T00:51:28,923 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/WALs/hregion-47738116/hregion-47738116.1733619088853 2024-12-08T00:51:28,923 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new AsyncFSWAL writer with pipeline: [(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:40445:40445)] 2024-12-08T00:51:28,923 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => cc424ad19f85f4364566ce5df7b6c54d, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testFlushSequenceIdIsGreaterThanAllEditsInHFile', {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 2024-12-08T00:51:28,932 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741858_1034 (size=82) 2024-12-08T00:51:28,932 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741858_1034 (size=82) 2024-12-08T00:51:28,933 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741858_1034 (size=82) 2024-12-08T00:51:28,934 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-08T00:51:28,936 INFO [StoreOpener-cc424ad19f85f4364566ce5df7b6c54d-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region cc424ad19f85f4364566ce5df7b6c54d 2024-12-08T00:51:28,939 INFO [StoreOpener-cc424ad19f85f4364566ce5df7b6c54d-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region cc424ad19f85f4364566ce5df7b6c54d columnFamilyName f 2024-12-08T00:51:28,939 DEBUG [StoreOpener-cc424ad19f85f4364566ce5df7b6c54d-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-08T00:51:28,940 INFO [StoreOpener-cc424ad19f85f4364566ce5df7b6c54d-1 {}] regionserver.HStore(327): Store=cc424ad19f85f4364566ce5df7b6c54d/f, 
memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-08T00:51:28,940 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for cc424ad19f85f4364566ce5df7b6c54d 2024-12-08T00:51:28,941 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d 2024-12-08T00:51:28,942 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d 2024-12-08T00:51:28,942 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d 2024-12-08T00:51:28,943 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for cc424ad19f85f4364566ce5df7b6c54d 2024-12-08T00:51:28,943 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for cc424ad19f85f4364566ce5df7b6c54d 2024-12-08T00:51:28,945 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for cc424ad19f85f4364566ce5df7b6c54d 2024-12-08T00:51:28,947 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-08T00:51:28,948 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened cc424ad19f85f4364566ce5df7b6c54d; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=71696083, jitterRate=0.06835488975048065}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-12-08T00:51:28,952 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for cc424ad19f85f4364566ce5df7b6c54d: Writing region info on filesystem at 1733619088934Initializing all the Stores at 1733619088936 (+2 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733619088936Cleaning up temporary data from old regions at 1733619088943 (+7 ms)Region opened successfully at 1733619088952 (+9 ms) 2024-12-08T00:51:28,952 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing cc424ad19f85f4364566ce5df7b6c54d, disabling compactions & flushes 2024-12-08T00:51:28,952 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d. 2024-12-08T00:51:28,952 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d. 
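The "Opened ... desiredMaxFileSize=71696083, jitterRate=0.0683..." record above is consistent with a 64 MB base size scaled by (1 + jitterRate), and the same arithmetic reproduces the 62645006 / -0.0665 pairing from the earlier region opens. Both the base size and the formula are inferred from the logged numbers, not taken from the test source, and truncation may shift the result by one byte:

// Inferred check: desiredMaxFileSize ~= base + (long)(base * jitterRate), base = 64 MB.
public class JitterCheck {
  public static void main(String[] args) {
    long base = 64L * 1024 * 1024; // 67108864, assumed base max file size
    System.out.println(base + (long) (base * 0.06835488975048065));  // ~71696083, as logged
    System.out.println(base + (long) (base * -0.06651666760444641)); // ~62645006, as logged
  }
}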
2024-12-08T00:51:28,952 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d. after waiting 0 ms 2024-12-08T00:51:28,952 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d. 2024-12-08T00:51:28,953 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d. 2024-12-08T00:51:28,953 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for cc424ad19f85f4364566ce5df7b6c54d: Waiting for close lock at 1733619088952Disabling compacts and flushes for region at 1733619088952Disabling writes for close at 1733619088952Writing region close event to WAL at 1733619088953 (+1 ms)Closed at 1733619088953 2024-12-08T00:51:28,957 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741857_1033 (size=93) 2024-12-08T00:51:28,957 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741857_1033 (size=93) 2024-12-08T00:51:28,957 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741857_1033 (size=93) 2024-12-08T00:51:28,960 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/oldWALs 2024-12-08T00:51:28,960 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: AsyncFSWAL hregion-47738116:(num 1733619088853) 2024-12-08T00:51:28,962 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-12-08T00:51:28,963 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/testFlushSequenceIdIsGreaterThanAllEditsInHFile, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/testFlushSequenceIdIsGreaterThanAllEditsInHFile, maxLogs=1760
2024-12-08T00:51:28,963 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619088963
2024-12-08T00:51:28,970 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/testFlushSequenceIdIsGreaterThanAllEditsInHFile/wal.1733619088963
2024-12-08T00:51:28,972 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new writer with pipeline: [(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:43783:43783)]
2024-12-08T00:51:28,972 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:28,974 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => cc424ad19f85f4364566ce5df7b6c54d, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d.', STARTKEY => '', ENDKEY => ''}
2024-12-08T00:51:28,974 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-08T00:51:28,974 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for cc424ad19f85f4364566ce5df7b6c54d
2024-12-08T00:51:28,974 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for cc424ad19f85f4364566ce5df7b6c54d
2024-12-08T00:51:28,977 INFO [StoreOpener-cc424ad19f85f4364566ce5df7b6c54d-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region cc424ad19f85f4364566ce5df7b6c54d
2024-12-08T00:51:28,978 INFO [StoreOpener-cc424ad19f85f4364566ce5df7b6c54d-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region cc424ad19f85f4364566ce5df7b6c54d columnFamilyName f
2024-12-08T00:51:28,978 DEBUG [StoreOpener-cc424ad19f85f4364566ce5df7b6c54d-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-08T00:51:28,979 INFO [StoreOpener-cc424ad19f85f4364566ce5df7b6c54d-1 {}] regionserver.HStore(327): Store=cc424ad19f85f4364566ce5df7b6c54d/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-08T00:51:28,979 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for cc424ad19f85f4364566ce5df7b6c54d
2024-12-08T00:51:28,980 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d
2024-12-08T00:51:28,981 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d
2024-12-08T00:51:28,982 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d
2024-12-08T00:51:28,983 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for cc424ad19f85f4364566ce5df7b6c54d
2024-12-08T00:51:28,983 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for cc424ad19f85f4364566ce5df7b6c54d
2024-12-08T00:51:28,985 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for cc424ad19f85f4364566ce5df7b6c54d
2024-12-08T00:51:28,986 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened cc424ad19f85f4364566ce5df7b6c54d; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=64727732, jitterRate=-0.03548163175582886}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-08T00:51:28,988 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for cc424ad19f85f4364566ce5df7b6c54d: Writing region info on filesystem at 1733619088974Initializing all the Stores at 1733619088976 (+2 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733619088976Cleaning up temporary data from old regions at 1733619088983 (+7 ms)Region opened successfully at 1733619088988 (+5 ms)
2024-12-08T00:51:29,001 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.log.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir so I do NOT create it in target/test-data/f5edf70c-2c5d-86f3-078a-5bf503de447b
2024-12-08T00:51:29,002 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.log.dir property value differs in configuration and system: Configuration=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/../logs while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir Erasing configuration value by system value.
2024-12-08T00:51:29,002 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.tmp.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.tmp.dir so I do NOT create it in target/test-data/f5edf70c-2c5d-86f3-078a-5bf503de447b
2024-12-08T00:51:29,002 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.tmp.dir property value differs in configuration and system: Configuration=/tmp/hadoop-jenkins while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.tmp.dir Erasing configuration value by system value.
2024-12-08T00:51:29,002 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5edf70c-2c5d-86f3-078a-5bf503de447b
2024-12-08T00:51:29,026 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing cc424ad19f85f4364566ce5df7b6c54d 1/1 column families, dataSize=1.14 KB heapSize=2.13 KB
2024-12-08T00:51:29,126 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:29,227 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:29,328 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:29,428 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:29,530 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:29,630 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:29,731 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:29,832 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:29,933 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:30,034 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:30,134 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:30,209 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d/.tmp/f/ee41d3db5cc2429bbb290a3635e6c85f is 121, key is testFlushSequenceIdIsGreaterThanAllEditsInHFile/f:x0/1733619089002/Put/seqid=0
2024-12-08T00:51:30,216 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741860_1036 (size=6333)
2024-12-08T00:51:30,216 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741860_1036 (size=6333)
2024-12-08T00:51:30,216 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741860_1036 (size=6333)
2024-12-08T00:51:30,217 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=1.14 KB at sequenceid=23 (bloomFilter=true), to=hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d/.tmp/f/ee41d3db5cc2429bbb290a3635e6c85f
2024-12-08T00:51:30,228 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d/.tmp/f/ee41d3db5cc2429bbb290a3635e6c85f as hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d/f/ee41d3db5cc2429bbb290a3635e6c85f
2024-12-08T00:51:30,237 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/cc424ad19f85f4364566ce5df7b6c54d/f/ee41d3db5cc2429bbb290a3635e6c85f, entries=10, sequenceid=23, filesize=6.2 K
2024-12-08T00:51:30,338 DEBUG [FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-08T00:51:30,340 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~1.14 KB/1170, heapSize ~2.11 KB/2160, currentSize=0 B/0 for cc424ad19f85f4364566ce5df7b6c54d in 1314ms, sequenceid=23, compaction requested=false
2024-12-08T00:51:30,340 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for cc424ad19f85f4364566ce5df7b6c54d:
2024-12-08T00:51:30,340 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing cc424ad19f85f4364566ce5df7b6c54d, disabling compactions & flushes
2024-12-08T00:51:30,340 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d.
2024-12-08T00:51:30,341 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d.
2024-12-08T00:51:30,341 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d. after waiting 0 ms
2024-12-08T00:51:30,341 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d.
2024-12-08T00:51:30,342 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733619088815.cc424ad19f85f4364566ce5df7b6c54d.
2024-12-08T00:51:30,342 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for cc424ad19f85f4364566ce5df7b6c54d: Waiting for close lock at 1733619090340Disabling compacts and flushes for region at 1733619090340Disabling writes for close at 1733619090341 (+1 ms)Writing region close event to WAL at 1733619090342 (+1 ms)Closed at 1733619090342
2024-12-08T00:51:30,343 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,343 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,343 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,344 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,344 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,348 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741859_1035 (size=16527)
2024-12-08T00:51:30,349 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741859_1035 (size=16527)
2024-12-08T00:51:30,350 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741859_1035 (size=16527)
2024-12-08T00:51:30,353 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/c06dbcec-c33c-723a-9712-db40ef27fff3/testFlushSequenceIdIsGreaterThanAllEditsInHFile
2024-12-08T00:51:30,353 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: wal:(num 1733619088963)
2024-12-08T00:51:30,361 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=215 (was 207)
Potentially hanging thread: AsyncFSWAL-1-3
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
    app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: AsyncFSWAL-1-1
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
    app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-350635737_22 at /127.0.0.1:52712 [Waiting for operation #6]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:37940 [Waiting for operation #11]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:58130 [Waiting for operation #6]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: AsyncFSWAL-1-2
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
    app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=457 (was 425) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=91 (was 99), ProcessCount=11 (was 11), AvailableMemoryMB=17961 (was 17986)
2024-12-08T00:51:30,371 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=215, OpenFileDescriptor=457, MaxFileDescriptor=1048576, SystemLoadAverage=91, ProcessCount=11, AvailableMemoryMB=17960
2024-12-08T00:51:30,381 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741861_1037 (size=7)
2024-12-08T00:51:30,382 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741861_1037 (size=7)
2024-12-08T00:51:30,382 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741861_1037 (size=7)
2024-12-08T00:51:30,383 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8
2024-12-08T00:51:30,383 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:30,385 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:30,389 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-08T00:51:30,390 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/oldWALs, maxLogs=1760
2024-12-08T00:51:30,390 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619090390
2024-12-08T00:51:30,397 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090390
2024-12-08T00:51:30,398 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:43783:43783)]
2024-12-08T00:51:30,399 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619090398
2024-12-08T00:51:30,407 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619090399
2024-12-08T00:51:30,411 WARN [Time-limited test {}] wal.AbstractProtobufLogWriter(199): Init output failed, path=hdfs://localhost:41187/user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090399
java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?]
    at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?]
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy42.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] 
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy43.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 41 more 2024-12-08T00:51:30,413 DEBUG [Time-limited test {}] wal.FSHLogProvider(93): Error instantiating log writer. java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?] at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?] at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?] at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?] at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?] 
at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?] at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at 
org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy42.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] 
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy43.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] 
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?]
    ... 41 more
2024-12-08T00:51:30,427 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=242 (was 215)
Potentially hanging thread: FSHLog-0-hdfs://localhost:41187/user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029-prefix:default
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:39559, 127.0.0.1:45199]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039, type=LAST_IN_PIPELINE
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    java.base@17.0.11/java.lang.Object.wait(Object.java:338)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:45199, 127.0.0.1:38945]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: ResponseProcessor for block BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:38022 [Receiving block BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: ResponseProcessor for block BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:58226 [Receiving block BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090398 block BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:38020 [Receiving block BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090390 block BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:52814 [Receiving block BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:52828 [Receiving block BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:38945]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038, type=LAST_IN_PIPELINE
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    java.base@17.0.11/java.lang.Object.wait(Object.java:338)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:45199]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:58220 [Receiving block BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=483 (was 457) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=91 (was 91), ProcessCount=11 (was 11), AvailableMemoryMB=17952 (was 17960)
2024-12-08T00:51:30,435 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=242, OpenFileDescriptor=483, MaxFileDescriptor=1048576, SystemLoadAverage=91, ProcessCount=11, AvailableMemoryMB=17950
2024-12-08T00:51:30,444 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741864_1040 (size=7)
2024-12-08T00:51:30,444 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741864_1040 (size=7)
2024-12-08T00:51:30,445 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741864_1040 (size=7)
2024-12-08T00:51:30,446 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8
2024-12-08T00:51:30,446 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:30,448 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:30,453 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-08T00:51:30,454 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/41aafabd-eed2-b997-8de8-689378d6a57c/testWALCoprocessorLoaded, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/41aafabd-eed2-b997-8de8-689378d6a57c/oldWALs, maxLogs=1760
2024-12-08T00:51:30,454 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619090454
2024-12-08T00:51:30,487 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/41aafabd-eed2-b997-8de8-689378d6a57c/testWALCoprocessorLoaded/wal.1733619090454
2024-12-08T00:51:30,489 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:43783:43783)]
2024-12-08T00:51:30,491 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,491 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,492 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,492 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,492 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,497 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741865_1041 (size=93)
2024-12-08T00:51:30,497 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741865_1041 (size=93)
2024-12-08T00:51:30,498 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741865_1041 (size=93)
2024-12-08T00:51:30,502 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/41aafabd-eed2-b997-8de8-689378d6a57c/oldWALs
2024-12-08T00:51:30,502 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733619090454)
2024-12-08T00:51:30,511 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=247 (was 242) - Thread LEAK? -, OpenFileDescriptor=495 (was 483) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=91 (was 91), ProcessCount=11 (was 11), AvailableMemoryMB=17944 (was 17950)
2024-12-08T00:51:30,520 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=247, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=91, ProcessCount=11, AvailableMemoryMB=17944
2024-12-08T00:51:30,535 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741866_1042 (size=7)
2024-12-08T00:51:30,535 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741866_1042 (size=7)
2024-12-08T00:51:30,535 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741866_1042 (size=7)
2024-12-08T00:51:30,537 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8
2024-12-08T00:51:30,537 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:30,539 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:30,545 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-08T00:51:30,545 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/039a540f-afe2-7bf4-281e-aaab1a62afe3/testSyncNoAppend, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/039a540f-afe2-7bf4-281e-aaab1a62afe3/testSyncNoAppend, maxLogs=1760
2024-12-08T00:51:30,546 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619090546
2024-12-08T00:51:30,555 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/039a540f-afe2-7bf4-281e-aaab1a62afe3/testSyncNoAppend/wal.1733619090546
2024-12-08T00:51:30,558 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:40445:40445),(127.0.0.1/127.0.0.1:43783:43783)]
2024-12-08T00:51:30,559 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,559 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,559 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,560 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,560 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,562 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741867_1043 (size=93)
2024-12-08T00:51:30,562 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741867_1043 (size=93)
2024-12-08T00:51:30,562 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741867_1043 (size=93)
2024-12-08T00:51:30,565 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/039a540f-afe2-7bf4-281e-aaab1a62afe3/testSyncNoAppend
2024-12-08T00:51:30,565 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733619090546)
2024-12-08T00:51:30,574 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=252 (was 247) - Thread LEAK? -, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=91 (was 91), ProcessCount=11 (was 11), AvailableMemoryMB=17940 (was 17944)
2024-12-08T00:51:30,582 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=252, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=91, ProcessCount=11, AvailableMemoryMB=17939
2024-12-08T00:51:30,592 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741868_1044 (size=7)
2024-12-08T00:51:30,593 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741868_1044 (size=7)
2024-12-08T00:51:30,593 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741868_1044 (size=7)
2024-12-08T00:51:30,594 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8
2024-12-08T00:51:30,595 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:30,596 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:30,602 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-08T00:51:30,602 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/b2211c31-7685-98cd-bf4a-142259851f35/testWriteEntryCanBeNull, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/b2211c31-7685-98cd-bf4a-142259851f35/testWriteEntryCanBeNull, maxLogs=1760
2024-12-08T00:51:30,602 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619090602
2024-12-08T00:51:30,609 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/b2211c31-7685-98cd-bf4a-142259851f35/testWriteEntryCanBeNull/wal.1733619090602
2024-12-08T00:51:30,610 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:40445:40445)]
2024-12-08T00:51:30,611 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,611 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,611 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,611 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,611 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:30,614 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741869_1045 (size=93)
2024-12-08T00:51:30,614 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741869_1045 (size=93)
2024-12-08T00:51:30,614 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741869_1045 (size=93)
2024-12-08T00:51:30,616 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/b2211c31-7685-98cd-bf4a-142259851f35/testWriteEntryCanBeNull
2024-12-08T00:51:30,616 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733619090602)
2024-12-08T00:51:30,626 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=257 (was 252) - Thread LEAK? -, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=91 (was 91), ProcessCount=11 (was 11), AvailableMemoryMB=17935 (was 17939)
2024-12-08T00:51:30,635 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=257, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=91, ProcessCount=11, AvailableMemoryMB=17934
2024-12-08T00:51:30,648 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741870_1046 (size=7)
2024-12-08T00:51:30,648 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741870_1046 (size=7)
2024-12-08T00:51:30,649 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741870_1046 (size=7)
2024-12-08T00:51:30,650 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800 with version=8
2024-12-08T00:51:30,650 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:30,652 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-08T00:51:30,657 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-08T00:51:30,658 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/testUnflushedSeqIdTrackingWithAsyncWal, archiveDir=hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/oldWALs, maxLogs=1760
2024-12-08T00:51:30,658 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733619090658
2024-12-08T00:51:30,669 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/testUnflushedSeqIdTrackingWithAsyncWal/wal.1733619090658
2024-12-08T00:51:30,675 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36555:36555),(127.0.0.1/127.0.0.1:43783:43783),(127.0.0.1/127.0.0.1:40445:40445)]
2024-12-08T00:51:30,676 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 397e8d62e0f22bb81112c4177e5f2f33, NAME => 'table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34
2024-12-08T00:51:30,690 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741872_1048 (size=40)
2024-12-08T00:51:30,690 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741872_1048 (size=40)
2024-12-08T00:51:30,690 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741872_1048 (size=40)
2024-12-08T00:51:30,691 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-08T00:51:30,695 INFO [StoreOpener-397e8d62e0f22bb81112c4177e5f2f33-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,697 INFO [StoreOpener-397e8d62e0f22bb81112c4177e5f2f33-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 397e8d62e0f22bb81112c4177e5f2f33 columnFamilyName b
2024-12-08T00:51:30,697 DEBUG [StoreOpener-397e8d62e0f22bb81112c4177e5f2f33-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-08T00:51:30,698 INFO [StoreOpener-397e8d62e0f22bb81112c4177e5f2f33-1 {}] regionserver.HStore(327): Store=397e8d62e0f22bb81112c4177e5f2f33/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-08T00:51:30,699 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,700 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/table/397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,700 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/table/397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,701 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/95227bf7-b2d1-629b-6caa-0adf2135cef8/data/default/table/397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,702 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,702 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,704 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,708 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:41187/user/jenkins/test-data/95227bf7-b2d1-629b-6caa-0adf2135cef8/data/default/table/397e8d62e0f22bb81112c4177e5f2f33/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-12-08T00:51:30,709 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 397e8d62e0f22bb81112c4177e5f2f33; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=63276912, jitterRate=-0.057100534439086914}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-08T00:51:30,712 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 397e8d62e0f22bb81112c4177e5f2f33: Writing region info on filesystem at 1733619090691Initializing all the Stores at 1733619090692 (+1 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733619090692Cleaning up temporary data from old regions at 1733619090702 (+10 ms)Region opened successfully at 1733619090711 (+9 ms)
2024-12-08T00:51:30,712 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 397e8d62e0f22bb81112c4177e5f2f33, disabling compactions & flushes
2024-12-08T00:51:30,712 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.
2024-12-08T00:51:30,712 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.
2024-12-08T00:51:30,712 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33. after waiting 0 ms
2024-12-08T00:51:30,712 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.
2024-12-08T00:51:30,713 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.
2024-12-08T00:51:30,713 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 397e8d62e0f22bb81112c4177e5f2f33: Waiting for close lock at 1733619090712Disabling compacts and flushes for region at 1733619090712Disabling writes for close at 1733619090712Writing region close event to WAL at 1733619090713 (+1 ms)Closed at 1733619090713
2024-12-08T00:51:30,715 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 397e8d62e0f22bb81112c4177e5f2f33, NAME => 'table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.', STARTKEY => '', ENDKEY => ''}
2024-12-08T00:51:30,716 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,716 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-08T00:51:30,716 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,716 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,719 INFO [StoreOpener-397e8d62e0f22bb81112c4177e5f2f33-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,720 INFO [StoreOpener-397e8d62e0f22bb81112c4177e5f2f33-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 397e8d62e0f22bb81112c4177e5f2f33 columnFamilyName b
2024-12-08T00:51:30,720 DEBUG [StoreOpener-397e8d62e0f22bb81112c4177e5f2f33-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-08T00:51:30,721 INFO [StoreOpener-397e8d62e0f22bb81112c4177e5f2f33-1 {}] regionserver.HStore(327): Store=397e8d62e0f22bb81112c4177e5f2f33/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-08T00:51:30,721 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,722 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/table/397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,723 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/data/default/table/397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,725 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:41187/user/jenkins/test-data/95227bf7-b2d1-629b-6caa-0adf2135cef8/data/default/table/397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,725 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,725 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,728 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,729 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 397e8d62e0f22bb81112c4177e5f2f33; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=65079047, jitterRate=-0.030246630311012268}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-08T00:51:30,729 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for 397e8d62e0f22bb81112c4177e5f2f33
2024-12-08T00:51:30,731 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 397e8d62e0f22bb81112c4177e5f2f33: Running coprocessor pre-open hook at 1733619090716Writing region info on filesystem at 1733619090716Initializing all the Stores at 1733619090718 (+2 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733619090718Cleaning up temporary data from old regions at 1733619090725 (+7 ms)Running coprocessor post-open hooks at 1733619090729 (+4 ms)Region opened successfully at 1733619090730 (+1 ms)
2024-12-08T00:51:31,326 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor
2024-12-08T00:51:31,327 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor Metrics about HBase WALObservers
2024-12-08T00:51:31,328 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=TableRequests_Namespace_default_table_table
2024-12-08T00:51:31,328 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=TableRequests_Namespace_default_table_table Metrics about Tables on a single HBase RegionServer
2024-12-08T00:51:33,737 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1722): Closing 397e8d62e0f22bb81112c4177e5f2f33, disabling compactions & flushes
2024-12-08T00:51:33,737 INFO [pool-106-thread-1 {}] regionserver.HRegion(1755): Closing region table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.
2024-12-08T00:51:33,737 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.
2024-12-08T00:51:33,738 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1843): Acquired close lock on table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33. after waiting 0 ms
2024-12-08T00:51:33,738 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1853): Updates disabled for region table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.
2024-12-08T00:51:33,739 INFO [pool-106-thread-1 {}] regionserver.HRegion(2902): Flushing 397e8d62e0f22bb81112c4177e5f2f33 1/1 column families, dataSize=48 B heapSize=448 B
2024-12-08T00:51:34,280 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties
2024-12-08T00:51:36,764 DEBUG [pool-106-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/397e8d62e0f22bb81112c4177e5f2f33/.tmp/b/2e82c93d416b4b2fadc904375ca2a101 is 28, key is b/b:b/1733619090733/Put/seqid=0
2024-12-08T00:51:36,770 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741873_1049 (size=4945)
2024-12-08T00:51:36,770 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741873_1049 (size=4945)
2024-12-08T00:51:36,770 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741873_1049 (size=4945)
2024-12-08T00:51:36,771 INFO [pool-106-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/397e8d62e0f22bb81112c4177e5f2f33/.tmp/b/2e82c93d416b4b2fadc904375ca2a101
2024-12-08T00:51:36,783 DEBUG [pool-106-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/397e8d62e0f22bb81112c4177e5f2f33/.tmp/b/2e82c93d416b4b2fadc904375ca2a101 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/397e8d62e0f22bb81112c4177e5f2f33/b/2e82c93d416b4b2fadc904375ca2a101
2024-12-08T00:51:36,793 INFO [pool-106-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/397e8d62e0f22bb81112c4177e5f2f33/b/2e82c93d416b4b2fadc904375ca2a101, entries=1, sequenceid=6, filesize=4.8 K
2024-12-08T00:51:36,795 INFO  [pool-106-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 397e8d62e0f22bb81112c4177e5f2f33 in 3057ms, sequenceid=6, compaction requested=false
2024-12-08T00:51:36,800 DEBUG [pool-106-thread-1 {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:41187/user/jenkins/test-data/95227bf7-b2d1-629b-6caa-0adf2135cef8/data/default/table/397e8d62e0f22bb81112c4177e5f2f33/recovered.edits/9.seqid, newMaxSeqId=9, maxSeqId=1
2024-12-08T00:51:36,801 INFO  [pool-106-thread-1 {}] regionserver.HRegion(1973): Closed table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33.
2024-12-08T00:51:36,801 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1676): Region close journal for 397e8d62e0f22bb81112c4177e5f2f33: Waiting for close lock at 1733619093737Running coprocessor pre-close hooks at 1733619093737Disabling compacts and flushes for region at 1733619093737Disabling writes for close at 1733619093738 (+1 ms)Obtaining lock to block concurrent updates at 1733619093739 (+1 ms)Preparing flush snapshotting stores in 397e8d62e0f22bb81112c4177e5f2f33 at 1733619093739Finished memstore snapshotting table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33., syncing WAL and waiting on mvcc, flushsize=dataSize=48, getHeapSize=432, getOffHeapSize=0, getCellsCount=2 at 1733619093740 (+1 ms)Flushing stores of table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33. at 1733619096741 (+3001 ms)Flushing 397e8d62e0f22bb81112c4177e5f2f33/b: creating writer at 1733619096742 (+1 ms)Flushing 397e8d62e0f22bb81112c4177e5f2f33/b: appending metadata at 1733619096763 (+21 ms)Flushing 397e8d62e0f22bb81112c4177e5f2f33/b: closing flushed file at 1733619096763Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@1c62f15b: reopening flushed file at 1733619096782 (+19 ms)Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 397e8d62e0f22bb81112c4177e5f2f33 in 3057ms, sequenceid=6, compaction requested=false at 1733619096795 (+13 ms)Writing region close event to WAL at 1733619096795Running coprocessor post-close hooks at 1733619096801 (+6 ms)Closed at 1733619096801
2024-12-08T00:51:36,801 INFO  [pool-106-thread-1 {}] wal.AbstractTestFSWAL(620): Close result:{[B@1db47355=[/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/data/default/table/397e8d62e0f22bb81112c4177e5f2f33/b/2e82c93d416b4b2fadc904375ca2a101]}
2024-12-08T00:51:36,802 WARN  [Time-limited test {}] regionserver.HRegion(1707): Region table,,1733619090676.397e8d62e0f22bb81112c4177e5f2f33. already closed
2024-12-08T00:51:36,802 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 397e8d62e0f22bb81112c4177e5f2f33: Waiting for close lock at 1733619096801
2024-12-08T00:51:36,802 INFO  [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:36,802 INFO  [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:36,802 INFO  [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:36,802 INFO  [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:36,802 INFO  [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-08T00:51:36,805 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39559 is added to blk_1073741871_1047 (size=1206)
2024-12-08T00:51:36,805 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38945 is added to blk_1073741871_1047 (size=1206)
2024-12-08T00:51:36,806 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45199 is added to blk_1073741871_1047 (size=1206)
2024-12-08T00:51:36,808 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/2e2be337-af2b-2473-27fc-ec9ee56fd800/oldWALs
2024-12-08T00:51:36,808 INFO  [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733619090658)
2024-12-08T00:51:36,816 INFO  [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=261 (was 257)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:34208 [Waiting for operation #3]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:44034 [Waiting for operation #3]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=503 (was 495) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=84 (was 91), ProcessCount=11 (was 11), AvailableMemoryMB=17909 (was 17934)
2024-12-08T00:51:36,816 INFO  [Time-limited test {}] hbase.HBaseTestingUtil(1019): Shutting down minicluster
2024-12-08T00:51:36,818 WARN  [PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039, type=LAST_IN_PIPELINE {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-1227637935-172.17.0.2-1733619071266:1073741863
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-08T00:51:36,818 WARN  [ResponseProcessor for block BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039
java.io.IOException: Bad response ERROR for BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039 from datanode DatanodeInfoWithStorage[127.0.0.1:45199,DS-ad358436-12a8-4e7a-bdde-c750d5647305,DISK]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-08T00:51:36,819 WARN  [DataStreamer for file /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090398 block BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039 {}] hdfs.DataStreamer(1731): Error Recovery for BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039 in pipeline [DatanodeInfoWithStorage[127.0.0.1:38945,DS-474d2394-7f95-48cf-aa86-c41e2400ee6e,DISK], DatanodeInfoWithStorage[127.0.0.1:39559,DS-6b32010d-891b-447b-a8a6-5a84dde2e565,DISK], DatanodeInfoWithStorage[127.0.0.1:45199,DS-ad358436-12a8-4e7a-bdde-c750d5647305,DISK]]: datanode 2(DatanodeInfoWithStorage[127.0.0.1:45199,DS-ad358436-12a8-4e7a-bdde-c750d5647305,DISK]) is bad.
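The ResourceChecker entry brackets each test with thread and file-descriptor counts to flag leaks, and HBaseTestingUtil then tears the minicluster down; the WARNs that follow are open WAL streams dying as the DFS pipeline shuts down. A minimal sketch of that lifecycle, assuming the HBaseTestingUtil API named in the log line above (the table-creation step is an illustrative assumption, not the test's code):

    // Hypothetical sketch of the minicluster lifecycle these tests run inside.
    import org.apache.hadoop.hbase.HBaseTestingUtil;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.util.Bytes;

    public class MiniClusterLifecycle {
      public static void main(String[] args) throws Exception {
        HBaseTestingUtil util = new HBaseTestingUtil();
        util.startMiniCluster();        // brings up NameNode, DataNodes, Master, RegionServer
        try {
          util.createTable(TableName.valueOf("table"), Bytes.toBytes("b"));
          // ... exercise WAL/flush behaviour here ...
        } finally {
          util.shutdownMiniCluster();   // produces teardown WARNs like those above
        }
      }
    }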
2024-12-08T00:51:36,819 WARN  [PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:39559, 127.0.0.1:45199] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-08T00:51:36,819 WARN  [DataXceiver for client DFSClient_NONMAPREDUCE_-282733047_22 at /127.0.0.1:52828 [Receiving block BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039] {}] datanode.BlockReceiver(316): Block 1073741863 has not released the reserved bytes. Releasing 2097067 bytes as part of close.
2024-12-08T00:51:36,819 WARN  [PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:45199] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-08T00:51:36,823 WARN  [DataStreamer for file /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090398 block BP-1227637935-172.17.0.2-1733619071266:blk_1073741863_1039 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090398 (inode 16549) Holder DFSClient_NONMAPREDUCE_-282733047_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    ... [the reflection/proxy frame sequence above (Method.invoke -> HFileSystem$1.invoke -> $Proxy46.getAdditionalDatanode, first via GeneratedMethodAccessor79, then via NativeMethodAccessorImpl.invoke0/invoke) repeats verbatim many times; duplicate frames elided] ...
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
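This DataStreamer trace is the intended failure for testFailedToCreateWALIfParentRenamed: the WAL's parent directory is renamed while the stream is still open, so when pipeline recovery asks the NameNode for an additional datanode, the file no longer exists under its original path and the lease check fails with "File does not exist". A hedged sketch of the underlying HDFS behaviour (paths and write size are illustrative, not taken from the test; the failure mode assumes an HDFS-backed FileSystem):

    // Hypothetical sketch: closing a stream after its parent directory was renamed.
    // Against HDFS this is expected to fail roughly as in the trace above.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ParentRenamedExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);  // assumes fs.defaultFS points at HDFS
        Path dir = new Path("/testFailedToCreateWALIfParentRenamed");
        fs.mkdirs(dir);
        FSDataOutputStream out = fs.create(new Path(dir, "wal.1"));
        out.write(new byte[1024]);
        fs.rename(dir, new Path("/renamed"));  // invalidates the writer's path
        out.close();                           // expected: RemoteException (file/lease gone)
      }
    }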
2024-12-08T00:51:36,824 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090398 with renewLeaseKey: DEFAULT_16549
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090398 (inode 16549) Holder DFSClient_NONMAPREDUCE_-282733047_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    ... [the reflection/proxy frame sequence above (Method.invoke -> HFileSystem$1.invoke -> $Proxy46.getAdditionalDatanode, first via GeneratedMethodAccessor79, then via NativeMethodAccessorImpl.invoke0/invoke) repeats verbatim many times; duplicate frames elided] ...
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
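The ERROR above is the client-side echo of the same condition: during shutdown the DFSClient tries to close the still-open stream and surfaces the same RemoteException. Code that expects this (for example, after deliberately renaming a parent directory) can unwrap the remote class name back into a local exception type. A sketch, assuming the server-side exception maps to FileNotFoundException, which may not hold in every case:

    // Hypothetical sketch: tolerating a known-benign close failure by unwrapping
    // the RemoteException into its local exception type where possible.
    import java.io.FileNotFoundException;
    import java.io.IOException;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.ipc.RemoteException;

    public class SafeClose {
      static void closeQuietlyIfRenamedAway(FSDataOutputStream out) throws IOException {
        try {
          out.close();
        } catch (RemoteException re) {
          // Resolve the server-side class name back to a local exception type;
          // returns the RemoteException itself if the class is not in the list.
          IOException unwrapped = re.unwrapRemoteException(FileNotFoundException.class);
          if (unwrapped instanceof FileNotFoundException) {
            return; // file vanished underneath us (e.g. parent renamed): ignore on close
          }
          throw unwrapped;
        }
      }
    }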
2024-12-08T00:51:36,826 WARN  [PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:38945] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-1227637935-172.17.0.2-1733619071266:1073741862
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-08T00:51:36,826 WARN  [ResponseProcessor for block BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038
java.io.IOException: Bad response ERROR for BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038 from datanode DatanodeInfoWithStorage[127.0.0.1:45199,DS-ad358436-12a8-4e7a-bdde-c750d5647305,DISK]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-08T00:51:36,827 WARN  [DataStreamer for file /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090390 block BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038 {}] hdfs.DataStreamer(1731): Error Recovery for BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038 in pipeline [DatanodeInfoWithStorage[127.0.0.1:39559,DS-6b32010d-891b-447b-a8a6-5a84dde2e565,DISK], DatanodeInfoWithStorage[127.0.0.1:45199,DS-ad358436-12a8-4e7a-bdde-c750d5647305,DISK], DatanodeInfoWithStorage[127.0.0.1:38945,DS-474d2394-7f95-48cf-aa86-c41e2400ee6e,DISK]]: datanode 1(DatanodeInfoWithStorage[127.0.0.1:45199,DS-ad358436-12a8-4e7a-bdde-c750d5647305,DISK]) is bad.
2024-12-08T00:51:36,827 WARN  [PacketResponder: BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:45199, 127.0.0.1:38945] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-08T00:51:36,828 WARN  [DataStreamer for file /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090390 block BP-1227637935-172.17.0.2-1733619071266:blk_1073741862_1038 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090390 (inode 16548) Holder DFSClient_NONMAPREDUCE_-282733047_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
	at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
	at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
	[... the preceding five reflection/proxy frames (GeneratedMethodAccessor79.invoke, DelegatingMethodAccessorImpl.invoke, Method.invoke, HFileSystem$1.invoke, jdk.proxy2.$Proxy46.getAdditionalDatanode) repeat verbatim many more times; duplicates elided ...]
	at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-08T00:51:36,829 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090390 with renewLeaseKey: DEFAULT_16548
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/38e4374f-9851-a3a2-5795-2757f410e029/testFailedToCreateWALIfParentRenamed/wal.1733619090390 (inode 16548) Holder DFSClient_NONMAPREDUCE_-282733047_22 does not have any open files.
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
	at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
	at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
	at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
	[... the same five reflection/proxy frames repeat verbatim many more times; duplicates elided ...]
	at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-08T00:51:36,868 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@4b2b884e{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-08T00:51:36,872 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@fa4aa4c{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-08T00:51:36,872 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-08T00:51:36,872 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@47ddd06a{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-08T00:51:36,873 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@4c49fcd3{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir/,STOPPED}
2024-12-08T00:51:36,875 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
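[Editor's note] Both traces above record the same failure mode: once the WAL file's parent directory is renamed, the old path no longer resolves to an open file, so the NameNode's lease check (FSNamesystem.checkLease) rejects both the pipeline-recovery call (getAdditionalDatanode) and the final close with "does not have any open files." The recurring HFileSystem$1.invoke frames appear to be HBase's dynamic-proxy wrapper around the DFS client protocol re-entering itself through reflection. The following is a minimal sketch of the scenario the test exercises, assuming a MiniDFSCluster test environment; the class name, paths, and write size are illustrative and not taken from the actual HBase test code:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class ParentRenamedWalSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Spin up an in-process HDFS with three datanodes, as in the log above.
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
        try {
          FileSystem fs = cluster.getFileSystem();
          Path parent = new Path("/testFailedToCreateWALIfParentRenamed");
          Path wal = new Path(parent, "wal.1");
          fs.mkdirs(parent);
          // Open the WAL-like file and keep the stream (and its NameNode lease) open.
          FSDataOutputStream out = fs.create(wal);
          out.write(new byte[16]);
          out.hsync();
          // Rename the parent directory while the file is still open for write.
          fs.rename(parent, new Path("/renamed"));
          // Closing (or any pipeline recovery) now references the stale path, so the
          // NameNode's lease check may fail with the RemoteException seen above:
          // "File does not exist ... does not have any open files."
          out.close();
        } finally {
          cluster.shutdown();
        }
      }
    }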
2024-12-08T00:51:36,875 WARN [BP-1227637935-172.17.0.2-1733619071266 heartbeating to localhost/127.0.0.1:41187 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-08T00:51:36,876 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-08T00:51:36,876 WARN [BP-1227637935-172.17.0.2-1733619071266 heartbeating to localhost/127.0.0.1:41187 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-1227637935-172.17.0.2-1733619071266 (Datanode Uuid 924dfcb1-3b2d-43f3-beb9-fc2331d99411) service to localhost/127.0.0.1:41187
2024-12-08T00:51:36,877 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data5/current/BP-1227637935-172.17.0.2-1733619071266 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-08T00:51:36,877 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data6/current/BP-1227637935-172.17.0.2-1733619071266 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-08T00:51:36,878 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-08T00:51:36,880 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@28637041{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-08T00:51:36,880 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@5e18b7fd{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-08T00:51:36,880 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-08T00:51:36,880 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@243038a3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-08T00:51:36,880 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@443ad5c2{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir/,STOPPED}
2024-12-08T00:51:36,882 WARN [BP-1227637935-172.17.0.2-1733619071266 heartbeating to localhost/127.0.0.1:41187 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-08T00:51:36,882 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-08T00:51:36,882 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-08T00:51:36,882 WARN [BP-1227637935-172.17.0.2-1733619071266 heartbeating to localhost/127.0.0.1:41187 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-1227637935-172.17.0.2-1733619071266 (Datanode Uuid 929cefbf-6d2a-4045-b8e1-081784df30d3) service to localhost/127.0.0.1:41187
2024-12-08T00:51:36,882 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data3/current/BP-1227637935-172.17.0.2-1733619071266 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-08T00:51:36,883 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data4/current/BP-1227637935-172.17.0.2-1733619071266 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-08T00:51:36,883 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-08T00:51:36,885 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@411b19f7{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-08T00:51:36,885 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@64bb503e{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-08T00:51:36,885 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-08T00:51:36,885 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@43a917ce{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-08T00:51:36,885 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@208945{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir/,STOPPED}
2024-12-08T00:51:36,886 WARN [BP-1227637935-172.17.0.2-1733619071266 heartbeating to localhost/127.0.0.1:41187 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-08T00:51:36,886 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-08T00:51:36,886 WARN [BP-1227637935-172.17.0.2-1733619071266 heartbeating to localhost/127.0.0.1:41187 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-1227637935-172.17.0.2-1733619071266 (Datanode Uuid 1100f5f9-6c1f-4acd-9a7c-844d060c3822) service to localhost/127.0.0.1:41187
2024-12-08T00:51:36,886 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-08T00:51:36,887 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data1/current/BP-1227637935-172.17.0.2-1733619071266 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-08T00:51:36,887 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/cluster_1fa2aa1d-92ec-bf66-e6a9-e02c23e034fe/data/data2/current/BP-1227637935-172.17.0.2-1733619071266 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-08T00:51:36,887 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-08T00:51:36,892 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@5599def{hdfs,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-12-08T00:51:36,893 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@493ba8a1{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-08T00:51:36,893 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-08T00:51:36,893 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@61b73bb3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-08T00:51:36,894 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@746f7db{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/97cd7d1a-fb0b-a1a1-8ea2-2a829bb5ab34/hadoop.log.dir/,STOPPED}
2024-12-08T00:51:36,932 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1026): Minicluster is down
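[Editor's note] The tail of the log is the orderly teardown of the HDFS minicluster: each of the three datanodes has its Jetty web context, block-pool service, and disk-usage refresh threads stopped, then the NameNode's web context follows, and the run ends with HBaseTestingUtil reporting "Minicluster is down". A sketch of the start/stop pairing a test typically uses, assuming HBaseTestingUtil keeps the startMiniDFSCluster/shutdownMiniDFSCluster API of its HBaseTestingUtility predecessor (verify against your HBase version):

    import org.apache.hadoop.hbase.HBaseTestingUtil;

    public class MiniClusterTeardownSketch {
      public static void main(String[] args) throws Exception {
        HBaseTestingUtil util = new HBaseTestingUtil();
        // Three datanodes, matching the three block-pool shutdowns logged above.
        util.startMiniDFSCluster(3);
        try {
          // ... test body using util.getDFSCluster().getFileSystem() ...
        } finally {
          // Stops the datanodes and then the NameNode, producing the
          // "Stopped ..." / "Ending block pool service ..." lines seen above.
          util.shutdownMiniDFSCluster();
        }
      }
    }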