2024-12-02 14:20:49,578 main DEBUG Apache Log4j Core 2.17.2 initializing configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f 2024-12-02 14:20:49,595 main DEBUG Took 0.015375 seconds to load 1 plugins from package org.apache.hadoop.hbase.logging 2024-12-02 14:20:49,596 main DEBUG PluginManager 'Core' found 129 plugins 2024-12-02 14:20:49,596 main DEBUG PluginManager 'Level' found 0 plugins 2024-12-02 14:20:49,597 main DEBUG PluginManager 'Lookup' found 16 plugins 2024-12-02 14:20:49,599 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,610 main DEBUG PluginManager 'TypeConverter' found 26 plugins 2024-12-02 14:20:49,626 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.util.MBeans", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,628 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,628 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.logging.TestJul2Slf4j", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,629 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,629 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.zookeeper", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,630 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,633 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,634 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,635 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,636 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,637 main DEBUG LoggerConfig$Builder(additivity="false", level="WARN", levelAndRefs="null", name="org.apache.directory", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,637 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,638 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.ipc.FailedServers", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,639 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 
2024-12-02 14:20:49,639 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsConfig", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,640 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,641 main DEBUG LoggerConfig$Builder(additivity="null", level="INFO", levelAndRefs="null", name="org.apache.hadoop.hbase.ScheduledChore", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,641 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,642 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.regionserver.RSRpcServices", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,642 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,643 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,643 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,644 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,644 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-12-02 14:20:49,645 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hbase.thirdparty.io.netty.channel", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,645 main DEBUG Building Plugin[name=root, class=org.apache.logging.log4j.core.config.LoggerConfig$RootLogger]. 2024-12-02 14:20:49,647 main DEBUG LoggerConfig$RootLogger$Builder(additivity="null", level="null", levelAndRefs="INFO,Console", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-12-02 14:20:49,649 main DEBUG Building Plugin[name=loggers, class=org.apache.logging.log4j.core.config.LoggersPlugin]. 2024-12-02 14:20:49,651 main DEBUG createLoggers(={org.apache.hadoop.metrics2.util.MBeans, org.apache.hadoop.hbase.logging.TestJul2Slf4j, org.apache.zookeeper, org.apache.hadoop.metrics2.impl.MetricsSinkAdapter, org.apache.hadoop.metrics2.impl.MetricsSystemImpl, org.apache.directory, org.apache.hadoop.hbase.ipc.FailedServers, org.apache.hadoop.metrics2.impl.MetricsConfig, org.apache.hadoop.hbase.ScheduledChore, org.apache.hadoop.hbase.regionserver.RSRpcServices, org.apache.hadoop, org.apache.hadoop.hbase, org.apache.hbase.thirdparty.io.netty.channel, root}) 2024-12-02 14:20:49,652 main DEBUG Building Plugin[name=layout, class=org.apache.logging.log4j.core.layout.PatternLayout]. 
2024-12-02 14:20:49,653 main DEBUG PatternLayout$Builder(pattern="%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n", PatternSelector=null, Configuration(PropertiesConfig), Replace=null, charset="null", alwaysWriteExceptions="null", disableAnsi="null", noConsoleNoAnsi="null", header="null", footer="null") 2024-12-02 14:20:49,654 main DEBUG PluginManager 'Converter' found 47 plugins 2024-12-02 14:20:49,665 main DEBUG Building Plugin[name=appender, class=org.apache.hadoop.hbase.logging.HBaseTestAppender]. 2024-12-02 14:20:49,668 main DEBUG HBaseTestAppender$Builder(target="SYSTEM_ERR", maxSize="1G", bufferedIo="null", bufferSize="null", immediateFlush="null", ignoreExceptions="null", PatternLayout(%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n), name="Console", Configuration(PropertiesConfig), Filter=null, ={}) 2024-12-02 14:20:49,670 main DEBUG Starting HBaseTestOutputStreamManager SYSTEM_ERR 2024-12-02 14:20:49,671 main DEBUG Building Plugin[name=appenders, class=org.apache.logging.log4j.core.config.AppendersPlugin]. 2024-12-02 14:20:49,671 main DEBUG createAppenders(={Console}) 2024-12-02 14:20:49,672 main DEBUG Configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f initialized 2024-12-02 14:20:49,673 main DEBUG Starting configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f 2024-12-02 14:20:49,673 main DEBUG Started configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f OK. 2024-12-02 14:20:49,674 main DEBUG Shutting down OutputStreamManager SYSTEM_OUT.false.false-1 2024-12-02 14:20:49,674 main DEBUG OutputStream closed 2024-12-02 14:20:49,674 main DEBUG Shut down OutputStreamManager SYSTEM_OUT.false.false-1, all resources released: true 2024-12-02 14:20:49,674 main DEBUG Appender DefaultConsole-1 stopped with status true 2024-12-02 14:20:49,675 main DEBUG Stopped org.apache.logging.log4j.core.config.DefaultConfiguration@61001b64 OK 2024-12-02 14:20:49,781 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6 2024-12-02 14:20:49,783 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=StatusLogger 2024-12-02 14:20:49,785 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=ContextSelector 2024-12-02 14:20:49,786 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name= 2024-12-02 14:20:49,787 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.directory 2024-12-02 14:20:49,787 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSinkAdapter 2024-12-02 14:20:49,787 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.zookeeper 2024-12-02 14:20:49,788 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.logging.TestJul2Slf4j 2024-12-02 14:20:49,788 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSystemImpl 2024-12-02 14:20:49,789 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.util.MBeans 2024-12-02 14:20:49,790 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase 2024-12-02 14:20:49,790 main DEBUG Registering 
MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop 2024-12-02 14:20:49,791 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ipc.FailedServers 2024-12-02 14:20:49,791 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.regionserver.RSRpcServices 2024-12-02 14:20:49,791 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsConfig 2024-12-02 14:20:49,792 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hbase.thirdparty.io.netty.channel 2024-12-02 14:20:49,792 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ScheduledChore 2024-12-02 14:20:49,793 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Appenders,name=Console 2024-12-02 14:20:49,796 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps. 2024-12-02 14:20:49,797 main DEBUG Reconfiguration complete for context[name=1dbd16a6] at URI jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-logging/target/hbase-logging-4.0.0-alpha-1-SNAPSHOT-tests.jar!/log4j2.properties (org.apache.logging.log4j.core.LoggerContext@40db2a24) with optional ClassLoader: null 2024-12-02 14:20:49,797 main DEBUG Shutdown hook enabled. Registering a new one. 2024-12-02 14:20:49,798 main DEBUG LoggerContext[name=1dbd16a6, org.apache.logging.log4j.core.LoggerContext@40db2a24] started OK. 2024-12-02T14:20:50,101 DEBUG [main {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a 2024-12-02 14:20:50,104 main DEBUG AsyncLogger.ThreadNameStrategy=UNCACHED (user specified null, default is UNCACHED) 2024-12-02 14:20:50,105 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps. 
2024-12-02T14:20:50,119 INFO [main {}] hbase.HBaseClassTestRule(94): Test class org.apache.hadoop.hbase.regionserver.wal.TestFSHLog timeout: 13 mins 2024-12-02T14:20:50,149 INFO [Time-limited test {}] hbase.HBaseZKTestingUtil(84): Created new mini-cluster data directory: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8, deleteOnExit=true 2024-12-02T14:20:50,150 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting test.cache.data to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/test.cache.data in system properties and HBase conf 2024-12-02T14:20:50,151 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.tmp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.tmp.dir in system properties and HBase conf 2024-12-02T14:20:50,152 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.log.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir in system properties and HBase conf 2024-12-02T14:20:50,152 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.local.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/mapreduce.cluster.local.dir in system properties and HBase conf 2024-12-02T14:20:50,153 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.temp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/mapreduce.cluster.temp.dir in system properties and HBase conf 2024-12-02T14:20:50,153 INFO [Time-limited test {}] hbase.HBaseTestingUtil(738): read short circuit is OFF 2024-12-02T14:20:50,262 WARN [Time-limited test {}] util.NativeCodeLoader(60): Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 2024-12-02T14:20:50,380 DEBUG [Time-limited test {}] fs.HFileSystem(310): The file system is not a DistributedFileSystem. 
Skipping on block location reordering 2024-12-02T14:20:50,384 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-labels.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/yarn.node-labels.fs-store.root-dir in system properties and HBase conf 2024-12-02T14:20:50,384 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-attribute.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/yarn.node-attribute.fs-store.root-dir in system properties and HBase conf 2024-12-02T14:20:50,385 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.log-dirs to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/yarn.nodemanager.log-dirs in system properties and HBase conf 2024-12-02T14:20:50,386 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf 2024-12-02T14:20:50,386 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.active-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/yarn.timeline-service.entity-group-fs-store.active-dir in system properties and HBase conf 2024-12-02T14:20:50,387 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.done-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/yarn.timeline-service.entity-group-fs-store.done-dir in system properties and HBase conf 2024-12-02T14:20:50,388 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf 2024-12-02T14:20:50,388 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/dfs.journalnode.edits.dir in system properties and HBase conf 2024-12-02T14:20:50,389 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.datanode.shared.file.descriptor.paths to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/dfs.datanode.shared.file.descriptor.paths in system properties and HBase conf 2024-12-02T14:20:50,389 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting nfs.dump.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/nfs.dump.dir in system properties and HBase conf 2024-12-02T14:20:50,390 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting java.io.tmpdir to 
/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/java.io.tmpdir in system properties and HBase conf 2024-12-02T14:20:50,390 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/dfs.journalnode.edits.dir in system properties and HBase conf 2024-12-02T14:20:50,390 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.provided.aliasmap.inmemory.leveldb.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/dfs.provided.aliasmap.inmemory.leveldb.dir in system properties and HBase conf 2024-12-02T14:20:50,391 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting fs.s3a.committer.staging.tmp.path to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/fs.s3a.committer.staging.tmp.path in system properties and HBase conf 2024-12-02T14:20:50,901 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000. 2024-12-02T14:20:51,274 WARN [Time-limited test {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties 2024-12-02T14:20:51,372 INFO [Time-limited test {}] log.Log(170): Logging initialized @2621ms to org.eclipse.jetty.util.log.Slf4jLog 2024-12-02T14:20:51,467 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret 2024-12-02T14:20:51,537 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9 2024-12-02T14:20:51,558 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0 2024-12-02T14:20:51,558 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults 2024-12-02T14:20:51,560 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms 2024-12-02T14:20:51,572 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. 
Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret 2024-12-02T14:20:51,575 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@746f7db{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir/,AVAILABLE} 2024-12-02T14:20:51,576 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@61b73bb3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE} 2024-12-02T14:20:51,797 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@5599def{hdfs,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/java.io.tmpdir/jetty-localhost-42637-hadoop-hdfs-3_4_1-tests_jar-_-any-3154039903759769421/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs} 2024-12-02T14:20:51,806 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@493ba8a1{HTTP/1.1, (http/1.1)}{localhost:42637} 2024-12-02T14:20:51,806 INFO [Time-limited test {}] server.Server(415): Started @3055ms 2024-12-02T14:20:51,835 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000. 2024-12-02T14:20:52,185 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. 
Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret 2024-12-02T14:20:52,192 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9 2024-12-02T14:20:52,193 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0 2024-12-02T14:20:52,193 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults 2024-12-02T14:20:52,194 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms 2024-12-02T14:20:52,194 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@208945{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir/,AVAILABLE} 2024-12-02T14:20:52,195 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@43a917ce{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE} 2024-12-02T14:20:52,324 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@411b19f7{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/java.io.tmpdir/jetty-localhost-45555-hadoop-hdfs-3_4_1-tests_jar-_-any-15006040880985658544/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode} 2024-12-02T14:20:52,325 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@64bb503e{HTTP/1.1, (http/1.1)}{localhost:45555} 2024-12-02T14:20:52,325 INFO [Time-limited test {}] server.Server(415): Started @3574ms 2024-12-02T14:20:52,382 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering. 2024-12-02T14:20:52,526 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. 
Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret 2024-12-02T14:20:52,537 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9 2024-12-02T14:20:52,539 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0 2024-12-02T14:20:52,539 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults 2024-12-02T14:20:52,539 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms 2024-12-02T14:20:52,540 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@443ad5c2{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir/,AVAILABLE} 2024-12-02T14:20:52,541 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@243038a3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE} 2024-12-02T14:20:52,673 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@28637041{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/java.io.tmpdir/jetty-localhost-36743-hadoop-hdfs-3_4_1-tests_jar-_-any-17949338483455746773/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode} 2024-12-02T14:20:52,674 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@5e18b7fd{HTTP/1.1, (http/1.1)}{localhost:36743} 2024-12-02T14:20:52,674 INFO [Time-limited test {}] server.Server(415): Started @3923ms 2024-12-02T14:20:52,677 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering. 2024-12-02T14:20:52,718 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. 
Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret 2024-12-02T14:20:52,723 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9 2024-12-02T14:20:52,733 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0 2024-12-02T14:20:52,733 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults 2024-12-02T14:20:52,733 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms 2024-12-02T14:20:52,734 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@4c49fcd3{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir/,AVAILABLE} 2024-12-02T14:20:52,735 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@47ddd06a{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE} 2024-12-02T14:20:52,850 WARN [Thread-105 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data1/current/BP-1650638000-172.17.0.3-1733149251009/current, will proceed with Du for space computation calculation, 2024-12-02T14:20:52,851 WARN [Thread-108 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data4/current/BP-1650638000-172.17.0.3-1733149251009/current, will proceed with Du for space computation calculation, 2024-12-02T14:20:52,851 WARN [Thread-106 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data2/current/BP-1650638000-172.17.0.3-1733149251009/current, will proceed with Du for space computation calculation, 2024-12-02T14:20:52,852 WARN [Thread-107 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data3/current/BP-1650638000-172.17.0.3-1733149251009/current, will proceed with Du for space computation calculation, 2024-12-02T14:20:52,871 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@4b2b884e{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/java.io.tmpdir/jetty-localhost-36981-hadoop-hdfs-3_4_1-tests_jar-_-any-10330709664293678110/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode} 2024-12-02T14:20:52,872 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@fa4aa4c{HTTP/1.1, 
(http/1.1)}{localhost:36981} 2024-12-02T14:20:52,872 INFO [Time-limited test {}] server.Server(415): Started @4122ms 2024-12-02T14:20:52,875 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering. 2024-12-02T14:20:52,895 WARN [Thread-58 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1 2024-12-02T14:20:52,904 WARN [Thread-81 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1 2024-12-02T14:20:52,972 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xcf5019ed3b85cee9 with lease ID 0xeaa4a1df0cf74904: Processing first storage report for DS-b26a3641-b3ea-4185-b2ac-f20b249f581a from datanode DatanodeRegistration(127.0.0.1:32825, datanodeUuid=619cb3e0-c96c-41b4-945f-caf82d951ccc, infoPort=35411, infoSecurePort=0, ipcPort=39583, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009) 2024-12-02T14:20:52,974 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xcf5019ed3b85cee9 with lease ID 0xeaa4a1df0cf74904: from storage DS-b26a3641-b3ea-4185-b2ac-f20b249f581a node DatanodeRegistration(127.0.0.1:32825, datanodeUuid=619cb3e0-c96c-41b4-945f-caf82d951ccc, infoPort=35411, infoSecurePort=0, ipcPort=39583, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0 2024-12-02T14:20:52,974 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x10b049ab4f74bd1 with lease ID 0xeaa4a1df0cf74903: Processing first storage report for DS-3b0edb9f-cf2f-4b01-8d9e-52c3bfc7008e from datanode DatanodeRegistration(127.0.0.1:43509, datanodeUuid=157239cf-2ea8-4afc-8dd1-90ba56d41db7, infoPort=33577, infoSecurePort=0, ipcPort=43363, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009) 2024-12-02T14:20:52,975 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x10b049ab4f74bd1 with lease ID 0xeaa4a1df0cf74903: from storage DS-3b0edb9f-cf2f-4b01-8d9e-52c3bfc7008e node DatanodeRegistration(127.0.0.1:43509, datanodeUuid=157239cf-2ea8-4afc-8dd1-90ba56d41db7, infoPort=33577, infoSecurePort=0, ipcPort=43363, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0 2024-12-02T14:20:52,975 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xcf5019ed3b85cee9 with lease ID 0xeaa4a1df0cf74904: Processing first storage report for DS-0859db44-0d72-4db0-817e-979b811bfb42 from datanode DatanodeRegistration(127.0.0.1:32825, datanodeUuid=619cb3e0-c96c-41b4-945f-caf82d951ccc, infoPort=35411, infoSecurePort=0, ipcPort=39583, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009) 2024-12-02T14:20:52,975 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xcf5019ed3b85cee9 with lease ID 0xeaa4a1df0cf74904: from storage DS-0859db44-0d72-4db0-817e-979b811bfb42 node DatanodeRegistration(127.0.0.1:32825, datanodeUuid=619cb3e0-c96c-41b4-945f-caf82d951ccc, infoPort=35411, infoSecurePort=0, ipcPort=39583, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009), blocks: 0, hasStaleStorage: false, processing 
time: 0 msecs, invalidatedBlocks: 0 2024-12-02T14:20:52,976 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x10b049ab4f74bd1 with lease ID 0xeaa4a1df0cf74903: Processing first storage report for DS-e5f4396a-707b-4f7f-91ef-1ebeb283c40d from datanode DatanodeRegistration(127.0.0.1:43509, datanodeUuid=157239cf-2ea8-4afc-8dd1-90ba56d41db7, infoPort=33577, infoSecurePort=0, ipcPort=43363, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009) 2024-12-02T14:20:52,976 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x10b049ab4f74bd1 with lease ID 0xeaa4a1df0cf74903: from storage DS-e5f4396a-707b-4f7f-91ef-1ebeb283c40d node DatanodeRegistration(127.0.0.1:43509, datanodeUuid=157239cf-2ea8-4afc-8dd1-90ba56d41db7, infoPort=33577, infoSecurePort=0, ipcPort=43363, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009), blocks: 0, hasStaleStorage: false, processing time: 1 msecs, invalidatedBlocks: 0 2024-12-02T14:20:53,033 WARN [Thread-139 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data5/current/BP-1650638000-172.17.0.3-1733149251009/current, will proceed with Du for space computation calculation, 2024-12-02T14:20:53,033 WARN [Thread-140 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data6/current/BP-1650638000-172.17.0.3-1733149251009/current, will proceed with Du for space computation calculation, 2024-12-02T14:20:53,059 WARN [Thread-129 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. 
Assuming default value of -1 2024-12-02T14:20:53,065 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x4bf5baa585022348 with lease ID 0xeaa4a1df0cf74905: Processing first storage report for DS-eafc4632-9690-4b7e-b992-4a9e1a12d3a7 from datanode DatanodeRegistration(127.0.0.1:34475, datanodeUuid=84c87fe3-f604-41b2-80ac-aa5745c30957, infoPort=37079, infoSecurePort=0, ipcPort=32885, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009) 2024-12-02T14:20:53,065 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x4bf5baa585022348 with lease ID 0xeaa4a1df0cf74905: from storage DS-eafc4632-9690-4b7e-b992-4a9e1a12d3a7 node DatanodeRegistration(127.0.0.1:34475, datanodeUuid=84c87fe3-f604-41b2-80ac-aa5745c30957, infoPort=37079, infoSecurePort=0, ipcPort=32885, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0 2024-12-02T14:20:53,066 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x4bf5baa585022348 with lease ID 0xeaa4a1df0cf74905: Processing first storage report for DS-37305f1e-ac77-474a-9882-a2103ceedce7 from datanode DatanodeRegistration(127.0.0.1:34475, datanodeUuid=84c87fe3-f604-41b2-80ac-aa5745c30957, infoPort=37079, infoSecurePort=0, ipcPort=32885, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009) 2024-12-02T14:20:53,066 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x4bf5baa585022348 with lease ID 0xeaa4a1df0cf74905: from storage DS-37305f1e-ac77-474a-9882-a2103ceedce7 node DatanodeRegistration(127.0.0.1:34475, datanodeUuid=84c87fe3-f604-41b2-80ac-aa5745c30957, infoPort=37079, infoSecurePort=0, ipcPort=32885, storageInfo=lv=-57;cid=testClusterID;nsid=689308150;c=1733149251009), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0 2024-12-02T14:20:53,282 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(631): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a 2024-12-02T14:20:53,296 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=155, OpenFileDescriptor=391, MaxFileDescriptor=1048576, SystemLoadAverage=305, ProcessCount=11, AvailableMemoryMB=9042 2024-12-02T14:20:53,318 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:20:53,322 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:20:53,544 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741825_1001 (size=7) 2024-12-02T14:20:53,545 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741825_1001 (size=7) 2024-12-02T14:20:53,545 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741825_1001 (size=7) 2024-12-02T14:20:53,951 INFO [Time-limited test {}] util.FSUtils(489): Created version file at 
hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8 2024-12-02T14:20:53,952 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:20:53,954 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:20:53,962 DEBUG [Time-limited test {}] util.ClassSize(228): Using Unsafe to estimate memory layout 2024-12-02T14:20:53,980 INFO [Time-limited test {}] metrics.MetricRegistriesLoader(60): Loaded MetricRegistries class org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl 2024-12-02T14:20:53,982 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-02T14:20:53,990 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/testSyncRunnerIndexOverflow, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/oldWALs, maxLogs=1760 2024-12-02T14:20:54,034 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149254025 2024-12-02T14:20:54,099 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/testSyncRunnerIndexOverflow/wal.1733149254025 2024-12-02T14:20:54,146 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:33577:33577)] 2024-12-02T14:20:54,203 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:20:54,203 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:20:54,203 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:20:54,203 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:20:54,204 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:20:54,207 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741826_1002 (size=1293) 2024-12-02T14:20:54,208 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741826_1002 (size=1293) 2024-12-02T14:20:54,209 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741826_1002 (size=1293) 2024-12-02T14:20:54,217 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/oldWALs 2024-12-02T14:20:54,220 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733149254025) 2024-12-02T14:20:54,228 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=162 (was 155) Potentially hanging thread: sync.0 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) 
java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: weak-ref-cleaner-strictcontextstorage java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:155) java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:176) app//io.opentelemetry.context.StrictContextStorage$PendingScopes.run(StrictContextStorage.java:269) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: sync.2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: sync.4 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: sync.1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) 
java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: sync.3 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: LeaseRenewer:jenkins@localhost:38049 java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:441) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$800(LeaseRenewer.java:77) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:336) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=403 (was 391) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=305 (was 305), ProcessCount=11 (was 11), AvailableMemoryMB=8988 (was 9042) 2024-12-02T14:20:54,235 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=162, OpenFileDescriptor=403, MaxFileDescriptor=1048576, SystemLoadAverage=305, ProcessCount=11, AvailableMemoryMB=8988 2024-12-02T14:20:54,258 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741827_1003 (size=7) 2024-12-02T14:20:54,259 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741827_1003 (size=7) 2024-12-02T14:20:54,259 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741827_1003 (size=7) 2024-12-02T14:20:54,262 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8 2024-12-02T14:20:54,262 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:20:54,266 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:20:54,277 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-02T14:20:54,277 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/testUnflushedSeqIdTracking, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/oldWALs, maxLogs=1760 2024-12-02T14:20:54,280 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149254279 2024-12-02T14:20:54,290 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/testUnflushedSeqIdTracking/wal.1733149254279 2024-12-02T14:20:54,291 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577)] 2024-12-02T14:20:54,294 INFO [Time-limited test {}] regionserver.ChunkCreator(472): data poolSizePercentage is less than 0. So not using pool 2024-12-02T14:20:54,294 INFO [Time-limited test {}] regionserver.ChunkCreator(472): index poolSizePercentage is less than 0. 
So not using pool
2024-12-02T14:20:54,317 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 8d5f8b02616d67be474d279274acaffe, NAME => 'testUnflushedSeqIdTracking,,1733149254292.8d5f8b02616d67be474d279274acaffe.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testUnflushedSeqIdTracking', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a
2024-12-02T14:20:54,335 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741829_1005 (size=61)
2024-12-02T14:20:54,336 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741829_1005 (size=61)
2024-12-02T14:20:54,337 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741829_1005 (size=61)
2024-12-02T14:20:54,341 INFO [Time-limited test {}] throttle.StoreHotnessProtector(112): StoreHotnessProtector is disabled. Set hbase.region.store.parallel.put.limit > 0 to enable, which may help mitigate load under heavy write pressure.
2024-12-02T14:20:54,344 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testUnflushedSeqIdTracking,,1733149254292.8d5f8b02616d67be474d279274acaffe.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-02T14:20:54,387 INFO [StoreOpener-8d5f8b02616d67be474d279274acaffe-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 8d5f8b02616d67be474d279274acaffe
2024-12-02T14:20:54,416 INFO [StoreOpener-8d5f8b02616d67be474d279274acaffe-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 8d5f8b02616d67be474d279274acaffe columnFamilyName b
2024-12-02T14:20:54,421 DEBUG [StoreOpener-8d5f8b02616d67be474d279274acaffe-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-02T14:20:54,426 INFO [StoreOpener-8d5f8b02616d67be474d279274acaffe-1 {}] regionserver.HStore(327): Store=8d5f8b02616d67be474d279274acaffe/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-02T14:20:54,428 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 8d5f8b02616d67be474d279274acaffe
2024-12-02T14:20:54,431 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testUnflushedSeqIdTracking/8d5f8b02616d67be474d279274acaffe
2024-12-02T14:20:54,433 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testUnflushedSeqIdTracking/8d5f8b02616d67be474d279274acaffe
2024-12-02T14:20:54,433 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/f3a9184b-e9eb-e598-45cd-4b8ea9199831/data/default/testUnflushedSeqIdTracking/8d5f8b02616d67be474d279274acaffe
2024-12-02T14:20:54,436 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 8d5f8b02616d67be474d279274acaffe
2024-12-02T14:20:54,437 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 8d5f8b02616d67be474d279274acaffe
2024-12-02T14:20:54,443 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 8d5f8b02616d67be474d279274acaffe
2024-12-02T14:20:54,449 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:38049/user/jenkins/test-data/f3a9184b-e9eb-e598-45cd-4b8ea9199831/data/default/testUnflushedSeqIdTracking/8d5f8b02616d67be474d279274acaffe/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-12-02T14:20:54,450 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 8d5f8b02616d67be474d279274acaffe; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=64390342, jitterRate=-0.04050913453102112}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-02T14:20:54,466 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 8d5f8b02616d67be474d279274acaffe: Writing region info on filesystem at 1733149254364Initializing all the Stores at 1733149254366 (+2 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733149254366Cleaning up temporary data from old regions at 1733149254437 (+71 ms)Region opened successfully at 1733149254464 (+27 ms)
2024-12-02T14:20:57,493 INFO [pool-60-thread-2 {}] regionserver.HRegion(2902): Flushing 8d5f8b02616d67be474d279274acaffe 1/1 column families, dataSize=24 B heapSize=352 B
2024-12-02T14:21:00,604 DEBUG [pool-60-thread-2 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/testUnflushedSeqIdTracking/8d5f8b02616d67be474d279274acaffe/.tmp/b/da4d71abc77e4ca8b87d9c2216d51219 is 28, key is b/b:b/1733149254488/Put/seqid=0
2024-12-02T14:21:00,620 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741830_1006 (size=4945)
2024-12-02T14:21:00,621 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741830_1006 (size=4945)
2024-12-02T14:21:00,622 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741830_1006 (size=4945)
2024-12-02T14:21:00,622 INFO [pool-60-thread-2 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=4 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/testUnflushedSeqIdTracking/8d5f8b02616d67be474d279274acaffe/.tmp/b/da4d71abc77e4ca8b87d9c2216d51219
2024-12-02T14:21:00,729 DEBUG [pool-60-thread-2 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/testUnflushedSeqIdTracking/8d5f8b02616d67be474d279274acaffe/.tmp/b/da4d71abc77e4ca8b87d9c2216d51219 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/testUnflushedSeqIdTracking/8d5f8b02616d67be474d279274acaffe/b/da4d71abc77e4ca8b87d9c2216d51219
2024-12-02T14:21:00,742 INFO [pool-60-thread-2 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/testUnflushedSeqIdTracking/8d5f8b02616d67be474d279274acaffe/b/da4d71abc77e4ca8b87d9c2216d51219, entries=1, sequenceid=4, filesize=4.8 K
2024-12-02T14:21:00,750 INFO [pool-60-thread-2 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 8d5f8b02616d67be474d279274acaffe in 3256ms, sequenceid=4, compaction requested=false
2024-12-02T14:21:00,751 DEBUG [pool-60-thread-2 {}] regionserver.HRegion(2603): Flush status journal for 8d5f8b02616d67be474d279274acaffe:
2024-12-02T14:21:00,751 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(193): Flush result:FLUSHED_NO_COMPACTION_NEEDED
2024-12-02T14:21:00,751 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(194): Flush succeeded:true
2024-12-02T14:21:00,752 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 8d5f8b02616d67be474d279274acaffe, disabling compactions & flushes
2024-12-02T14:21:00,752 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testUnflushedSeqIdTracking,,1733149254292.8d5f8b02616d67be474d279274acaffe.
2024-12-02T14:21:00,752 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testUnflushedSeqIdTracking,,1733149254292.8d5f8b02616d67be474d279274acaffe.
2024-12-02T14:21:00,752 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testUnflushedSeqIdTracking,,1733149254292.8d5f8b02616d67be474d279274acaffe. after waiting 0 ms
2024-12-02T14:21:00,753 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testUnflushedSeqIdTracking,,1733149254292.8d5f8b02616d67be474d279274acaffe.
2024-12-02T14:21:00,754 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testUnflushedSeqIdTracking,,1733149254292.8d5f8b02616d67be474d279274acaffe.
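The StoreHotnessProtector entry above says exactly how to turn the protector on: set hbase.region.store.parallel.put.limit to a value greater than 0. A minimal sketch of doing that through the configuration API (the limit of 10 is an arbitrary illustration, not a value taken from this run; in a real deployment the property would normally live in hbase-site.xml):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class EnableStoreHotnessProtector {
        public static void main(String[] args) {
            // The run above logs "StoreHotnessProtector is disabled" because
            // hbase.region.store.parallel.put.limit is 0; any value > 0 enables it.
            Configuration conf = HBaseConfiguration.create();
            conf.setInt("hbase.region.store.parallel.put.limit", 10); // illustrative value
            System.out.println("parallel put limit = "
                + conf.getInt("hbase.region.store.parallel.put.limit", 0));
        }
    }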
2024-12-02T14:21:00,755 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 8d5f8b02616d67be474d279274acaffe: Waiting for close lock at 1733149260751Disabling compacts and flushes for region at 1733149260752 (+1 ms)Disabling writes for close at 1733149260753 (+1 ms)Writing region close event to WAL at 1733149260754 (+1 ms)Closed at 1733149260754
2024-12-02T14:21:00,755 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,755 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,756 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,756 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,756 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,759 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741828_1004 (size=875)
2024-12-02T14:21:00,760 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741828_1004 (size=875)
2024-12-02T14:21:00,760 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741828_1004 (size=875)
2024-12-02T14:21:00,764 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/oldWALs
2024-12-02T14:21:00,764 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733149254279)
2024-12-02T14:21:00,775 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=178 (was 162)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:42066 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data3 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Monitor thread for TaskMonitor java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hbase.monitoring.TaskMonitor$MonitorRunnable.run(TaskMonitor.java:325) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: HBase-Metrics2-1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182) java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: pool-60-thread-1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data4 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:33014 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: org.apache.hadoop.hdfs.PeerCache@37f0137c java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hdfs.PeerCache.run(PeerCache.java:253) app//org.apache.hadoop.hdfs.PeerCache.access$000(PeerCache.java:46) app//org.apache.hadoop.hdfs.PeerCache$1.run(PeerCache.java:124) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: pool-60-thread-2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
- Thread LEAK? -, OpenFileDescriptor=417 (was 403) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=282 (was 305), ProcessCount=11 (was 11), AvailableMemoryMB=8939 (was 8988)
2024-12-02T14:21:00,783 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALComparator Thread=178, OpenFileDescriptor=417, MaxFileDescriptor=1048576, SystemLoadAverage=282, ProcessCount=11, AvailableMemoryMB=8938
2024-12-02T14:21:00,799 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741831_1007 (size=7)
2024-12-02T14:21:00,799 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741831_1007 (size=7)
2024-12-02T14:21:00,800 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741831_1007 (size=7)
2024-12-02T14:21:00,802 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8
2024-12-02T14:21:00,802 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-02T14:21:00,804 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-02T14:21:00,811 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
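The ResourceChecker before/after lines bracket each test with counts of live threads, open file descriptors, system load, and memory, and dump a stack for every thread it considers potentially hanging. The thread half of that report can be approximated with nothing more than Thread.getAllStackTraces(); this standalone sketch (not HBase's actual ResourceChecker) prints a similar name-plus-top-frame listing:

    import java.util.Map;

    public class ThreadSnapshot {
        public static void main(String[] args) {
            // Snapshot every live thread with its current stack, the raw data
            // behind a "Thread=178 (was 162)" style before/after comparison.
            Map<Thread, StackTraceElement[]> traces = Thread.getAllStackTraces();
            System.out.println("live threads: " + traces.size());
            for (Map.Entry<Thread, StackTraceElement[]> e : traces.entrySet()) {
                StackTraceElement[] stack = e.getValue();
                // Thread name plus topmost frame, mirroring the
                // "Potentially hanging thread: <name> <frames>" layout above.
                System.out.println(e.getKey().getName()
                    + (stack.length > 0 ? " @ " + stack[0] : ""));
            }
        }
    }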
2024-12-02T14:21:00,812 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/2340df0c-32e0-280d-9624-637502a30759/testWALComparator, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/2340df0c-32e0-280d-9624-637502a30759/oldWALs, maxLogs=1760
2024-12-02T14:21:00,814 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149260813
2024-12-02T14:21:00,822 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/2340df0c-32e0-280d-9624-637502a30759/testWALComparator/wal.1733149260813
2024-12-02T14:21:00,823 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577)]
2024-12-02T14:21:00,824 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(215): Log obtained is: FSHLog wal:(num 1733149260813)
2024-12-02T14:21:00,827 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-02T14:21:00,827 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=.meta, logDir=hdfs://localhost:38049/user/jenkins/test-data/2340df0c-32e0-280d-9624-637502a30759/testWALComparator, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/2340df0c-32e0-280d-9624-637502a30759/oldWALs, maxLogs=1760
2024-12-02T14:21:00,830 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149260830.meta
2024-12-02T14:21:00,838 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/2340df0c-32e0-280d-9624-637502a30759/testWALComparator/wal.1733149260830.meta
2024-12-02T14:21:00,839 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577)]
2024-12-02T14:21:00,840 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,841 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,841 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,841 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,842 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,844 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741832_1008 (size=93)
2024-12-02T14:21:00,845 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741832_1008 (size=93)
2024-12-02T14:21:00,846 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741832_1008 (size=93)
2024-12-02T14:21:00,850 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/2340df0c-32e0-280d-9624-637502a30759/oldWALs
2024-12-02T14:21:00,850 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733149260813)
2024-12-02T14:21:00,850 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,850 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,851 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,851 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,851 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,854 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741833_1009 (size=93)
2024-12-02T14:21:00,854 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741833_1009 (size=93)
2024-12-02T14:21:00,855 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741833_1009 (size=93)
2024-12-02T14:21:00,858 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/2340df0c-32e0-280d-9624-637502a30759/oldWALs
2024-12-02T14:21:00,858 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:.meta(num 1733149260830)
2024-12-02T14:21:00,868 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALComparator Thread=188 (was 178) - Thread LEAK? -, OpenFileDescriptor=423 (was 417) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=282 (was 282), ProcessCount=11 (was 11), AvailableMemoryMB=8929 (was 8938)
2024-12-02T14:21:00,877 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=188, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=282, ProcessCount=11, AvailableMemoryMB=8927
2024-12-02T14:21:00,890 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741834_1010 (size=7)
2024-12-02T14:21:00,891 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741834_1010 (size=7)
2024-12-02T14:21:00,891 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741834_1010 (size=7)
2024-12-02T14:21:00,893 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8
2024-12-02T14:21:00,894 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-02T14:21:00,896 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-02T14:21:00,900 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(383): testFindMemStoresEligibleForFlush
2024-12-02T14:21:00,931 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
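testWALComparator creates two WALs whose file names embed a creation timestamp, one with an empty suffix (wal.1733149260813) and one with a .meta suffix (wal.1733149260830.meta). Ordering such files means comparing the embedded timestamps regardless of suffix; a rough standalone illustration of that ordering rule (not HBase's actual comparator) is:

    import java.util.Arrays;
    import java.util.Comparator;

    public class WalNameOrder {
        // Extract the epoch-millis component from names like
        // "wal.1733149260813" or "wal.1733149260830.meta".
        static long ts(String name) {
            String s = name.endsWith(".meta")
                ? name.substring(0, name.length() - ".meta".length())
                : name;
            return Long.parseLong(s.substring(s.lastIndexOf('.') + 1));
        }

        public static void main(String[] args) {
            String[] wals = { "wal.1733149260830.meta", "wal.1733149260813" };
            Arrays.sort(wals, Comparator.comparingLong(WalNameOrder::ts));
            System.out.println(Arrays.toString(wals)); // oldest first
        }
    }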
2024-12-02T14:21:00,931 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs, maxLogs=1
2024-12-02T14:21:00,933 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149260933
2024-12-02T14:21:00,943 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260933
2024-12-02T14:21:00,948 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577)]
2024-12-02T14:21:00,956 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149260955
2024-12-02T14:21:00,965 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,966 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,966 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,966 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,966 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,967 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260933 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260955
2024-12-02T14:21:00,968 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:37079:37079)]
2024-12-02T14:21:00,968 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260933 is not closed yet, will try archiving it next time
2024-12-02T14:21:00,970 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741835_1011 (size=283)
2024-12-02T14:21:00,971 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741835_1011 (size=283)
2024-12-02T14:21:00,971 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741835_1011 (size=283)
2024-12-02T14:21:00,972 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149260972
2024-12-02T14:21:00,981 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,981 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,981 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,981 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,982 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:00,982 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260955 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260972
2024-12-02T14:21:00,985 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741836_1012 (size=283)
2024-12-02T14:21:00,985 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741836_1012 (size=283)
2024-12-02T14:21:00,986 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741836_1012 (size=283)
2024-12-02T14:21:00,987 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:37079:37079)]
2024-12-02T14:21:00,988 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): b164ffecff987d03ca17ec34888cf695[cf1]
2024-12-02T14:21:00,990 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-02T14:21:00,990 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): b164ffecff987d03ca17ec34888cf695[cf1]
2024-12-02T14:21:00,992 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): b164ffecff987d03ca17ec34888cf695[cf1]
2024-12-02T14:21:00,992 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149260992
2024-12-02T14:21:01,001 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,001 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,002 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,002 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,002 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,002 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260972 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260992
2024-12-02T14:21:01,003 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411)]
2024-12-02T14:21:01,003 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260972 is not closed yet, will try archiving it next time
2024-12-02T14:21:01,005 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-02T14:21:01,005 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149261005
2024-12-02T14:21:01,005 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741837_1013 (size=283)
2024-12-02T14:21:01,005 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741837_1013 (size=283)
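The testFindMemStoresEligibleForFlush WAL above was created with maxLogs=1, so the moment a roll leaves two live WAL files the "Too many WALs; count=2, max=1" branch fires and the region pinning the oldest file is asked to flush the named column families. Assuming the limit maps to the usual hbase.regionserver.maxlogs knob (the test may well set it through its own plumbing instead), reproducing such an aggressive setting would look like:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class TinyMaxLogs {
        public static void main(String[] args) {
            Configuration conf = HBaseConfiguration.create();
            // With a limit of 1, any second live WAL file exceeds the cap and
            // triggers a forced (partial) flush of the regions holding the
            // oldest unflushed edits, exactly the behavior logged above.
            conf.setInt("hbase.regionserver.maxlogs", 1); // assumed property name
            System.out.println("maxlogs = " + conf.getInt("hbase.regionserver.maxlogs", 32));
        }
    }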
2024-12-02T14:21:01,007 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260933 to hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs/wal.1733149260933
2024-12-02T14:21:01,008 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741837_1013 (size=283)
2024-12-02T14:21:01,009 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260955 to hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs/wal.1733149260955
2024-12-02T14:21:01,011 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260972 to hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs/wal.1733149260972
2024-12-02T14:21:01,017 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,017 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,017 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,018 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,018 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,018 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260992 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261005
2024-12-02T14:21:01,019 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:37079:37079)]
2024-12-02T14:21:01,019 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260992 is not closed yet, will try archiving it next time
2024-12-02T14:21:01,020 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-02T14:21:01,020 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741838_1014 (size=93)
2024-12-02T14:21:01,021 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741838_1014 (size=93)
2024-12-02T14:21:01,021 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741838_1014 (size=93)
2024-12-02T14:21:01,022 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149260992 to hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs/wal.1733149260992
2024-12-02T14:21:01,124 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149261124
2024-12-02T14:21:01,127 WARN [IPC Server handler 2 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology
2024-12-02T14:21:01,128 WARN [IPC Server handler 2 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]})
2024-12-02T14:21:01,128 WARN [IPC Server handler 2 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}
2024-12-02T14:21:01,134 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,135 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,135 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,135 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,135 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,135 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261005 with entries=4, filesize=465 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261124
2024-12-02T14:21:01,137 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411)]
2024-12-02T14:21:01,137 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261005 is not closed yet, will try archiving it next time
2024-12-02T14:21:01,137 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-02T14:21:01,138 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741839_1015 (size=473)
2024-12-02T14:21:01,139 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741839_1015 (size=473)
2024-12-02T14:21:01,139 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741839_1015 (size=473)
2024-12-02T14:21:01,140 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149261140
2024-12-02T14:21:01,143 WARN [IPC Server handler 4 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology
2024-12-02T14:21:01,144 WARN [IPC Server handler 4 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]})
2024-12-02T14:21:01,144 WARN [IPC Server handler 4 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}
2024-12-02T14:21:01,149 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,149 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,149 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,150 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,150 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,150 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261124 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261140
2024-12-02T14:21:01,152 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577)]
2024-12-02T14:21:01,153 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261124 is not closed yet, will try archiving it next time
2024-12-02T14:21:01,153 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 211e5827db956cf29432e7a7ce7c5511[cf1],b164ffecff987d03ca17ec34888cf695[cf1]
2024-12-02T14:21:01,153 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 211e5827db956cf29432e7a7ce7c5511[cf1],b164ffecff987d03ca17ec34888cf695[cf1]
2024-12-02T14:21:01,153 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741840_1016 (size=283)
2024-12-02T14:21:01,154 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149261153
2024-12-02T14:21:01,154 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741840_1016 (size=283)
2024-12-02T14:21:01,165 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,165 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,166 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,166 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,166 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,166 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261140 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261153
2024-12-02T14:21:01,167 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577)]
2024-12-02T14:21:01,168 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261124 is not closed yet, will try archiving it next time
2024-12-02T14:21:01,168 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261140 is not closed yet, will try archiving it next time
2024-12-02T14:21:01,168 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261005 to hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs/wal.1733149261005
2024-12-02T14:21:01,168 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-02T14:21:01,169 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741841_1017 (size=93)
2024-12-02T14:21:01,169 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741841_1017 (size=93)
2024-12-02T14:21:01,170 DEBUG [Close-WAL-Writer-1 {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261124 is not closed yet, will try archiving it next time
2024-12-02T14:21:01,171 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261140 to hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs/wal.1733149261140
2024-12-02T14:21:01,555 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261124 to hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs/wal.1733149261124
2024-12-02T14:21:01,571 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149261571
2024-12-02T14:21:01,574 WARN [IPC Server handler 4 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology
2024-12-02T14:21:01,574 WARN [IPC Server handler 4 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]})
2024-12-02T14:21:01,574 WARN [IPC Server handler 4 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}
2024-12-02T14:21:01,579 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,579 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,579 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,579 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,580 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:01,580 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261153 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261571
2024-12-02T14:21:01,581 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411)]
2024-12-02T14:21:01,581 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261153 is not closed yet, will try archiving it next time
2024-12-02T14:21:01,581 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-02T14:21:01,582 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149261582
2024-12-02T14:21:01,584 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741842_1018 (size=283)
2024-12-02T14:21:01,584 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741842_1018 (size=283)
2024-12-02T14:21:01,585 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741842_1018 (size=283)
2024-12-02T14:21:01,585 WARN [IPC Server handler 2 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology
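The repeated BlockPlacementPolicyDefault warnings mean the namenode wanted three replicas on DISK storage but, on this mini-cluster, ran out of eligible datanodes and settled for one fewer. One way to silence that class of warning in a small test cluster is to lower the default replication factor to what the cluster can actually satisfy; a minimal sketch (the value 2 is illustrative, not taken from this run):

    import org.apache.hadoop.conf.Configuration;

    public class ReplicationForSmallCluster {
        public static void main(String[] args) {
            // With dfs.replication matched to the number of usable datanodes,
            // the "Failed to place enough replicas" warnings above go away.
            Configuration conf = new Configuration();
            conf.setInt("dfs.replication", 2); // illustrative value
            System.out.println("dfs.replication = " + conf.getInt("dfs.replication", 3));
        }
    }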
2024-12-02T14:21:01,586 WARN [IPC Server handler 2 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-02T14:21:01,586 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261153 to hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs/wal.1733149261153 2024-12-02T14:21:01,586 WARN [IPC Server handler 2 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-02T14:21:01,591 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,591 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,591 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,591 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,591 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,592 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261571 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261582 2024-12-02T14:21:01,592 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577)] 2024-12-02T14:21:01,593 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261571 is not closed yet, will try archiving it next time 2024-12-02T14:21:01,594 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741843_1019 (size=93) 2024-12-02T14:21:01,595 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741843_1019 (size=93) 2024-12-02T14:21:01,596 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261571 to hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs/wal.1733149261571 2024-12-02T14:21:01,598 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149261598 2024-12-02T14:21:01,601 WARN [IPC Server handler 4 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], 
replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-02T14:21:01,601 WARN [IPC Server handler 4 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-02T14:21:01,601 WARN [IPC Server handler 4 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-02T14:21:01,606 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,606 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,606 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,607 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,607 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,607 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261582 with entries=6, filesize=709 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261598 2024-12-02T14:21:01,608 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411)] 2024-12-02T14:21:01,608 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261582 is not closed yet, will try archiving it next time 2024-12-02T14:21:01,609 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741844_1020 (size=717) 2024-12-02T14:21:01,610 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741844_1020 (size=717) 2024-12-02T14:21:01,612 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149261611 2024-12-02T14:21:01,615 WARN [IPC Server handler 2 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-02T14:21:01,615 WARN [IPC Server handler 2 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], 
unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-02T14:21:01,615 WARN [IPC Server handler 2 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-02T14:21:01,620 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,620 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,620 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,621 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,621 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,621 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261598 with entries=2, filesize=293 B; new WAL /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261611 2024-12-02T14:21:01,622 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577)] 2024-12-02T14:21:01,622 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:38049/user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/testFindMemStoresEligibleForFlush/wal.1733149261598 is not closed yet, will try archiving it next time 2024-12-02T14:21:01,622 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): b8dc3306983196af270dd69c036f270f[cf1,cf3,cf2] 2024-12-02T14:21:01,623 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-02T14:21:01,623 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): b8dc3306983196af270dd69c036f270f[cf3,cf2] 2024-12-02T14:21:01,623 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741845_1021 (size=301) 2024-12-02T14:21:01,623 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,624 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,624 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741845_1021 (size=301) 2024-12-02T14:21:01,624 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,624 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,625 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,627 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741846_1022 (size=93) 2024-12-02T14:21:01,627 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741846_1022 (size=93) 2024-12-02T14:21:01,634 DEBUG [Time-limited test {}] 
wal.AbstractFSWAL(1256): Moved 3 WAL file(s) to /user/jenkins/test-data/194c6619-73fb-38cd-76fd-cb32ae0aca02/oldWALs 2024-12-02T14:21:01,634 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733149261611) 2024-12-02T14:21:01,642 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=193 (was 188) - Thread LEAK? -, OpenFileDescriptor=423 (was 423), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=282 (was 282), ProcessCount=11 (was 11), AvailableMemoryMB=8914 (was 8927) 2024-12-02T14:21:01,649 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=193, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=282, ProcessCount=11, AvailableMemoryMB=8913 2024-12-02T14:21:01,655 WARN [IPC Server handler 3 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-02T14:21:01,655 WARN [IPC Server handler 3 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-02T14:21:01,655 WARN [IPC Server handler 3 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-02T14:21:01,666 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741847_1023 (size=7) 2024-12-02T14:21:01,666 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741847_1023 (size=7) 2024-12-02T14:21:01,668 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8 2024-12-02T14:21:01,668 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:01,671 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:01,679 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
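The "Too many WALs; count=2, max=1; forcing (partial) flush" records just above show the roll-driven flush path: when the count of live WAL files exceeds the configured maximum, the WAL asks the regions whose unflushed edits pin the oldest file to flush, after which that file can be moved to oldWALs as logged by AbstractFSWAL(1256). A minimal sketch of that policy, with illustrative names only, not HBase's actual internals:

    import java.util.List;

    // Hedged sketch of the "Too many WALs" behavior seen above, not HBase's code:
    // when live WAL files exceed maxLogs, flush the regions whose unflushed edits
    // pin the oldest file so it can be archived. All names are illustrative.
    class TooManyWalsSketch {
      private final int maxLogs;
      TooManyWalsSketch(int maxLogs) { this.maxLogs = maxLogs; }

      void onRoll(int liveWalCount, List<String> regionsPinningOldestWal) {
        if (liveWalCount > maxLogs) {
          System.out.printf(
              "Too many WALs; count=%d, max=%d; forcing (partial) flush of %d region(s)%n",
              liveWalCount, maxLogs, regionsPinningOldestWal.size());
          regionsPinningOldestWal.forEach(this::requestFlush);
        }
      }

      private void requestFlush(String encodedRegionName) {
        // In the log above this shows up as a partial flush of the named
        // column families, after which the old WAL file is archived.
      }
    }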
2024-12-02T14:21:01,679 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/535621e5-88af-2182-0a42-a919dbfa8af5/testRollWriterForClosedWAL, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/535621e5-88af-2182-0a42-a919dbfa8af5/testRollWriterForClosedWAL, maxLogs=1760 2024-12-02T14:21:01,681 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149261681 2024-12-02T14:21:01,684 WARN [IPC Server handler 0 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-02T14:21:01,684 WARN [IPC Server handler 0 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-02T14:21:01,685 WARN [IPC Server handler 0 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-02T14:21:01,692 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/535621e5-88af-2182-0a42-a919dbfa8af5/testRollWriterForClosedWAL/wal.1733149261681 2024-12-02T14:21:01,713 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411)] 2024-12-02T14:21:01,723 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,724 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,724 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,724 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,724 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:01,727 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741848_1024 (size=93) 2024-12-02T14:21:01,728 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741848_1024 (size=93) 2024-12-02T14:21:01,731 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/535621e5-88af-2182-0a42-a919dbfa8af5/testRollWriterForClosedWAL 2024-12-02T14:21:01,731 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733149261681) 2024-12-02T14:21:01,740 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=198 (was 193) 
- Thread LEAK? -, OpenFileDescriptor=423 (was 423), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=282 (was 282), ProcessCount=11 (was 11), AvailableMemoryMB=8904 (was 8913) 2024-12-02T14:21:01,748 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=198, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=282, ProcessCount=11, AvailableMemoryMB=8903 2024-12-02T14:21:01,753 WARN [IPC Server handler 4 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-02T14:21:01,753 WARN [IPC Server handler 4 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-02T14:21:01,753 WARN [IPC Server handler 4 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-02T14:21:01,759 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741849_1025 (size=7) 2024-12-02T14:21:01,761 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741849_1025 (size=7) 2024-12-02T14:21:01,762 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8 2024-12-02T14:21:01,762 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:01,764 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:01,771 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
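The hbase.ResourceChecker "before:"/"after:" pairs above bracket each test with process-level gauges (threads, open file descriptors, system load, process count, free memory) and append a "LEAK?" marker when a counter grew across the test. A self-contained sketch of that bracketing, assuming illustrative names rather than HBase's real ResourceChecker:

    import java.lang.management.ManagementFactory;

    // Hedged sketch of the before/after accounting ResourceChecker prints above;
    // class, record, and method names here are illustrative, not HBase's.
    class ResourceCheckSketch {
      record Snapshot(int threads, long freeMemoryMb) {}

      static Snapshot snapshot() {
        return new Snapshot(
            ManagementFactory.getThreadMXBean().getThreadCount(),
            Runtime.getRuntime().freeMemory() / (1024 * 1024));
      }

      static void report(String test, Snapshot before, Snapshot after) {
        String leak = after.threads() > before.threads() ? " - Thread LEAK? -" : "";
        System.out.printf("after: %s Thread=%d (was %d)%s%n",
            test, after.threads(), before.threads(), leak);
      }
    }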
2024-12-02T14:21:01,772 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/testMaxFlushedSequenceIdGoBackwards, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/oldWALs, maxLogs=1760 2024-12-02T14:21:01,773 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149261773 2024-12-02T14:21:01,775 WARN [IPC Server handler 0 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-02T14:21:01,775 WARN [IPC Server handler 0 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-02T14:21:01,775 WARN [IPC Server handler 0 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-02T14:21:01,779 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/testMaxFlushedSequenceIdGoBackwards/wal.1733149261773 2024-12-02T14:21:01,780 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411)] 2024-12-02T14:21:01,781 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 11e72719bbe1d7dc9f1bb8dcb763d96c, NAME => 'table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a 2024-12-02T14:21:01,785 WARN [IPC Server handler 0 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 
(unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-02T14:21:01,785 WARN [IPC Server handler 0 on default port 38049 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-02T14:21:01,785 WARN [IPC Server handler 0 on default port 38049 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-02T14:21:01,790 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741851_1027 (size=40) 2024-12-02T14:21:01,790 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741851_1027 (size=40) 2024-12-02T14:21:01,791 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-02T14:21:01,793 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:01,795 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 11e72719bbe1d7dc9f1bb8dcb763d96c columnFamilyName a 2024-12-02T14:21:01,795 DEBUG [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-02T14:21:01,795 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] regionserver.HStore(327): Store=11e72719bbe1d7dc9f1bb8dcb763d96c/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 
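The BlockPlacementPolicyDefault WARNs repeated throughout this run name their own remedy: raise org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology to DEBUG to see why each datanode is rejected. The usual route is the Log4j2 properties file this run loads at startup; as one programmatic alternative, a sketch using the log4j-core API already on this classpath:

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.core.config.Configurator;

    // Raise the two loggers the WARN message names to DEBUG at runtime.
    // Configurator.setLevel(String, Level) is part of log4j-core 2.x.
    class EnablePlacementDebug {
      static void apply() {
        Configurator.setLevel(
            "org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy",
            Level.DEBUG);
        Configurator.setLevel("org.apache.hadoop.net.NetworkTopology", Level.DEBUG);
      }
    }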
2024-12-02T14:21:01,796 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:01,797 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 11e72719bbe1d7dc9f1bb8dcb763d96c columnFamilyName b 2024-12-02T14:21:01,797 DEBUG [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-02T14:21:01,798 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] regionserver.HStore(327): Store=11e72719bbe1d7dc9f1bb8dcb763d96c/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-02T14:21:01,798 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:01,799 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:01,800 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:01,800 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/24cce2cd-5e57-badb-92df-656bc1349eb0/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:01,801 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:01,802 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:01,803 DEBUG [Time-limited test {}] regionserver.FlushLargeStoresPolicy(65): No hbase.hregion.percolumnfamilyflush.size.lower.bound set in table table descriptor;using region.getMemStoreFlushHeapSize/# of families (64.0 M)) instead. 
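The "64.0 M" fallback in the FlushLargeStoresPolicy record above is simple arithmetic: with no hbase.hregion.percolumnfamilyflush.size.lower.bound set, the lower bound becomes the region memstore flush size divided by the number of column families; assuming the default 128 MB flush size here, that is 134217728 / 2 = 67108864 bytes (64 MB), which matches the flushSizeLowerBound=67108864 printed when the region opens below.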
2024-12-02T14:21:01,805 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:01,808 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:38049/user/jenkins/test-data/24cce2cd-5e57-badb-92df-656bc1349eb0/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-02T14:21:01,809 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 11e72719bbe1d7dc9f1bb8dcb763d96c; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=71861509, jitterRate=0.0708199292421341}}}, FlushLargeStoresPolicy{flushSizeLowerBound=67108864} 2024-12-02T14:21:01,812 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 11e72719bbe1d7dc9f1bb8dcb763d96c: Writing region info on filesystem at 1733149261791Initializing all the Stores at 1733149261792 (+1 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733149261792Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733149261793 (+1 ms)Cleaning up temporary data from old regions at 1733149261802 (+9 ms)Region opened successfully at 1733149261812 (+10 ms) 2024-12-02T14:21:01,812 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 11e72719bbe1d7dc9f1bb8dcb763d96c, disabling compactions & flushes 2024-12-02T14:21:01,812 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. 2024-12-02T14:21:01,812 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. 2024-12-02T14:21:01,812 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. after waiting 0 ms 2024-12-02T14:21:01,812 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. 2024-12-02T14:21:01,813 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. 
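The jittered split size in the open record above is also checkable from the printed values: desiredMaxFileSize is a 67108864-byte base scaled by (1 + jitterRate), i.e. 67108864 × (1 + 0.0708199292421341) ≈ 71861509, matching the value logged here; the reopen later in this test prints desiredMaxFileSize=64504903 with jitterRate=-0.03880204260349274, and 67108864 × (1 − 0.0388020426) ≈ 64504903 as well, so the relation holds in both records.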
2024-12-02T14:21:01,813 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 11e72719bbe1d7dc9f1bb8dcb763d96c: Waiting for close lock at 1733149261812Disabling compacts and flushes for region at 1733149261812Disabling writes for close at 1733149261812Writing region close event to WAL at 1733149261813 (+1 ms)Closed at 1733149261813 2024-12-02T14:21:02,255 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 11e72719bbe1d7dc9f1bb8dcb763d96c, NAME => 'table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c.', STARTKEY => '', ENDKEY => ''} 2024-12-02T14:21:02,274 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,275 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-02T14:21:02,277 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,278 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,282 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,283 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 11e72719bbe1d7dc9f1bb8dcb763d96c columnFamilyName a 2024-12-02T14:21:02,283 DEBUG [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-02T14:21:02,284 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] regionserver.HStore(327): Store=11e72719bbe1d7dc9f1bb8dcb763d96c/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-02T14:21:02,285 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,286 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, 
maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 11e72719bbe1d7dc9f1bb8dcb763d96c columnFamilyName b 2024-12-02T14:21:02,286 DEBUG [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-02T14:21:02,287 INFO [StoreOpener-11e72719bbe1d7dc9f1bb8dcb763d96c-1 {}] regionserver.HStore(327): Store=11e72719bbe1d7dc9f1bb8dcb763d96c/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-02T14:21:02,287 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,288 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,289 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,291 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/24cce2cd-5e57-badb-92df-656bc1349eb0/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,293 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,293 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,297 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,299 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 11e72719bbe1d7dc9f1bb8dcb763d96c; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=64504903, jitterRate=-0.03880204260349274}}}, org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL$FlushSpecificStoresPolicy@22f19497 2024-12-02T14:21:02,299 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for 11e72719bbe1d7dc9f1bb8dcb763d96c 2024-12-02T14:21:02,303 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 11e72719bbe1d7dc9f1bb8dcb763d96c: Running coprocessor pre-open hook at 1733149262278Writing region info on filesystem at 1733149262278Initializing all the Stores at 1733149262280 (+2 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', 
REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733149262280Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733149262281 (+1 ms)Cleaning up temporary data from old regions at 1733149262293 (+12 ms)Running coprocessor post-open hooks at 1733149262299 (+6 ms)Region opened successfully at 1733149262303 (+4 ms) 2024-12-02T14:21:04,935 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741849_1025 (size=7) 2024-12-02T14:21:04,935 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741851_1027 (size=40) 2024-12-02T14:21:05,327 INFO [pool-78-thread-1 {}] regionserver.HRegion(2902): Flushing 11e72719bbe1d7dc9f1bb8dcb763d96c 2/2 column families, dataSize=96 B heapSize=896 B 2024-12-02T14:21:07,785 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties 2024-12-02T14:21:08,356 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/a/56cefb00a706485f90bad67d49604e87 is 28, key is a/a:a/1733149262314/Put/seqid=0 2024-12-02T14:21:08,365 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741852_1028 (size=4945) 2024-12-02T14:21:08,365 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741852_1028 (size=4945) 2024-12-02T14:21:08,366 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741852_1028 (size=4945) 2024-12-02T14:21:08,367 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/a/56cefb00a706485f90bad67d49604e87 2024-12-02T14:21:08,397 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/b/a96959df775f4dd6bc724fc0dd5927cd is 28, key is a/b:b/1733149262314/Put/seqid=0 2024-12-02T14:21:08,404 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741853_1029 (size=4945) 2024-12-02T14:21:08,404 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741853_1029 (size=4945) 2024-12-02T14:21:08,406 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is 
added to blk_1073741853_1029 (size=4945) 2024-12-02T14:21:08,406 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/b/a96959df775f4dd6bc724fc0dd5927cd 2024-12-02T14:21:08,417 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/a/56cefb00a706485f90bad67d49604e87 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/a/56cefb00a706485f90bad67d49604e87 2024-12-02T14:21:08,427 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/a/56cefb00a706485f90bad67d49604e87, entries=1, sequenceid=6, filesize=4.8 K 2024-12-02T14:21:08,429 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/b/a96959df775f4dd6bc724fc0dd5927cd as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/b/a96959df775f4dd6bc724fc0dd5927cd 2024-12-02T14:21:08,444 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/b/a96959df775f4dd6bc724fc0dd5927cd, entries=1, sequenceid=6, filesize=4.8 K 2024-12-02T14:21:08,446 INFO [pool-78-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~96 B/96, heapSize ~864 B/864, currentSize=0 B/0 for 11e72719bbe1d7dc9f1bb8dcb763d96c in 3118ms, sequenceid=6, compaction requested=false 2024-12-02T14:21:08,446 DEBUG [pool-78-thread-1 {}] regionserver.HRegion(2603): Flush status journal for 11e72719bbe1d7dc9f1bb8dcb763d96c: 2024-12-02T14:21:08,446 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(676): Flush result:FLUSHED_NO_COMPACTION_NEEDED 2024-12-02T14:21:08,446 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(677): Flush succeeded:true 2024-12-02T14:21:08,452 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 11e72719bbe1d7dc9f1bb8dcb763d96c 1/2 column families, dataSize=48 B heapSize=704 B; a={dataSize=24 B, heapSize=352 B, offHeapSize=0 B} 2024-12-02T14:21:08,459 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/a/2bea0a4f16ba423c9093d94df3a2c866 is 28, key is a/a:a/1733149262314/Put/seqid=0 2024-12-02T14:21:08,467 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 
127.0.0.1:43509 is added to blk_1073741854_1030 (size=4945) 2024-12-02T14:21:08,467 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741854_1030 (size=4945) 2024-12-02T14:21:08,468 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741854_1030 (size=4945) 2024-12-02T14:21:08,468 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=10 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/a/2bea0a4f16ba423c9093d94df3a2c866 2024-12-02T14:21:08,480 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/a/2bea0a4f16ba423c9093d94df3a2c866 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/a/2bea0a4f16ba423c9093d94df3a2c866 2024-12-02T14:21:08,491 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/a/2bea0a4f16ba423c9093d94df3a2c866, entries=1, sequenceid=10, filesize=4.8 K 2024-12-02T14:21:08,493 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=24 B/24 for 11e72719bbe1d7dc9f1bb8dcb763d96c in 42ms, sequenceid=10, compaction requested=false 2024-12-02T14:21:08,493 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for 11e72719bbe1d7dc9f1bb8dcb763d96c: 2024-12-02T14:21:08,494 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 11e72719bbe1d7dc9f1bb8dcb763d96c, disabling compactions & flushes 2024-12-02T14:21:08,494 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. 2024-12-02T14:21:08,495 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. 2024-12-02T14:21:08,495 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. after waiting 0 ms 2024-12-02T14:21:08,495 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. 
2024-12-02T14:21:08,495 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 11e72719bbe1d7dc9f1bb8dcb763d96c 2/2 column families, dataSize=24 B heapSize=608 B 2024-12-02T14:21:08,502 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/b/283d22e2db9b490e862064b8abfe10ab is 28, key is a/b:b/1733149262314/Put/seqid=0 2024-12-02T14:21:08,512 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741855_1031 (size=4945) 2024-12-02T14:21:08,512 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741855_1031 (size=4945) 2024-12-02T14:21:08,512 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741855_1031 (size=4945) 2024-12-02T14:21:08,513 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=13 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/b/283d22e2db9b490e862064b8abfe10ab 2024-12-02T14:21:08,524 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/.tmp/b/283d22e2db9b490e862064b8abfe10ab as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/b/283d22e2db9b490e862064b8abfe10ab 2024-12-02T14:21:08,533 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/b/283d22e2db9b490e862064b8abfe10ab, entries=1, sequenceid=13, filesize=4.8 K 2024-12-02T14:21:08,535 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 11e72719bbe1d7dc9f1bb8dcb763d96c in 40ms, sequenceid=13, compaction requested=false 2024-12-02T14:21:08,542 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:38049/user/jenkins/test-data/24cce2cd-5e57-badb-92df-656bc1349eb0/data/default/table/11e72719bbe1d7dc9f1bb8dcb763d96c/recovered.edits/16.seqid, newMaxSeqId=16, maxSeqId=1 2024-12-02T14:21:08,543 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. 
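The HRegionFileSystem(442) "Committing .tmp/... as .../a/..." records above follow a two-phase publish pattern: the flusher writes the HFile under the region's .tmp directory, then moves it into the column-family directory with a rename, so readers never observe a partially written file. A minimal sketch of that pattern with java.nio, using illustrative paths rather than HBase's HRegionFileSystem:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardCopyOption;

    // Hedged sketch of the ".tmp -> family dir" commit seen above; the method
    // and path layout are illustrative, not HBase's actual implementation.
    class FlushCommitSketch {
      static Path commit(Path regionDir, String family, Path tmpHFile) throws IOException {
        Path dest = regionDir.resolve(family).resolve(tmpHFile.getFileName());
        Files.createDirectories(dest.getParent());
        // A rename within the same filesystem is atomic, so readers see either
        // no file or the complete flushed HFile, never a partial one.
        return Files.move(tmpHFile, dest, StandardCopyOption.ATOMIC_MOVE);
      }
    }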
2024-12-02T14:21:08,543 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 11e72719bbe1d7dc9f1bb8dcb763d96c: Waiting for close lock at 1733149268494Running coprocessor pre-close hooks at 1733149268494Disabling compacts and flushes for region at 1733149268494Disabling writes for close at 1733149268495 (+1 ms)Obtaining lock to block concurrent updates at 1733149268495Preparing flush snapshotting stores in 11e72719bbe1d7dc9f1bb8dcb763d96c at 1733149268495Finished memstore snapshotting table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c., syncing WAL and waiting on mvcc, flushsize=dataSize=24, getHeapSize=576, getOffHeapSize=0, getCellsCount=1 at 1733149268495Flushing stores of table,,1733149261781.11e72719bbe1d7dc9f1bb8dcb763d96c. at 1733149268496 (+1 ms)Flushing 11e72719bbe1d7dc9f1bb8dcb763d96c/b: creating writer at 1733149268496Flushing 11e72719bbe1d7dc9f1bb8dcb763d96c/b: appending metadata at 1733149268501 (+5 ms)Flushing 11e72719bbe1d7dc9f1bb8dcb763d96c/b: closing flushed file at 1733149268501Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@2aa6376b: reopening flushed file at 1733149268522 (+21 ms)Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 11e72719bbe1d7dc9f1bb8dcb763d96c in 40ms, sequenceid=13, compaction requested=false at 1733149268535 (+13 ms)Writing region close event to WAL at 1733149268537 (+2 ms)Running coprocessor post-close hooks at 1733149268543 (+6 ms)Closed at 1733149268543 2024-12-02T14:21:08,544 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:08,544 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:08,544 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:08,544 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:08,544 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:08,547 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741850_1026 (size=2357) 2024-12-02T14:21:08,547 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741850_1026 (size=2357) 2024-12-02T14:21:08,551 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/oldWALs 2024-12-02T14:21:08,551 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733149261773) 2024-12-02T14:21:08,560 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=208 (was 198) Potentially hanging thread: java.util.concurrent.ThreadPoolExecutor$Worker@3981181b[State = -1, empty queue] java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:401) java.base@17.0.11/java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:903) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: 
Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data6 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: java.util.concurrent.ThreadPoolExecutor$Worker@7d99e028[State = -1, empty queue] java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:401) java.base@17.0.11/java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:903) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data5 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:45954 [Waiting for operation #7] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) 
java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:46614 [Waiting for operation #9] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Timer for 'HBase' metrics system java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.util.TimerThread.mainLoop(Timer.java:563) java.base@17.0.11/java.util.TimerThread.run(Timer.java:516) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:41820 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=425 (was 423) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=283 (was 282) - SystemLoadAverage LEAK? -, ProcessCount=11 (was 11), AvailableMemoryMB=8837 (was 8903) 2024-12-02T14:21:08,568 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=208, OpenFileDescriptor=425, MaxFileDescriptor=1048576, SystemLoadAverage=283, ProcessCount=11, AvailableMemoryMB=8836 2024-12-02T14:21:08,582 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741856_1032 (size=7) 2024-12-02T14:21:08,583 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741856_1032 (size=7) 2024-12-02T14:21:08,583 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741856_1032 (size=7) 2024-12-02T14:21:08,584 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8 2024-12-02T14:21:08,585 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:08,587 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:08,596 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(244): No decryptEncryptedDataEncryptionKey method in DFSClient, should be hadoop version with HDFS-12396 java.lang.NoSuchMethodException: org.apache.hadoop.hdfs.DFSClient.decryptEncryptedDataEncryptionKey(org.apache.hadoop.fs.FileEncryptionInfo) at java.lang.Class.getDeclaredMethod(Class.java:2675) ~[?:?] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelperWithoutHDFS12396(FanOutOneBlockAsyncDFSOutputSaslHelper.java:183) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelper(FanOutOneBlockAsyncDFSOutputSaslHelper.java:242) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.<clinit>(FanOutOneBlockAsyncDFSOutputSaslHelper.java:253) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at java.lang.Class.forName0(Native Method) ~[?:?] at java.lang.Class.forName(Class.java:375) ~[?:?] at org.apache.hadoop.hbase.wal.AsyncFSWALProvider.load(AsyncFSWALProvider.java:150) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.getProviderClass(WALFactory.java:174) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:262) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:214) ~[classes/:?]
at org.apache.hadoop.hbase.HBaseTestingUtil.createWal(HBaseTestingUtil.java:2160) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2205) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2169) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFlushSequenceIdIsGreaterThanAllEditsInHFile(AbstractTestFSWAL.java:425) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
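The NoSuchMethodException above is expected rather than fatal: HBase probes DFSClient by reflection for the decryptEncryptedDataEncryptionKey method added by HDFS-12396 and, when the probe fails, falls back to the pre-HDFS-12396 transparent-crypto helper. A minimal, self-contained sketch of that probe pattern follows; the class and method names below are illustrative, not HBase's actual helper code.

import java.lang.reflect.Method;

// Sketch of reflection-based feature detection: look a method up reflectively
// and treat NoSuchMethodException as "feature absent". Illustrative only.
public final class FeatureProbe {
  /** Returns the named declared method if the class has it, else null. */
  static Method probe(Class<?> cls, String name, Class<?>... paramTypes) {
    try {
      return cls.getDeclaredMethod(name, paramTypes);
    } catch (NoSuchMethodException e) {
      return null; // feature absent; caller selects the fallback code path
    }
  }

  public static void main(String[] args) {
    // String.isBlank() exists on JDK 11+ but not JDK 8, so it makes a handy demo.
    boolean hasIsBlank = probe(String.class, "isBlank") != null;
    System.out.println("String.isBlank available: " + hasIsBlank);
  }
}

Treating NoSuchMethodException as "feature absent" is what lets one HBase binary run against several Hadoop versions without a compile-time dependency on the newer API.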
2024-12-02T14:21:08,600 INFO [Time-limited test {}] wal.WALFactory(196): Instantiating WALProvider of type class org.apache.hadoop.hbase.wal.AsyncFSWALProvider
2024-12-02T14:21:08,604 DEBUG [Time-limited test {}] channel.MultithreadEventLoopGroup(44): -Dio.netty.eventLoopThreads: 16
2024-12-02T14:21:08,616 DEBUG [Time-limited test {}] nio.NioEventLoop(110): -Dio.netty.noKeySetOptimization: false
2024-12-02T14:21:08,616 DEBUG [Time-limited test {}] nio.NioEventLoop(111): -Dio.netty.selectorAutoRebuildThreshold: 512
2024-12-02T14:21:08,632 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor defaultMonitorName
2024-12-02T14:21:08,636 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-02T14:21:08,637 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=hregion-10550032, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/WALs/hregion-10550032, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/oldWALs, maxLogs=1760
2024-12-02T14:21:08,654 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputHelper(524): When create output stream for /user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/WALs/hregion-10550032/hregion-10550032.1733149268637, exclude list is [], retry=0
2024-12-02T14:21:08,668 DEBUG [Time-limited test {}] channel.DefaultChannelId(84): -Dio.netty.processId: 6245 (auto-detected)
2024-12-02T14:21:08,671 DEBUG [Time-limited test {}] channel.DefaultChannelId(106): -Dio.netty.machineId: 02:42:ac:ff:fe:11:00:03 (auto-detected)
2024-12-02T14:21:08,695 DEBUG [AsyncFSWAL-1-2 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:43509,DS-3b0edb9f-cf2f-4b01-8d9e-52c3bfc7008e,DISK]
2024-12-02T14:21:08,695 DEBUG [AsyncFSWAL-1-3 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:32825,DS-b26a3641-b3ea-4185-b2ac-f20b249f581a,DISK]
2024-12-02T14:21:08,695 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:34475,DS-eafc4632-9690-4b7e-b992-4a9e1a12d3a7,DISK]
2024-12-02T14:21:08,699 DEBUG [AsyncFSWAL-1-2 {}] asyncfs.ProtobufDecoder(117): Hadoop 3.3 and above shades protobuf.
2024-12-02T14:21:08,729 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/WALs/hregion-10550032/hregion-10550032.1733149268637
2024-12-02T14:21:08,730 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new AsyncFSWAL writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411)]
2024-12-02T14:21:08,730 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 1434de274af9f07632e9c88d2b64296b, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testFlushSequenceIdIsGreaterThanAllEditsInHFile', {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45
2024-12-02T14:21:08,742 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741858_1034 (size=82)
2024-12-02T14:21:08,743 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741858_1034 (size=82)
2024-12-02T14:21:08,743 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741858_1034 (size=82)
2024-12-02T14:21:08,744 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-02T14:21:08,746 INFO [StoreOpener-1434de274af9f07632e9c88d2b64296b-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,748 INFO [StoreOpener-1434de274af9f07632e9c88d2b64296b-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 1434de274af9f07632e9c88d2b64296b columnFamilyName f
2024-12-02T14:21:08,748 DEBUG [StoreOpener-1434de274af9f07632e9c88d2b64296b-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-02T14:21:08,749 INFO [StoreOpener-1434de274af9f07632e9c88d2b64296b-1 {}] regionserver.HStore(327): Store=1434de274af9f07632e9c88d2b64296b/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-02T14:21:08,749 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,750 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,751 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,751 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,752 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,752 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,754 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,757 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-12-02T14:21:08,758 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 1434de274af9f07632e9c88d2b64296b; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=60463377, jitterRate=-0.09902547299861908}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-02T14:21:08,761 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 1434de274af9f07632e9c88d2b64296b: Writing region info on filesystem at 1733149268744Initializing all the Stores at 1733149268745 (+1 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733149268745Cleaning up temporary data from old regions at 1733149268752 (+7 ms)Region opened successfully at 1733149268760 (+8 ms)
2024-12-02T14:21:08,761 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 1434de274af9f07632e9c88d2b64296b, disabling compactions & flushes
2024-12-02T14:21:08,761 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.
2024-12-02T14:21:08,761 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.
2024-12-02T14:21:08,761 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b. after waiting 0 ms
2024-12-02T14:21:08,761 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.
2024-12-02T14:21:08,761 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.
2024-12-02T14:21:08,761 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 1434de274af9f07632e9c88d2b64296b: Waiting for close lock at 1733149268761Disabling compacts and flushes for region at 1733149268761Disabling writes for close at 1733149268761Writing region close event to WAL at 1733149268761Closed at 1733149268761
2024-12-02T14:21:08,766 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741857_1033 (size=93)
2024-12-02T14:21:08,766 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741857_1033 (size=93)
2024-12-02T14:21:08,766 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741857_1033 (size=93)
2024-12-02T14:21:08,769 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/oldWALs
2024-12-02T14:21:08,769 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: AsyncFSWAL hregion-10550032:(num 1733149268637)
2024-12-02T14:21:08,772 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
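The "New WAL" and "Closed WAL" records above also show the provider's file-naming convention: a WAL file name is the configured prefix plus the creation time in epoch milliseconds, so prefix hregion-10550032 rolled at 14:21:08,637 becomes hregion-10550032.1733149268637. A tiny sketch of that scheme; the helper class below is illustrative, not HBase's code.

// Sketch of the WAL naming convention visible in the records above:
// <prefix> + "." + <creation time in epoch millis>.
public final class WalName {
  static String walFileName(String prefix, long creationEpochMillis) {
    return prefix + "." + creationEpochMillis;
  }

  public static void main(String[] args) {
    // Prints "hregion-10550032.1733149268637", matching the log above.
    System.out.println(walFileName("hregion-10550032", 1733149268637L));
  }
}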
2024-12-02T14:21:08,772 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/testFlushSequenceIdIsGreaterThanAllEditsInHFile, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/testFlushSequenceIdIsGreaterThanAllEditsInHFile, maxLogs=1760
2024-12-02T14:21:08,773 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149268773
2024-12-02T14:21:08,779 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/testFlushSequenceIdIsGreaterThanAllEditsInHFile/wal.1733149268773
2024-12-02T14:21:08,780 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411)]
2024-12-02T14:21:08,781 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-02T14:21:08,782 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 1434de274af9f07632e9c88d2b64296b, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.', STARTKEY => '', ENDKEY => ''}
2024-12-02T14:21:08,782 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-02T14:21:08,783 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,783 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,784 INFO [StoreOpener-1434de274af9f07632e9c88d2b64296b-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,786 INFO [StoreOpener-1434de274af9f07632e9c88d2b64296b-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 1434de274af9f07632e9c88d2b64296b columnFamilyName f
2024-12-02T14:21:08,786 DEBUG [StoreOpener-1434de274af9f07632e9c88d2b64296b-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-02T14:21:08,786 INFO [StoreOpener-1434de274af9f07632e9c88d2b64296b-1 {}] regionserver.HStore(327): Store=1434de274af9f07632e9c88d2b64296b/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-02T14:21:08,786 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,787 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,788 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,790 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,790 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,790 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,792 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 1434de274af9f07632e9c88d2b64296b
2024-12-02T14:21:08,793 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 1434de274af9f07632e9c88d2b64296b; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=66692741, jitterRate=-0.006200715899467468}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-02T14:21:08,796 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 1434de274af9f07632e9c88d2b64296b: Writing region info on filesystem at 1733149268783Initializing all the Stores at 1733149268784 (+1 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733149268784Cleaning up temporary data from old regions at 1733149268790 (+6 ms)Region opened successfully at 1733149268796 (+6 ms)
2024-12-02T14:21:08,812 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.log.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir so I do NOT create it in target/test-data/995c9171-0731-9b7d-0077-08657522f291
2024-12-02T14:21:08,812 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.log.dir property value differs in configuration and system: Configuration=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/../logs while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir Erasing configuration value by system value.
2024-12-02T14:21:08,812 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.tmp.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.tmp.dir so I do NOT create it in target/test-data/995c9171-0731-9b7d-0077-08657522f291
2024-12-02T14:21:08,812 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.tmp.dir property value differs in configuration and system: Configuration=/tmp/hadoop-jenkins while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.tmp.dir Erasing configuration value by system value.
2024-12-02T14:21:08,812 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/995c9171-0731-9b7d-0077-08657522f291
2024-12-02T14:21:08,838 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 1434de274af9f07632e9c88d2b64296b 1/1 column families, dataSize=1.14 KB heapSize=2.13 KB
2024-12-02T14:21:08,938 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,039 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,139 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,239 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,340 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,440 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,541 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,641 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,742 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,842 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,943 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:09,970 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b/.tmp/f/7221ef80d4de4841b8691d02075a8d90 is 121, key is testFlushSequenceIdIsGreaterThanAllEditsInHFile/f:x0/1733149268812/Put/seqid=0
2024-12-02T14:21:09,980 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741860_1036 (size=6333)
2024-12-02T14:21:09,981 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741860_1036 (size=6333)
2024-12-02T14:21:09,982 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741860_1036 (size=6333)
2024-12-02T14:21:09,983 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=1.14 KB at sequenceid=23 (bloomFilter=true), to=hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b/.tmp/f/7221ef80d4de4841b8691d02075a8d90
2024-12-02T14:21:09,996 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b/.tmp/f/7221ef80d4de4841b8691d02075a8d90 as hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b/f/7221ef80d4de4841b8691d02075a8d90
2024-12-02T14:21:10,004 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/1434de274af9f07632e9c88d2b64296b/f/7221ef80d4de4841b8691d02075a8d90, entries=10, sequenceid=23, filesize=6.2 K
2024-12-02T14:21:10,105 DEBUG [FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-02T14:21:10,107 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~1.14 KB/1170, heapSize ~2.11 KB/2160, currentSize=0 B/0 for 1434de274af9f07632e9c88d2b64296b in 1269ms, sequenceid=23, compaction requested=false
2024-12-02T14:21:10,107 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for 1434de274af9f07632e9c88d2b64296b:
2024-12-02T14:21:10,108 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 1434de274af9f07632e9c88d2b64296b, disabling compactions & flushes
2024-12-02T14:21:10,108 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.
2024-12-02T14:21:10,108 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.
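The flush above is a two-phase commit at the filesystem level: the HFile is first written under the region's .tmp/f/ directory and only then published into f/ via a rename, so readers never observe a half-written store file. A minimal sketch of that write-to-temp-then-rename pattern against the Hadoop FileSystem API follows; the paths and payload are illustrative, and this is not HBase's actual HStore code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Sketch of the flush commit protocol suggested by the records above:
// write the new file under .tmp, then publish it with a rename.
public final class TmpThenRename {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration()); // local FS by default
    Path tmp = new Path("/tmp/demo-store/.tmp/f/hfile-0001");
    Path dst = new Path("/tmp/demo-store/f/hfile-0001");

    fs.mkdirs(tmp.getParent());
    fs.mkdirs(dst.getParent());
    try (FSDataOutputStream out = fs.create(tmp, true)) {
      out.writeBytes("flushed cells would go here");
    }
    // The file becomes visible at its final path only via this rename,
    // which HDFS performs atomically at the NameNode.
    if (!fs.rename(tmp, dst)) {
      throw new java.io.IOException("commit failed for " + dst);
    }
  }
}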
2024-12-02T14:21:10,108 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b. after waiting 0 ms
2024-12-02T14:21:10,108 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.
2024-12-02T14:21:10,109 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733149268589.1434de274af9f07632e9c88d2b64296b.
2024-12-02T14:21:10,109 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 1434de274af9f07632e9c88d2b64296b: Waiting for close lock at 1733149270108Disabling compacts and flushes for region at 1733149270108Disabling writes for close at 1733149270108Writing region close event to WAL at 1733149270109 (+1 ms)Closed at 1733149270109
2024-12-02T14:21:10,110 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:10,110 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:10,110 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:10,110 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:10,110 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-02T14:21:10,114 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741859_1035 (size=16537)
2024-12-02T14:21:10,114 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741859_1035 (size=16537)
2024-12-02T14:21:10,114 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741859_1035 (size=16537)
2024-12-02T14:21:10,122 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/cd7ebb44-9fe9-4d15-6a01-90add0bd99e1/testFlushSequenceIdIsGreaterThanAllEditsInHFile
2024-12-02T14:21:10,122 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: wal:(num 1733149268773)
2024-12-02T14:21:10,135 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=216 (was 208)

Potentially hanging thread: AsyncFSWAL-1-1
  java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
  java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
  app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
  app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
  app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
  app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
  app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
  app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
  java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:45954 [Waiting for operation #8]
  java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
  java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
  app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
  app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
  app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
  app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
  java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
  java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
  java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
  java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
  app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
  app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
  java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_1932504778_22 at /127.0.0.1:46614 [Waiting for operation #11]
  java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
  java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
  app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
  app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
  app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
  app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
  java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
  java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
  java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
  java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
  app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
  app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
  java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: AsyncFSWAL-1-2
  java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
  java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
  app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
  app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
  app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
  app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
  app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
  app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
  java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: AsyncFSWAL-1-3
  java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
  java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
  app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
  app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
  app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
  app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
  app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
  app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
  java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:41820 [Waiting for operation #4]
  java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
  java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
  java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
  app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
  app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
  app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
  app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
  java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
  java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
  java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
  java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
  app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
  app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
  java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

- Thread LEAK? -, OpenFileDescriptor=457 (was 425) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=276 (was 283), ProcessCount=11 (was 11), AvailableMemoryMB=8840 (was 8836) - AvailableMemoryMB LEAK? -
2024-12-02T14:21:10,144 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=216, OpenFileDescriptor=457, MaxFileDescriptor=1048576, SystemLoadAverage=276, ProcessCount=11, AvailableMemoryMB=8840
2024-12-02T14:21:10,159 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741861_1037 (size=7)
2024-12-02T14:21:10,159 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741861_1037 (size=7)
2024-12-02T14:21:10,160 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741861_1037 (size=7)
2024-12-02T14:21:10,162 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8
2024-12-02T14:21:10,162 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-02T14:21:10,164 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-02T14:21:10,170 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-02T14:21:10,170 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/oldWALs, maxLogs=1760
2024-12-02T14:21:10,170 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149270170
2024-12-02T14:21:10,178 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270170
2024-12-02T14:21:10,179 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:37079:37079)]
2024-12-02T14:21:10,180 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149270180
2024-12-02T14:21:10,188 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149270181
2024-12-02T14:21:10,192 WARN [Time-limited test {}] wal.AbstractProtobufLogWriter(199): Init output failed, path=hdfs://localhost:38049/user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270181
java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?]
    at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?]
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.create(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy45.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?]
    ... 41 more
2024-12-02T14:21:10,194 DEBUG [Time-limited test {}] wal.FSHLogProvider(93): Error instantiating log writer.
java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?]
    at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?]
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at
org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy44.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] 
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy45.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 41 more 2024-12-02T14:21:10,208 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=243 (was 216) Potentially hanging thread: PacketResponder: BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:43509, 127.0.0.1:34475] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:46028 [Receiving block BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) 
java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:46678 [Receiving block BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: ResponseProcessor for block BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) 
java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: PacketResponder: BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:43509, 127.0.0.1:32825] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: FSHLog-0-hdfs://localhost:38049/user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20-prefix:default 
java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:34475] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:46688 [Receiving block BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) 
app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:41918 [Receiving block BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:46038 [Receiving block BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) 
app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270180 block BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270170 block BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: ResponseProcessor for block BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) 
app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:41902 [Receiving block BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:32825] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) 
java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=483 (was 457) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=276 (was 276), ProcessCount=11 (was 11), AvailableMemoryMB=8839 (was 8840) 2024-12-02T14:21:10,217 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=243, OpenFileDescriptor=483, MaxFileDescriptor=1048576, SystemLoadAverage=276, ProcessCount=11, AvailableMemoryMB=8838 2024-12-02T14:21:10,230 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741864_1040 (size=7) 2024-12-02T14:21:10,230 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741864_1040 (size=7) 2024-12-02T14:21:10,230 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741864_1040 (size=7) 2024-12-02T14:21:10,232 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8 2024-12-02T14:21:10,232 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:10,233 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:10,240 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-12-02T14:21:10,240 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/dc1e8df6-e641-7734-8363-161d09c86cee/testWALCoprocessorLoaded, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/dc1e8df6-e641-7734-8363-161d09c86cee/oldWALs, maxLogs=1760 2024-12-02T14:21:10,241 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149270241 2024-12-02T14:21:10,277 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/dc1e8df6-e641-7734-8363-161d09c86cee/testWALCoprocessorLoaded/wal.1733149270241 2024-12-02T14:21:10,285 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:33577:33577)] 2024-12-02T14:21:10,289 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,289 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,289 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,289 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,289 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,294 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741865_1041 (size=93) 2024-12-02T14:21:10,295 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741865_1041 (size=93) 2024-12-02T14:21:10,295 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741865_1041 (size=93) 2024-12-02T14:21:10,303 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/dc1e8df6-e641-7734-8363-161d09c86cee/oldWALs 2024-12-02T14:21:10,303 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733149270241) 2024-12-02T14:21:10,316 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=248 (was 243) - Thread LEAK? -, OpenFileDescriptor=495 (was 483) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=276 (was 276), ProcessCount=11 (was 11), AvailableMemoryMB=8843 (was 8838) - AvailableMemoryMB LEAK? 
- 2024-12-02T14:21:10,325 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=248, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=276, ProcessCount=11, AvailableMemoryMB=8843 2024-12-02T14:21:10,338 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741866_1042 (size=7) 2024-12-02T14:21:10,339 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741866_1042 (size=7) 2024-12-02T14:21:10,340 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741866_1042 (size=7) 2024-12-02T14:21:10,341 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8 2024-12-02T14:21:10,341 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:10,343 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:10,349 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-02T14:21:10,350 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/5c12628f-5d22-ff57-7b35-c7080ae1e0c9/testSyncNoAppend, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/5c12628f-5d22-ff57-7b35-c7080ae1e0c9/testSyncNoAppend, maxLogs=1760 2024-12-02T14:21:10,350 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149270350 2024-12-02T14:21:10,358 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/5c12628f-5d22-ff57-7b35-c7080ae1e0c9/testSyncNoAppend/wal.1733149270350 2024-12-02T14:21:10,358 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411)] 2024-12-02T14:21:10,360 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,360 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,360 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,360 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,360 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,362 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741867_1043 (size=93) 2024-12-02T14:21:10,363 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741867_1043 (size=93) 2024-12-02T14:21:10,364 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741867_1043 (size=93) 2024-12-02T14:21:10,367 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to 
/user/jenkins/test-data/5c12628f-5d22-ff57-7b35-c7080ae1e0c9/testSyncNoAppend 2024-12-02T14:21:10,367 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733149270350) 2024-12-02T14:21:10,376 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=253 (was 248) - Thread LEAK? -, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=276 (was 276), ProcessCount=11 (was 11), AvailableMemoryMB=8840 (was 8843) 2024-12-02T14:21:10,385 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=253, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=276, ProcessCount=11, AvailableMemoryMB=8837 2024-12-02T14:21:10,395 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741868_1044 (size=7) 2024-12-02T14:21:10,397 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741868_1044 (size=7) 2024-12-02T14:21:10,397 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741868_1044 (size=7) 2024-12-02T14:21:10,398 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8 2024-12-02T14:21:10,398 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:10,400 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:10,405 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-12-02T14:21:10,405 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/29965132-8e67-50a6-e431-1002de286cd2/testWriteEntryCanBeNull, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/29965132-8e67-50a6-e431-1002de286cd2/testWriteEntryCanBeNull, maxLogs=1760 2024-12-02T14:21:10,406 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149270406 2024-12-02T14:21:10,414 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/29965132-8e67-50a6-e431-1002de286cd2/testWriteEntryCanBeNull/wal.1733149270406 2024-12-02T14:21:10,416 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37079:37079),(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411)] 2024-12-02T14:21:10,417 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,417 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,418 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,418 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,418 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:10,420 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741869_1045 (size=93) 2024-12-02T14:21:10,421 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741869_1045 (size=93) 2024-12-02T14:21:10,421 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741869_1045 (size=93) 2024-12-02T14:21:10,424 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/29965132-8e67-50a6-e431-1002de286cd2/testWriteEntryCanBeNull 2024-12-02T14:21:10,424 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733149270406) 2024-12-02T14:21:10,439 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=258 (was 253) - Thread LEAK? 
-, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=276 (was 276), ProcessCount=11 (was 11), AvailableMemoryMB=8810 (was 8837) 2024-12-02T14:21:10,450 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=258, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=276, ProcessCount=11, AvailableMemoryMB=8807 2024-12-02T14:21:10,463 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741870_1046 (size=7) 2024-12-02T14:21:10,463 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741870_1046 (size=7) 2024-12-02T14:21:10,464 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741870_1046 (size=7) 2024-12-02T14:21:10,465 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45 with version=8 2024-12-02T14:21:10,465 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:10,467 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-02T14:21:10,472 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-12-02T14:21:10,473 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/testUnflushedSeqIdTrackingWithAsyncWal, archiveDir=hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/oldWALs, maxLogs=1760 2024-12-02T14:21:10,473 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733149270473 2024-12-02T14:21:10,485 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/testUnflushedSeqIdTrackingWithAsyncWal/wal.1733149270473 2024-12-02T14:21:10,491 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33577:33577),(127.0.0.1/127.0.0.1:35411:35411),(127.0.0.1/127.0.0.1:37079:37079)] 2024-12-02T14:21:10,494 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 8ee1f02942d85835540e9ff648aac041, NAME => 'table,,1733149270494.8ee1f02942d85835540e9ff648aac041.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a 2024-12-02T14:21:10,506 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741872_1048 (size=40) 2024-12-02T14:21:10,508 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741872_1048 (size=40) 2024-12-02T14:21:10,508 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741872_1048 (size=40) 2024-12-02T14:21:10,509 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733149270494.8ee1f02942d85835540e9ff648aac041.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-02T14:21:10,511 INFO [StoreOpener-8ee1f02942d85835540e9ff648aac041-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,513 INFO [StoreOpener-8ee1f02942d85835540e9ff648aac041-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 
8ee1f02942d85835540e9ff648aac041 columnFamilyName b 2024-12-02T14:21:10,513 DEBUG [StoreOpener-8ee1f02942d85835540e9ff648aac041-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-02T14:21:10,514 INFO [StoreOpener-8ee1f02942d85835540e9ff648aac041-1 {}] regionserver.HStore(327): Store=8ee1f02942d85835540e9ff648aac041/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-02T14:21:10,514 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,515 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/table/8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,515 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/table/8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,516 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/1d8432a4-c9b9-6f54-d064-c3da96e20dd2/data/default/table/8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,516 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,516 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,519 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,525 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:38049/user/jenkins/test-data/1d8432a4-c9b9-6f54-d064-c3da96e20dd2/data/default/table/8ee1f02942d85835540e9ff648aac041/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-02T14:21:10,527 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 8ee1f02942d85835540e9ff648aac041; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=72092749, jitterRate=0.07426567375659943}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-12-02T14:21:10,531 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 8ee1f02942d85835540e9ff648aac041: Writing region info on filesystem at 1733149270509Initializing all the Stores at 1733149270510 (+1 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733149270510Cleaning up temporary data from old regions at 1733149270516 (+6 ms)Region opened successfully at 1733149270530 (+14 ms) 2024-12-02T14:21:10,531 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 8ee1f02942d85835540e9ff648aac041, disabling compactions & flushes 2024-12-02T14:21:10,531 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region 
table,,1733149270494.8ee1f02942d85835540e9ff648aac041. 2024-12-02T14:21:10,531 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733149270494.8ee1f02942d85835540e9ff648aac041. 2024-12-02T14:21:10,531 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733149270494.8ee1f02942d85835540e9ff648aac041. after waiting 0 ms 2024-12-02T14:21:10,531 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733149270494.8ee1f02942d85835540e9ff648aac041. 2024-12-02T14:21:10,533 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733149270494.8ee1f02942d85835540e9ff648aac041. 2024-12-02T14:21:10,533 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 8ee1f02942d85835540e9ff648aac041: Waiting for close lock at 1733149270531Disabling compacts and flushes for region at 1733149270531Disabling writes for close at 1733149270531Writing region close event to WAL at 1733149270533 (+2 ms)Closed at 1733149270533 2024-12-02T14:21:10,534 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 8ee1f02942d85835540e9ff648aac041, NAME => 'table,,1733149270494.8ee1f02942d85835540e9ff648aac041.', STARTKEY => '', ENDKEY => ''} 2024-12-02T14:21:10,535 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,535 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733149270494.8ee1f02942d85835540e9ff648aac041.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-02T14:21:10,536 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,536 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,541 INFO [StoreOpener-8ee1f02942d85835540e9ff648aac041-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,542 INFO [StoreOpener-8ee1f02942d85835540e9ff648aac041-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 8ee1f02942d85835540e9ff648aac041 columnFamilyName b 2024-12-02T14:21:10,542 DEBUG [StoreOpener-8ee1f02942d85835540e9ff648aac041-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-02T14:21:10,543 INFO [StoreOpener-8ee1f02942d85835540e9ff648aac041-1 {}] regionserver.HStore(327): 
Store=8ee1f02942d85835540e9ff648aac041/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-02T14:21:10,543 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,544 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/table/8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,545 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/data/default/table/8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,547 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:38049/user/jenkins/test-data/1d8432a4-c9b9-6f54-d064-c3da96e20dd2/data/default/table/8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,547 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,547 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,550 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,552 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 8ee1f02942d85835540e9ff648aac041; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=73806216, jitterRate=0.09979832172393799}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-12-02T14:21:10,552 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for 8ee1f02942d85835540e9ff648aac041 2024-12-02T14:21:10,554 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 8ee1f02942d85835540e9ff648aac041: Running coprocessor pre-open hook at 1733149270536Writing region info on filesystem at 1733149270536Initializing all the Stores at 1733149270540 (+4 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733149270540Cleaning up temporary data from old regions at 1733149270547 (+7 ms)Running coprocessor post-open hooks at 1733149270552 (+5 ms)Region opened successfully at 1733149270554 (+2 ms) 2024-12-02T14:21:10,598 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor 2024-12-02T14:21:10,598 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor Metrics about HBase WALObservers 2024-12-02T14:21:10,599 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=TableRequests_Namespace_default_table_table 2024-12-02T14:21:10,599 INFO 
[HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=TableRequests_Namespace_default_table_table Metrics about Tables on a single HBase RegionServer 2024-12-02T14:21:13,561 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1722): Closing 8ee1f02942d85835540e9ff648aac041, disabling compactions & flushes 2024-12-02T14:21:13,561 INFO [pool-106-thread-1 {}] regionserver.HRegion(1755): Closing region table,,1733149270494.8ee1f02942d85835540e9ff648aac041. 2024-12-02T14:21:13,561 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733149270494.8ee1f02942d85835540e9ff648aac041. 2024-12-02T14:21:13,562 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1843): Acquired close lock on table,,1733149270494.8ee1f02942d85835540e9ff648aac041. after waiting 0 ms 2024-12-02T14:21:13,562 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1853): Updates disabled for region table,,1733149270494.8ee1f02942d85835540e9ff648aac041. 2024-12-02T14:21:13,563 INFO [pool-106-thread-1 {}] regionserver.HRegion(2902): Flushing 8ee1f02942d85835540e9ff648aac041 1/1 column families, dataSize=48 B heapSize=448 B 2024-12-02T14:21:14,046 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties 2024-12-02T14:21:16,580 DEBUG [pool-106-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/8ee1f02942d85835540e9ff648aac041/.tmp/b/df04ac9ee33d4ddf833c73669a8f62f3 is 28, key is b/b:b/1733149270557/Put/seqid=0 2024-12-02T14:21:16,588 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741873_1049 (size=4945) 2024-12-02T14:21:16,589 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741873_1049 (size=4945) 2024-12-02T14:21:16,589 INFO [pool-106-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/8ee1f02942d85835540e9ff648aac041/.tmp/b/df04ac9ee33d4ddf833c73669a8f62f3 2024-12-02T14:21:16,589 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741873_1049 (size=4945) 2024-12-02T14:21:16,599 DEBUG [pool-106-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/8ee1f02942d85835540e9ff648aac041/.tmp/b/df04ac9ee33d4ddf833c73669a8f62f3 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/8ee1f02942d85835540e9ff648aac041/b/df04ac9ee33d4ddf833c73669a8f62f3 2024-12-02T14:21:16,608 INFO [pool-106-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/8ee1f02942d85835540e9ff648aac041/b/df04ac9ee33d4ddf833c73669a8f62f3, entries=1, sequenceid=6, filesize=4.8 K 
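The flush records above correspond to a single cell (key b/b:b, written at 1733149270557) being snapshotted out of the memstore and committed as a ~4.8 K HFile under .tmp/b. In the log the flush is actually driven by the region close; the hypothetical sketch below calls it explicitly just to make the step visible. Variable names are illustrative, not the test's actual code, and signatures follow the HBase 2.x server-side API; it assumes the caller already holds an opened HRegion such as 8ee1f02942d85835540e9ff648aac041.

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;

public final class PutThenFlushSketch {
  // Sketch only: put one cell into family 'b', force a flush, then close the region.
  static void putAndFlush(HRegion region) throws java.io.IOException {
    byte[] family = Bytes.toBytes("b");
    Put put = new Put(Bytes.toBytes("b"));
    put.addColumn(family, Bytes.toBytes("b"), Bytes.toBytes("value"));
    region.put(put);      // lands in the memstore and the WAL
    region.flush(true);   // forces the memstore out to an HFile, as in the records above
    region.close();       // produces the "Closing region ..." / close-journal records
  }
}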
2024-12-02T14:21:16,610 INFO [pool-106-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 8ee1f02942d85835540e9ff648aac041 in 3047ms, sequenceid=6, compaction requested=false 2024-12-02T14:21:16,616 DEBUG [pool-106-thread-1 {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:38049/user/jenkins/test-data/1d8432a4-c9b9-6f54-d064-c3da96e20dd2/data/default/table/8ee1f02942d85835540e9ff648aac041/recovered.edits/9.seqid, newMaxSeqId=9, maxSeqId=1 2024-12-02T14:21:16,616 INFO [pool-106-thread-1 {}] regionserver.HRegion(1973): Closed table,,1733149270494.8ee1f02942d85835540e9ff648aac041. 2024-12-02T14:21:16,617 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1676): Region close journal for 8ee1f02942d85835540e9ff648aac041: Waiting for close lock at 1733149273561Running coprocessor pre-close hooks at 1733149273561Disabling compacts and flushes for region at 1733149273561Disabling writes for close at 1733149273562 (+1 ms)Obtaining lock to block concurrent updates at 1733149273563 (+1 ms)Preparing flush snapshotting stores in 8ee1f02942d85835540e9ff648aac041 at 1733149273563Finished memstore snapshotting table,,1733149270494.8ee1f02942d85835540e9ff648aac041., syncing WAL and waiting on mvcc, flushsize=dataSize=48, getHeapSize=432, getOffHeapSize=0, getCellsCount=2 at 1733149273564 (+1 ms)Flushing stores of table,,1733149270494.8ee1f02942d85835540e9ff648aac041. at 1733149276562 (+2998 ms)Flushing 8ee1f02942d85835540e9ff648aac041/b: creating writer at 1733149276562Flushing 8ee1f02942d85835540e9ff648aac041/b: appending metadata at 1733149276579 (+17 ms)Flushing 8ee1f02942d85835540e9ff648aac041/b: closing flushed file at 1733149276579Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@48ce6ca3: reopening flushed file at 1733149276598 (+19 ms)Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 8ee1f02942d85835540e9ff648aac041 in 3047ms, sequenceid=6, compaction requested=false at 1733149276610 (+12 ms)Writing region close event to WAL at 1733149276610Running coprocessor post-close hooks at 1733149276616 (+6 ms)Closed at 1733149276616 2024-12-02T14:21:16,617 INFO [pool-106-thread-1 {}] wal.AbstractTestFSWAL(620): Close result:{[B@c3303b=[/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/data/default/table/8ee1f02942d85835540e9ff648aac041/b/df04ac9ee33d4ddf833c73669a8f62f3]} 2024-12-02T14:21:16,617 WARN [Time-limited test {}] regionserver.HRegion(1707): Region table,,1733149270494.8ee1f02942d85835540e9ff648aac041. 
already closed 2024-12-02T14:21:16,617 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 8ee1f02942d85835540e9ff648aac041: Waiting for close lock at 1733149276617 2024-12-02T14:21:16,618 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:16,618 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:16,618 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:16,618 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:16,618 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-02T14:21:16,621 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34475 is added to blk_1073741871_1047 (size=1206) 2024-12-02T14:21:16,621 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:32825 is added to blk_1073741871_1047 (size=1206) 2024-12-02T14:21:16,621 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43509 is added to blk_1073741871_1047 (size=1206) 2024-12-02T14:21:16,624 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/2824abdc-f853-185b-880e-3d39e3c92f45/oldWALs 2024-12-02T14:21:16,624 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733149270473) 2024-12-02T14:21:16,635 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=262 (was 258) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:46162 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:42016 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) 
app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=503 (was 495) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=278 (was 276) - SystemLoadAverage LEAK? -, ProcessCount=11 (was 11), AvailableMemoryMB=8486 (was 8807) 2024-12-02T14:21:16,635 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1019): Shutting down minicluster 2024-12-02T14:21:16,636 WARN [PacketResponder: BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039, type=LAST_IN_PIPELINE {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run(): org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-1650638000-172.17.0.3-1733149251009:1073741863 at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-12-02T14:21:16,637 WARN [ResponseProcessor for block BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039 java.io.IOException: Bad response ERROR for BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039 from datanode DatanodeInfoWithStorage[127.0.0.1:32825,DS-b26a3641-b3ea-4185-b2ac-f20b249f581a,DISK] at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?] 2024-12-02T14:21:16,638 WARN [DataStreamer for file /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270180 block BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039 {}] hdfs.DataStreamer(1731): Error Recovery for BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039 in pipeline [DatanodeInfoWithStorage[127.0.0.1:34475,DS-eafc4632-9690-4b7e-b992-4a9e1a12d3a7,DISK], DatanodeInfoWithStorage[127.0.0.1:43509,DS-3b0edb9f-cf2f-4b01-8d9e-52c3bfc7008e,DISK], DatanodeInfoWithStorage[127.0.0.1:32825,DS-b26a3641-b3ea-4185-b2ac-f20b249f581a,DISK]]: datanode 2(DatanodeInfoWithStorage[127.0.0.1:32825,DS-b26a3641-b3ea-4185-b2ac-f20b249f581a,DISK]) is bad. 
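The WARN records above come from the mini-cluster shutdown racing with WAL streams left open by the earlier testFailedToCreateWALIfParentRenamed run: one datanode in the write pipeline is declared bad and the client then asks the NameNode for a replacement via getAdditionalDatanode. Whether a replacement is requested at all is governed by the HDFS client's replace-datanode-on-failure settings; a hypothetical way to relax that policy for a small three-datanode test cluster (the keys are standard HDFS client configuration, the values shown are only an example, not what this test run used) is:

import org.apache.hadoop.conf.Configuration;

public final class PipelineRecoveryPolicySketch {
  static Configuration relaxedReplacementPolicy() {
    Configuration conf = new Configuration();
    // Keep the feature enabled but never demand an extra datanode when one fails,
    // so pipeline recovery on a 3-node mini-cluster does not need a spare node.
    conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.enable", true);
    conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
    return conf;
  }
}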
2024-12-02T14:21:16,638 WARN [PacketResponder: BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:43509, 127.0.0.1:32825] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run(): java.io.IOException: Connection reset by peer at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?] at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?] at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?] at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?] at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?] at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?] at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?] at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?] at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?] at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-12-02T14:21:16,638 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:46688 [Receiving block BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039] {}] datanode.BlockReceiver(316): Block 1073741863 has not released the reserved bytes. Releasing 2097067 bytes as part of close. 2024-12-02T14:21:16,638 WARN [PacketResponder: BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:32825] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run(): java.io.IOException: Connection reset by peer at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?] at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?] at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?] at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?] at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?] at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?] at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?] at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?] at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?] at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?] 
at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-12-02T14:21:16,639 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_-890839505_22 at /127.0.0.1:46038 [Receiving block BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039] {}] datanode.BlockReceiver(316): Block 1073741863 has not released the reserved bytes. Releasing 2097067 bytes as part of close. 2024-12-02T14:21:16,643 WARN [DataStreamer for file /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270180 block BP-1650638000-172.17.0.3-1733149251009:blk_1073741863_1039 {}] hdfs.DataStreamer(859): DataStreamer Exception org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270180 (inode 16549) Holder DFSClient_NONMAPREDUCE_-890839505_22 does not have any open files. at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?] 
at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?] 
at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?] 2024-12-02T14:21:16,644 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270180 with renewLeaseKey: DEFAULT_16549 org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270180 (inode 16549) Holder DFSClient_NONMAPREDUCE_-890839505_22 does not have any open files. at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] 
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] 
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
	at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	[... the reflective proxy cycle above (DelegatingMethodAccessorImpl.invoke / Method.invoke / HFileSystem$1.invoke / $Proxy46.getAdditionalDatanode) repeats verbatim many times; later iterations enter through jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) and jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) instead of GeneratedMethodAccessor79 ...]
	at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
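[Editor's note] The long run of repeated frames above is the signature of a dynamic-proxy wrapper: HFileSystem$1 (HFileSystem.java:363) sits between the JDK-generated $Proxy46 and the underlying namenode client, so every getAdditionalDatanode call is dispatched reflectively through Method.invoke. A minimal sketch of that pattern follows; the names are hypothetical (NamenodeCalls is a stand-in interface, not the real ClientProtocol), and this is not HBase's actual source.

    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.Method;
    import java.lang.reflect.Proxy;

    interface NamenodeCalls { // stand-in for the real ClientProtocol interface
        String getAdditionalDatanode(String block);
    }

    public class ProxyWrapSketch {
        static NamenodeCalls wrap(NamenodeCalls delegate) {
            InvocationHandler handler = (proxy, method, args) -> {
                // A real handler could inspect or adjust the call here before
                // delegating; the delegation itself is a reflective call, which
                // is why Method.invoke appears in every cycle of the trace.
                return method.invoke(delegate, args);
            };
            return (NamenodeCalls) Proxy.newProxyInstance(
                    NamenodeCalls.class.getClassLoader(),
                    new Class<?>[] { NamenodeCalls.class },
                    handler);
        }

        public static void main(String[] args) {
            NamenodeCalls wrapped = wrap(block -> "replacement-datanode-for-" + block);
            // Call path: $ProxyN.getAdditionalDatanode -> handler -> Method.invoke
            // -> delegate, matching the frame pattern repeated in the trace above.
            System.out.println(wrapped.getAdditionalDatanode("blk_1073741862_1038"));
        }
    }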
2024-12-02T14:21:16,647 WARN [PacketResponder: BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:43509, 127.0.0.1:34475] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-1650638000-172.17.0.3-1733149251009:1073741862
	at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
	at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-02T14:21:16,647 WARN [ResponseProcessor for block BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038
java.io.EOFException: Unexpected EOF while trying to read response from server
	at org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:529) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-02T14:21:16,647 WARN [DataStreamer for file /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270170 block BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038 {}] hdfs.DataStreamer(1731): Error Recovery for BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038 in pipeline [DatanodeInfoWithStorage[127.0.0.1:32825,DS-b26a3641-b3ea-4185-b2ac-f20b249f581a,DISK], DatanodeInfoWithStorage[127.0.0.1:43509,DS-3b0edb9f-cf2f-4b01-8d9e-52c3bfc7008e,DISK], DatanodeInfoWithStorage[127.0.0.1:34475,DS-eafc4632-9690-4b7e-b992-4a9e1a12d3a7,DISK]]: datanode 0(DatanodeInfoWithStorage[127.0.0.1:32825,DS-b26a3641-b3ea-4185-b2ac-f20b249f581a,DISK]) is bad.
2024-12-02T14:21:16,648 WARN [DataStreamer for file /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270170 block BP-1650638000-172.17.0.3-1733149251009:blk_1073741862_1038 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270170 (inode 16548) Holder DFSClient_NONMAPREDUCE_-890839505_22 does not have any open files.
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
	at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?]
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
	at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	[... the five-frame reflective proxy cycle (DelegatingMethodAccessorImpl.invoke / Method.invoke / HFileSystem$1.invoke / $Proxy46.getAdditionalDatanode / GeneratedMethodAccessor79.invoke) repeats verbatim many times ...]
	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
	at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
	at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-02T14:21:16,649 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270170 with renewLeaseKey: DEFAULT_16548
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/6a2ff628-5558-a18a-83cb-fde0fd99aa20/testFailedToCreateWALIfParentRenamed/wal.1733149270170 (inode 16548) Holder DFSClient_NONMAPREDUCE_-890839505_22 does not have any open files.
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
	at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?]
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
	at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?]
	at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
	at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
	[... the preceding five-frame reflective proxy cycle repeats verbatim many more times ...]
	at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
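[Editor's note] The getAdditionalDatanode calls and the "Error Recovery ... datanode 0 ... is bad" record above are the HDFS client's write-pipeline recovery path: when a datanode in the pipeline fails, DataStreamer may ask the namenode for a replacement. Whether it does is governed by the standard dfs.client.block.write.replace-datanode-on-failure.* settings. A hedged sketch of how a client might set them follows; the keys are real HDFS configuration names, but the values are illustrative assumptions, not read from this test run.

    import org.apache.hadoop.conf.Configuration;

    public class PipelineRecoveryConf {
        public static Configuration clientConf() {
            Configuration conf = new Configuration();
            // Enable requesting a replacement datanode when one in the
            // write pipeline fails.
            conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.enable", true);
            // DEFAULT applies heuristics (replication factor, stream age);
            // ALWAYS and NEVER are the other recognized policies.
            conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "DEFAULT");
            // If replacement itself fails, continue with the surviving
            // datanodes instead of failing the write outright.
            conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.best-effort", true);
            return conf;
        }
    }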
2024-12-02T14:21:16,654 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@4b2b884e{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-02T14:21:16,657 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@fa4aa4c{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-02T14:21:16,657 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-02T14:21:16,657 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@47ddd06a{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-02T14:21:16,657 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@4c49fcd3{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir/,STOPPED}
2024-12-02T14:21:16,660 WARN [BP-1650638000-172.17.0.3-1733149251009 heartbeating to localhost/127.0.0.1:38049 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-02T14:21:16,660 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-02T14:21:16,660 WARN [BP-1650638000-172.17.0.3-1733149251009 heartbeating to localhost/127.0.0.1:38049 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-1650638000-172.17.0.3-1733149251009 (Datanode Uuid 84c87fe3-f604-41b2-80ac-aa5745c30957) service to localhost/127.0.0.1:38049
2024-12-02T14:21:16,660 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-02T14:21:16,661 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data5/current/BP-1650638000-172.17.0.3-1733149251009 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-02T14:21:16,662 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data6/current/BP-1650638000-172.17.0.3-1733149251009 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-02T14:21:16,662 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-02T14:21:16,696 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@28637041{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-02T14:21:16,697 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@5e18b7fd{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-02T14:21:16,697 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-02T14:21:16,697 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@243038a3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-02T14:21:16,697 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@443ad5c2{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir/,STOPPED}
2024-12-02T14:21:16,709 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-02T14:21:16,710 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-02T14:21:16,716 WARN [BP-1650638000-172.17.0.3-1733149251009 heartbeating to localhost/127.0.0.1:38049 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-02T14:21:16,716 WARN [BP-1650638000-172.17.0.3-1733149251009 heartbeating to localhost/127.0.0.1:38049 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-1650638000-172.17.0.3-1733149251009 (Datanode Uuid 619cb3e0-c96c-41b4-945f-caf82d951ccc) service to localhost/127.0.0.1:38049
2024-12-02T14:21:16,717 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data3/current/BP-1650638000-172.17.0.3-1733149251009 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-02T14:21:16,717 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data4/current/BP-1650638000-172.17.0.3-1733149251009 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-02T14:21:16,718 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-02T14:21:16,729 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@411b19f7{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-02T14:21:16,730 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@64bb503e{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-02T14:21:16,730 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-02T14:21:16,730 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@43a917ce{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-02T14:21:16,730 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@208945{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir/,STOPPED}
2024-12-02T14:21:16,734 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-02T14:21:16,734 WARN [BP-1650638000-172.17.0.3-1733149251009 heartbeating to localhost/127.0.0.1:38049 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-02T14:21:16,734 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-02T14:21:16,734 WARN [BP-1650638000-172.17.0.3-1733149251009 heartbeating to localhost/127.0.0.1:38049 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-1650638000-172.17.0.3-1733149251009 (Datanode Uuid 157239cf-2ea8-4afc-8dd1-90ba56d41db7) service to localhost/127.0.0.1:38049
2024-12-02T14:21:16,735 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data1/current/BP-1650638000-172.17.0.3-1733149251009 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-02T14:21:16,735 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/cluster_07820fc2-8ab2-2adb-94ba-e6048c8741d8/data/data2/current/BP-1650638000-172.17.0.3-1733149251009 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-02T14:21:16,736 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-02T14:21:16,749 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@5599def{hdfs,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-12-02T14:21:16,750 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@493ba8a1{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-02T14:21:16,750 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-02T14:21:16,750 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@61b73bb3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-02T14:21:16,750 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@746f7db{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master@2/hbase-server/target/test-data/c8f47275-2864-77b5-962e-9b71a042be7a/hadoop.log.dir/,STOPPED}
2024-12-02T14:21:16,791 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1026): Minicluster is down
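[Editor's note] The earlier "Failed to close file ... does not have any open files" ERROR is the shape of failure that testFailedToCreateWALIfParentRenamed appears designed to provoke: the WAL's parent directory is renamed while the output stream is still open, so the final close() can no longer resolve the old path's lease at the namenode. A hedged, self-contained reproduction sketch follows; the paths and flow are assumed for illustration, not taken from the test source.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class RenamedParentCloseSketch {
        public static void main(String[] args) throws Exception {
            // Assumes fs.defaultFS points at an HDFS cluster; the local
            // filesystem will not reproduce namenode lease behavior.
            FileSystem fs = FileSystem.get(new Configuration());
            Path parent = new Path("/tmp/wal-parent");           // hypothetical
            Path renamed = new Path("/tmp/wal-parent-renamed");  // hypothetical
            fs.mkdirs(parent);
            FSDataOutputStream out = fs.create(new Path(parent, "wal.0"));
            out.write(1);
            fs.rename(parent, renamed); // parent moves while the stream is open
            try {
                out.close(); // may fail with a RemoteException like the one logged
            } catch (Exception e) {
                System.out.println("close failed: " + e);
            } finally {
                fs.delete(renamed, true); // clean up the renamed directory
            }
        }
    }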