2024-12-06 03:45:23,926 main DEBUG Apache Log4j Core 2.17.2 initializing configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-12-06 03:45:23,937 main DEBUG Took 0.009125 seconds to load 1 plugins from package org.apache.hadoop.hbase.logging
2024-12-06 03:45:23,937 main DEBUG PluginManager 'Core' found 129 plugins
2024-12-06 03:45:23,937 main DEBUG PluginManager 'Level' found 0 plugins
2024-12-06 03:45:23,938 main DEBUG PluginManager 'Lookup' found 16 plugins
2024-12-06 03:45:23,939 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,945 main DEBUG PluginManager 'TypeConverter' found 26 plugins
2024-12-06 03:45:23,956 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.util.MBeans", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,958 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,958 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.logging.TestJul2Slf4j", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,959 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,959 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.zookeeper", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,960 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,960 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,961 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,961 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,961 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,962 main DEBUG LoggerConfig$Builder(additivity="false", level="WARN", levelAndRefs="null", name="org.apache.directory", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,962 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,963 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.ipc.FailedServers", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,963 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,963 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsConfig", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,964 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,964 main DEBUG LoggerConfig$Builder(additivity="null", level="INFO", levelAndRefs="null", name="org.apache.hadoop.hbase.ScheduledChore", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,964 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,965 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.regionserver.RSRpcServices", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,965 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,965 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,965 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,966 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,966 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-06 03:45:23,966 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hbase.thirdparty.io.netty.channel", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,967 main DEBUG Building Plugin[name=root, class=org.apache.logging.log4j.core.config.LoggerConfig$RootLogger].
2024-12-06 03:45:23,968 main DEBUG LoggerConfig$RootLogger$Builder(additivity="null", level="null", levelAndRefs="INFO,Console", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-06 03:45:23,969 main DEBUG Building Plugin[name=loggers, class=org.apache.logging.log4j.core.config.LoggersPlugin].
2024-12-06 03:45:23,971 main DEBUG createLoggers(={org.apache.hadoop.metrics2.util.MBeans, org.apache.hadoop.hbase.logging.TestJul2Slf4j, org.apache.zookeeper, org.apache.hadoop.metrics2.impl.MetricsSinkAdapter, org.apache.hadoop.metrics2.impl.MetricsSystemImpl, org.apache.directory, org.apache.hadoop.hbase.ipc.FailedServers, org.apache.hadoop.metrics2.impl.MetricsConfig, org.apache.hadoop.hbase.ScheduledChore, org.apache.hadoop.hbase.regionserver.RSRpcServices, org.apache.hadoop, org.apache.hadoop.hbase, org.apache.hbase.thirdparty.io.netty.channel, root})
2024-12-06 03:45:23,971 main DEBUG Building Plugin[name=layout, class=org.apache.logging.log4j.core.layout.PatternLayout].
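The LoggerConfig builds above come straight from a properties-format configuration: note the PropertiesConfiguration object and the levelAndRefs="INFO,Console" shorthand on the root logger. As a reading aid, here is a minimal sketch of the kind of log4j2.properties that would produce these entries; the logger names and levels are taken verbatim from the log, while the property keys and everything omitted (the rest of the logger list, the appender definition) are assumptions, and the actual file inside hbase-logging-*-tests.jar may differ:

    rootLogger = INFO,Console

    logger.hbase.name = org.apache.hadoop.hbase
    logger.hbase.level = DEBUG

    logger.hadoop.name = org.apache.hadoop
    logger.hadoop.level = WARN

    logger.zookeeper.name = org.apache.zookeeper
    logger.zookeeper.level = ERROR

    # the only logger above built with additivity explicitly "false"
    logger.directory.name = org.apache.directory
    logger.directory.level = WARN
    logger.directory.additivity = false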
2024-12-06 03:45:23,972 main DEBUG PatternLayout$Builder(pattern="%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n", PatternSelector=null, Configuration(PropertiesConfig), Replace=null, charset="null", alwaysWriteExceptions="null", disableAnsi="null", noConsoleNoAnsi="null", header="null", footer="null")
2024-12-06 03:45:23,972 main DEBUG PluginManager 'Converter' found 47 plugins
2024-12-06 03:45:23,980 main DEBUG Building Plugin[name=appender, class=org.apache.hadoop.hbase.logging.HBaseTestAppender].
2024-12-06 03:45:23,982 main DEBUG HBaseTestAppender$Builder(target="SYSTEM_ERR", maxSize="1G", bufferedIo="null", bufferSize="null", immediateFlush="null", ignoreExceptions="null", PatternLayout(%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n), name="Console", Configuration(PropertiesConfig), Filter=null, ={})
2024-12-06 03:45:23,984 main DEBUG Starting HBaseTestOutputStreamManager SYSTEM_ERR
2024-12-06 03:45:23,984 main DEBUG Building Plugin[name=appenders, class=org.apache.logging.log4j.core.config.AppendersPlugin].
2024-12-06 03:45:23,984 main DEBUG createAppenders(={Console})
2024-12-06 03:45:23,985 main DEBUG Configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f initialized
2024-12-06 03:45:23,985 main DEBUG Starting configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-12-06 03:45:23,986 main DEBUG Started configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f OK.
2024-12-06 03:45:23,986 main DEBUG Shutting down OutputStreamManager SYSTEM_OUT.false.false-1
2024-12-06 03:45:23,986 main DEBUG OutputStream closed
2024-12-06 03:45:23,986 main DEBUG Shut down OutputStreamManager SYSTEM_OUT.false.false-1, all resources released: true
2024-12-06 03:45:23,987 main DEBUG Appender DefaultConsole-1 stopped with status true
2024-12-06 03:45:23,987 main DEBUG Stopped org.apache.logging.log4j.core.config.DefaultConfiguration@61001b64 OK
2024-12-06 03:45:24,069 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6
2024-12-06 03:45:24,071 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=StatusLogger
2024-12-06 03:45:24,072 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=ContextSelector
2024-12-06 03:45:24,074 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=
2024-12-06 03:45:24,075 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.directory
2024-12-06 03:45:24,075 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
2024-12-06 03:45:24,076 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.zookeeper
2024-12-06 03:45:24,076 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.logging.TestJul2Slf4j
2024-12-06 03:45:24,077 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSystemImpl
2024-12-06 03:45:24,077 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.util.MBeans
2024-12-06 03:45:24,078 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase
2024-12-06 03:45:24,078 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop
2024-12-06 03:45:24,079 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ipc.FailedServers
2024-12-06 03:45:24,079 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.regionserver.RSRpcServices
2024-12-06 03:45:24,080 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsConfig
2024-12-06 03:45:24,080 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hbase.thirdparty.io.netty.channel
2024-12-06 03:45:24,080 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ScheduledChore
2024-12-06 03:45:24,081 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Appenders,name=Console
2024-12-06 03:45:24,084 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-12-06 03:45:24,084 main DEBUG Reconfiguration complete for context[name=1dbd16a6] at URI jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-logging/target/hbase-logging-4.0.0-alpha-1-SNAPSHOT-tests.jar!/log4j2.properties (org.apache.logging.log4j.core.LoggerContext@40db2a24) with optional ClassLoader: null
2024-12-06 03:45:24,084 main DEBUG Shutdown hook enabled. Registering a new one.
2024-12-06 03:45:24,085 main DEBUG LoggerContext[name=1dbd16a6, org.apache.logging.log4j.core.LoggerContext@40db2a24] started OK.
2024-12-06T03:45:24,330 DEBUG [main {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495
2024-12-06 03:45:24,333 main DEBUG AsyncLogger.ThreadNameStrategy=UNCACHED (user specified null, default is UNCACHED)
2024-12-06 03:45:24,333 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
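Every log line that follows is rendered through the single pattern string from the PatternLayout$Builder entry above. A small sketch of building that same layout with the stock Log4j 2 API (rather than HBase's test-only HBaseTestAppender); the class name and scaffolding are invented for illustration:

    import org.apache.logging.log4j.core.layout.PatternLayout;

    public class LayoutSketch {
      public static void main(String[] args) {
        PatternLayout layout = PatternLayout.newBuilder()
            // %d{ISO8601}  ISO-8601 timestamp     %-5p  level, left-aligned to 5 chars
            // %t           thread name            %X    MDC map, printed only when non-empty
            // %C{2}        last two class-name    %L    source line number
            //              components             %m%n  message plus newline
            .withPattern("%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n")
            .build();
        System.out.println(layout.getConversionPattern());
      }
    }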
2024-12-06T03:45:24,343 INFO [main {}] hbase.HBaseClassTestRule(94): Test class org.apache.hadoop.hbase.regionserver.wal.TestFSHLog timeout: 13 mins
2024-12-06T03:45:24,372 INFO [Time-limited test {}] hbase.HBaseZKTestingUtil(84): Created new mini-cluster data directory: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba, deleteOnExit=true
2024-12-06T03:45:24,374 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting test.cache.data to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/test.cache.data in system properties and HBase conf
2024-12-06T03:45:24,374 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.tmp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.tmp.dir in system properties and HBase conf
2024-12-06T03:45:24,375 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.log.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir in system properties and HBase conf
2024-12-06T03:45:24,376 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.local.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/mapreduce.cluster.local.dir in system properties and HBase conf
2024-12-06T03:45:24,377 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.temp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/mapreduce.cluster.temp.dir in system properties and HBase conf
2024-12-06T03:45:24,377 INFO [Time-limited test {}] hbase.HBaseTestingUtil(738): read short circuit is OFF
2024-12-06T03:45:24,485 WARN [Time-limited test {}] util.NativeCodeLoader(60): Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2024-12-06T03:45:24,580 DEBUG [Time-limited test {}] fs.HFileSystem(310): The file system is not a DistributedFileSystem. Skipping on block location reordering
2024-12-06T03:45:24,583 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-labels.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/yarn.node-labels.fs-store.root-dir in system properties and HBase conf
2024-12-06T03:45:24,584 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-attribute.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/yarn.node-attribute.fs-store.root-dir in system properties and HBase conf
2024-12-06T03:45:24,585 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.log-dirs to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/yarn.nodemanager.log-dirs in system properties and HBase conf
2024-12-06T03:45:24,585 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-12-06T03:45:24,586 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.active-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/yarn.timeline-service.entity-group-fs-store.active-dir in system properties and HBase conf
2024-12-06T03:45:24,586 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.done-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/yarn.timeline-service.entity-group-fs-store.done-dir in system properties and HBase conf
2024-12-06T03:45:24,587 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-12-06T03:45:24,587 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/dfs.journalnode.edits.dir in system properties and HBase conf
2024-12-06T03:45:24,588 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.datanode.shared.file.descriptor.paths to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/dfs.datanode.shared.file.descriptor.paths in system properties and HBase conf
2024-12-06T03:45:24,588 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting nfs.dump.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/nfs.dump.dir in system properties and HBase conf
2024-12-06T03:45:24,588 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting java.io.tmpdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/java.io.tmpdir in system properties and HBase conf
2024-12-06T03:45:24,588 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/dfs.journalnode.edits.dir in system properties and HBase conf
2024-12-06T03:45:24,589 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.provided.aliasmap.inmemory.leveldb.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/dfs.provided.aliasmap.inmemory.leveldb.dir in system properties and HBase conf
2024-12-06T03:45:24,589 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting fs.s3a.committer.staging.tmp.path to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/fs.s3a.committer.staging.tmp.path in system properties and HBase conf
2024-12-06T03:45:25,002 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-12-06T03:45:25,519 WARN [Time-limited test {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
2024-12-06T03:45:25,585 INFO [Time-limited test {}] log.Log(170): Logging initialized @2339ms to org.eclipse.jetty.util.log.Slf4jLog
2024-12-06T03:45:25,651 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-06T03:45:25,705 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-06T03:45:25,722 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-06T03:45:25,722 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-06T03:45:25,724 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-12-06T03:45:25,735 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-06T03:45:25,738 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@746f7db{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir/,AVAILABLE}
2024-12-06T03:45:25,739 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@61b73bb3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-06T03:45:25,895 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@5599def{hdfs,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/java.io.tmpdir/jetty-localhost-40235-hadoop-hdfs-3_4_1-tests_jar-_-any-11776920128675892330/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-12-06T03:45:25,901 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@493ba8a1{HTTP/1.1, (http/1.1)}{localhost:40235}
2024-12-06T03:45:25,901 INFO [Time-limited test {}] server.Server(415): Started @2656ms
2024-12-06T03:45:25,932 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-12-06T03:45:26,384 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-06T03:45:26,393 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-06T03:45:26,394 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-06T03:45:26,394 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-06T03:45:26,394 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-12-06T03:45:26,395 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@208945{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir/,AVAILABLE}
2024-12-06T03:45:26,395 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@43a917ce{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-06T03:45:26,489 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@411b19f7{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/java.io.tmpdir/jetty-localhost-46423-hadoop-hdfs-3_4_1-tests_jar-_-any-18121451929476350157/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-06T03:45:26,490 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@64bb503e{HTTP/1.1, (http/1.1)}{localhost:46423}
2024-12-06T03:45:26,490 INFO [Time-limited test {}] server.Server(415): Started @3245ms
2024-12-06T03:45:26,533 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-12-06T03:45:26,630 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-06T03:45:26,634 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-06T03:45:26,638 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-06T03:45:26,638 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-06T03:45:26,639 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-12-06T03:45:26,639 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@443ad5c2{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir/,AVAILABLE}
2024-12-06T03:45:26,640 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@243038a3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-06T03:45:26,737 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@28637041{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/java.io.tmpdir/jetty-localhost-33703-hadoop-hdfs-3_4_1-tests_jar-_-any-6041401892594373334/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-06T03:45:26,738 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@5e18b7fd{HTTP/1.1, (http/1.1)}{localhost:33703}
2024-12-06T03:45:26,738 INFO [Time-limited test {}] server.Server(415): Started @3493ms
2024-12-06T03:45:26,740 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-12-06T03:45:26,774 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-06T03:45:26,777 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-06T03:45:26,779 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-06T03:45:26,779 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-06T03:45:26,779 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-12-06T03:45:26,780 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@4c49fcd3{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir/,AVAILABLE}
2024-12-06T03:45:26,781 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@47ddd06a{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-06T03:45:26,881 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@4b2b884e{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/java.io.tmpdir/jetty-localhost-43469-hadoop-hdfs-3_4_1-tests_jar-_-any-4150692817756285598/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-06T03:45:26,882 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@fa4aa4c{HTTP/1.1, (http/1.1)}{localhost:43469}
2024-12-06T03:45:26,882 INFO [Time-limited test {}] server.Server(415): Started @3637ms
2024-12-06T03:45:26,884 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
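What the Jetty startup above amounts to: one namenode web UI (the hdfs webapp on localhost:40235) plus three datanode webapps (ports 46423, 33703, 43469), i.e. a 3-datanode mini DFS cluster. A hedged sketch of how an HBase WAL test typically brings such a cluster up; the method names follow the HBaseTestingUtil test utility seen in the log, but the exact setup in TestFSHLog's base class may differ:

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hbase.HBaseTestingUtil;

    public class MiniDfsSketch {
      public static void main(String[] args) throws Exception {
        HBaseTestingUtil util = new HBaseTestingUtil();
        // Three datanodes, one per "datanode" webapp context started above.
        util.startMiniDFSCluster(3);
        FileSystem fs = util.getDFSCluster().getFileSystem();
        System.out.println("mini DFS up at " + fs.getUri());
        util.shutdownMiniDFSCluster();
      }
    }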
2024-12-06T03:45:27,842 WARN [Thread-123 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data4/current/BP-1167662243-172.17.0.2-1733456725090/current, will proceed with Du for space computation calculation,
2024-12-06T03:45:27,842 WARN [Thread-122 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data3/current/BP-1167662243-172.17.0.2-1733456725090/current, will proceed with Du for space computation calculation,
2024-12-06T03:45:27,842 WARN [Thread-124 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data1/current/BP-1167662243-172.17.0.2-1733456725090/current, will proceed with Du for space computation calculation,
2024-12-06T03:45:27,842 WARN [Thread-125 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data2/current/BP-1167662243-172.17.0.2-1733456725090/current, will proceed with Du for space computation calculation,
2024-12-06T03:45:27,860 WARN [Thread-132 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data5/current/BP-1167662243-172.17.0.2-1733456725090/current, will proceed with Du for space computation calculation,
2024-12-06T03:45:27,861 WARN [Thread-133 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data6/current/BP-1167662243-172.17.0.2-1733456725090/current, will proceed with Du for space computation calculation,
2024-12-06T03:45:27,876 WARN [Thread-58 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-06T03:45:27,877 WARN [Thread-81 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-06T03:45:27,881 WARN [Thread-103 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-06T03:45:27,921 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x933b3ba0cc6c776 with lease ID 0x1bc653e145621906: Processing first storage report for DS-41a6a1a0-6244-4204-8f34-4cceaad1cf4d from datanode DatanodeRegistration(127.0.0.1:38345, datanodeUuid=410826ed-3178-4c9f-a460-11a642ea1bbd, infoPort=33355, infoSecurePort=0, ipcPort=35059, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090)
2024-12-06T03:45:27,922 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x933b3ba0cc6c776 with lease ID 0x1bc653e145621906: from storage DS-41a6a1a0-6244-4204-8f34-4cceaad1cf4d node DatanodeRegistration(127.0.0.1:38345, datanodeUuid=410826ed-3178-4c9f-a460-11a642ea1bbd, infoPort=33355, infoSecurePort=0, ipcPort=35059, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
2024-12-06T03:45:27,922 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x2dc9b1a65870410d with lease ID 0x1bc653e145621905: Processing first storage report for DS-1bb302a9-c053-4e8f-b6c8-978d965dd803 from datanode DatanodeRegistration(127.0.0.1:37139, datanodeUuid=92ae2001-b2d7-4d4a-a2c3-b58f1ba3d3d7, infoPort=34043, infoSecurePort=0, ipcPort=39125, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090)
2024-12-06T03:45:27,922 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x2dc9b1a65870410d with lease ID 0x1bc653e145621905: from storage DS-1bb302a9-c053-4e8f-b6c8-978d965dd803 node DatanodeRegistration(127.0.0.1:37139, datanodeUuid=92ae2001-b2d7-4d4a-a2c3-b58f1ba3d3d7, infoPort=34043, infoSecurePort=0, ipcPort=39125, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
2024-12-06T03:45:27,923 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x89224b076f56aca9 with lease ID 0x1bc653e145621904: Processing first storage report for DS-288cf1c6-00d8-4cf8-94c0-185e22eab264 from datanode DatanodeRegistration(127.0.0.1:45811, datanodeUuid=4c1a5a00-593f-49d0-bbf8-1b1cdbe1fea7, infoPort=40207, infoSecurePort=0, ipcPort=34427, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090)
2024-12-06T03:45:27,923 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x89224b076f56aca9 with lease ID 0x1bc653e145621904: from storage DS-288cf1c6-00d8-4cf8-94c0-185e22eab264 node DatanodeRegistration(127.0.0.1:45811, datanodeUuid=4c1a5a00-593f-49d0-bbf8-1b1cdbe1fea7, infoPort=40207, infoSecurePort=0, ipcPort=34427, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-06T03:45:27,923 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x933b3ba0cc6c776 with lease ID 0x1bc653e145621906: Processing first storage report for DS-16410990-8161-4267-9256-0e60ea2f01ea from datanode DatanodeRegistration(127.0.0.1:38345, datanodeUuid=410826ed-3178-4c9f-a460-11a642ea1bbd, infoPort=33355, infoSecurePort=0, ipcPort=35059, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090)
2024-12-06T03:45:27,923 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x933b3ba0cc6c776 with lease ID 0x1bc653e145621906: from storage DS-16410990-8161-4267-9256-0e60ea2f01ea node DatanodeRegistration(127.0.0.1:38345, datanodeUuid=410826ed-3178-4c9f-a460-11a642ea1bbd, infoPort=33355, infoSecurePort=0, ipcPort=35059, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-06T03:45:27,923 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x2dc9b1a65870410d with lease ID 0x1bc653e145621905: Processing first storage report for DS-57496df8-c87f-4072-8773-2b70bc89f93a from datanode DatanodeRegistration(127.0.0.1:37139, datanodeUuid=92ae2001-b2d7-4d4a-a2c3-b58f1ba3d3d7, infoPort=34043, infoSecurePort=0, ipcPort=39125, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090)
2024-12-06T03:45:27,923 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x2dc9b1a65870410d with lease ID 0x1bc653e145621905: from storage DS-57496df8-c87f-4072-8773-2b70bc89f93a node DatanodeRegistration(127.0.0.1:37139, datanodeUuid=92ae2001-b2d7-4d4a-a2c3-b58f1ba3d3d7, infoPort=34043, infoSecurePort=0, ipcPort=39125, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090), blocks: 0, hasStaleStorage: false, processing time: 1 msecs, invalidatedBlocks: 0
2024-12-06T03:45:27,923 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x89224b076f56aca9 with lease ID 0x1bc653e145621904: Processing first storage report for DS-151523c1-8e58-46f1-b144-d0103ca9c757 from datanode DatanodeRegistration(127.0.0.1:45811, datanodeUuid=4c1a5a00-593f-49d0-bbf8-1b1cdbe1fea7, infoPort=40207, infoSecurePort=0, ipcPort=34427, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090)
2024-12-06T03:45:27,924 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x89224b076f56aca9 with lease ID 0x1bc653e145621904: from storage DS-151523c1-8e58-46f1-b144-d0103ca9c757 node DatanodeRegistration(127.0.0.1:45811, datanodeUuid=4c1a5a00-593f-49d0-bbf8-1b1cdbe1fea7, infoPort=40207, infoSecurePort=0, ipcPort=34427, storageInfo=lv=-57;cid=testClusterID;nsid=625668165;c=1733456725090), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-06T03:45:27,970 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(631): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495
2024-12-06T03:45:27,981 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=157, OpenFileDescriptor=391, MaxFileDescriptor=1048576, SystemLoadAverage=391, ProcessCount=11, AvailableMemoryMB=8874
2024-12-06T03:45:27,997 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:28,001 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:28,194 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741825_1001 (size=7)
2024-12-06T03:45:28,195 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741825_1001 (size=7)
2024-12-06T03:45:28,195 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741825_1001 (size=7)
2024-12-06T03:45:28,610 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8
2024-12-06T03:45:28,611 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:28,614 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:28,624 DEBUG [Time-limited test {}] util.ClassSize(228): Using Unsafe to estimate memory layout
2024-12-06T03:45:28,643 INFO [Time-limited test {}] metrics.MetricRegistriesLoader(60): Loaded MetricRegistries class org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl
2024-12-06T03:45:28,645 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-06T03:45:28,653 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/testSyncRunnerIndexOverflow, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/oldWALs, maxLogs=1760
2024-12-06T03:45:28,695 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456728686
2024-12-06T03:45:28,761 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/testSyncRunnerIndexOverflow/wal.1733456728686
2024-12-06T03:45:28,814 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:33355:33355)]
2024-12-06T03:45:28,873 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:28,874 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:28,874 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:28,874 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:28,874 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:28,878 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741826_1002 (size=1293)
2024-12-06T03:45:28,879 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741826_1002 (size=1293)
2024-12-06T03:45:28,880 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741826_1002 (size=1293)
2024-12-06T03:45:28,886 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/oldWALs
2024-12-06T03:45:28,889 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733456728686)
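In outline, testSyncRunnerIndexOverflow creates an FSHLog under logDir, appends and syncs edits through the five sync.N SyncRunner threads, then closes the WAL, which interrupts the runners and archives the finished file to oldWALs. A hedged sketch of that lifecycle; the FSHLog constructor signature varies across HBase branches, so this follows the short (fs, rootDir, logName, conf) form of older 2.x tests and should be read as illustration, not this branch's exact API:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.regionserver.wal.FSHLog;

    public class WalLifecycleSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // 2.x-era convenience constructor; newer branches take more arguments.
        FSHLog wal = new FSHLog(fs, new Path("/user/jenkins/test-data"),
            "testSyncRunnerIndexOverflow", conf);
        wal.init();  // starts the sync.0 .. sync.4 SyncRunner threads seen above
        // ... append edits here, then:
        wal.sync();  // a SyncRunner takes the request off its queue (FSHLog.java:426)
        wal.close(); // interrupts the SyncRunners and archives the file to oldWALs
      }
    }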
2024-12-06T03:45:28,899 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=164 (was 157) Potentially hanging thread: sync.2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: weak-ref-cleaner-strictcontextstorage java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:155) java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:176) app//io.opentelemetry.context.StrictContextStorage$PendingScopes.run(StrictContextStorage.java:269) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: sync.0 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: sync.4 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: sync.1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) 
java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) Potentially hanging thread: LeaseRenewer:jenkins@localhost:44217 java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:441) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$800(LeaseRenewer.java:77) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:336) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: sync.3 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441) - Thread LEAK? -, OpenFileDescriptor=403 (was 391) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=391 (was 391), ProcessCount=11 (was 11), AvailableMemoryMB=8811 (was 8874) 2024-12-06T03:45:28,906 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=164, OpenFileDescriptor=403, MaxFileDescriptor=1048576, SystemLoadAverage=391, ProcessCount=11, AvailableMemoryMB=8811 2024-12-06T03:45:28,930 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741827_1003 (size=7) 2024-12-06T03:45:28,931 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741827_1003 (size=7) 2024-12-06T03:45:28,931 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741827_1003 (size=7) 2024-12-06T03:45:28,933 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8 2024-12-06T03:45:28,934 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:28,937 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:28,947 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-06T03:45:28,948 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/testUnflushedSeqIdTracking, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/oldWALs, maxLogs=1760 2024-12-06T03:45:28,950 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456728950 2024-12-06T03:45:28,961 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/testUnflushedSeqIdTracking/wal.1733456728950 2024-12-06T03:45:28,962 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:33355:33355)] 2024-12-06T03:45:28,965 INFO [Time-limited test {}] regionserver.ChunkCreator(472): data poolSizePercentage is less than 0. So not using pool 2024-12-06T03:45:28,965 INFO [Time-limited test {}] regionserver.ChunkCreator(472): index poolSizePercentage is less than 0. 
So not using pool
2024-12-06T03:45:28,989 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => b6e4a1e90c27d8aede6b74e9f56c8255, NAME => 'testUnflushedSeqIdTracking,,1733456728963.b6e4a1e90c27d8aede6b74e9f56c8255.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testUnflushedSeqIdTracking', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495
2024-12-06T03:45:29,011 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741829_1005 (size=61)
2024-12-06T03:45:29,012 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741829_1005 (size=61)
2024-12-06T03:45:29,012 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741829_1005 (size=61)
2024-12-06T03:45:29,016 INFO [Time-limited test {}] throttle.StoreHotnessProtector(112): StoreHotnessProtector is disabled. Set hbase.region.store.parallel.put.limit > 0 to enable, which may help mitigate load under heavy write pressure.
2024-12-06T03:45:29,020 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testUnflushedSeqIdTracking,,1733456728963.b6e4a1e90c27d8aede6b74e9f56c8255.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-06T03:45:29,062 INFO [StoreOpener-b6e4a1e90c27d8aede6b74e9f56c8255-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region b6e4a1e90c27d8aede6b74e9f56c8255
2024-12-06T03:45:29,096 INFO [StoreOpener-b6e4a1e90c27d8aede6b74e9f56c8255-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region b6e4a1e90c27d8aede6b74e9f56c8255 columnFamilyName b
2024-12-06T03:45:29,102 DEBUG [StoreOpener-b6e4a1e90c27d8aede6b74e9f56c8255-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-06T03:45:29,107 INFO [StoreOpener-b6e4a1e90c27d8aede6b74e9f56c8255-1 {}] regionserver.HStore(327): Store=b6e4a1e90c27d8aede6b74e9f56c8255/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-06T03:45:29,109 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for b6e4a1e90c27d8aede6b74e9f56c8255
2024-12-06T03:45:29,113 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testUnflushedSeqIdTracking/b6e4a1e90c27d8aede6b74e9f56c8255
2024-12-06T03:45:29,115 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testUnflushedSeqIdTracking/b6e4a1e90c27d8aede6b74e9f56c8255
2024-12-06T03:45:29,116 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/04eb28ac-020a-5d49-4031-78fbe487ae19/data/default/testUnflushedSeqIdTracking/b6e4a1e90c27d8aede6b74e9f56c8255
2024-12-06T03:45:29,119 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for b6e4a1e90c27d8aede6b74e9f56c8255
2024-12-06T03:45:29,119 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for b6e4a1e90c27d8aede6b74e9f56c8255
2024-12-06T03:45:29,128 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for b6e4a1e90c27d8aede6b74e9f56c8255
2024-12-06T03:45:29,133 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44217/user/jenkins/test-data/04eb28ac-020a-5d49-4031-78fbe487ae19/data/default/testUnflushedSeqIdTracking/b6e4a1e90c27d8aede6b74e9f56c8255/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-12-06T03:45:29,134 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened b6e4a1e90c27d8aede6b74e9f56c8255; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=69783291, jitterRate=0.039852067828178406}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-06T03:45:29,150 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for b6e4a1e90c27d8aede6b74e9f56c8255: Writing region info on filesystem at 1733456729039Initializing all the Stores at 1733456729042 (+3 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733456729043 (+1 ms)Cleaning up temporary data from old regions at 1733456729120 (+77 ms)Region opened successfully at 1733456729147 (+27 ms)
2024-12-06T03:45:32,183 INFO [pool-60-thread-2 {}] regionserver.HRegion(2902): Flushing b6e4a1e90c27d8aede6b74e9f56c8255 1/1 column families, dataSize=24 B heapSize=352 B
2024-12-06T03:45:35,275 DEBUG [pool-60-thread-2 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/testUnflushedSeqIdTracking/b6e4a1e90c27d8aede6b74e9f56c8255/.tmp/b/d2b5c78200b042fe94d42d5995452d4e is 28, key is b/b:b/1733456729170/Put/seqid=0
2024-12-06T03:45:35,288 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741830_1006 (size=4945)
2024-12-06T03:45:35,288 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741830_1006 (size=4945)
2024-12-06T03:45:35,289 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741830_1006 (size=4945)
2024-12-06T03:45:35,289 INFO [pool-60-thread-2 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=4 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/testUnflushedSeqIdTracking/b6e4a1e90c27d8aede6b74e9f56c8255/.tmp/b/d2b5c78200b042fe94d42d5995452d4e
2024-12-06T03:45:35,363 DEBUG [pool-60-thread-2 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/testUnflushedSeqIdTracking/b6e4a1e90c27d8aede6b74e9f56c8255/.tmp/b/d2b5c78200b042fe94d42d5995452d4e as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/testUnflushedSeqIdTracking/b6e4a1e90c27d8aede6b74e9f56c8255/b/d2b5c78200b042fe94d42d5995452d4e
2024-12-06T03:45:35,374 INFO [pool-60-thread-2 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/testUnflushedSeqIdTracking/b6e4a1e90c27d8aede6b74e9f56c8255/b/d2b5c78200b042fe94d42d5995452d4e, entries=1, sequenceid=4, filesize=4.8 K
2024-12-06T03:45:35,382 INFO [pool-60-thread-2 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for b6e4a1e90c27d8aede6b74e9f56c8255 in 3205ms, sequenceid=4, compaction requested=false
2024-12-06T03:45:35,382 DEBUG [pool-60-thread-2 {}] regionserver.HRegion(2603): Flush status journal for b6e4a1e90c27d8aede6b74e9f56c8255:
2024-12-06T03:45:35,382 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(193): Flush result:FLUSHED_NO_COMPACTION_NEEDED
2024-12-06T03:45:35,383 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(194): Flush succeeded:true
2024-12-06T03:45:35,383 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing b6e4a1e90c27d8aede6b74e9f56c8255, disabling compactions & flushes
2024-12-06T03:45:35,383 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testUnflushedSeqIdTracking,,1733456728963.b6e4a1e90c27d8aede6b74e9f56c8255.
2024-12-06T03:45:35,383 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testUnflushedSeqIdTracking,,1733456728963.b6e4a1e90c27d8aede6b74e9f56c8255.
2024-12-06T03:45:35,384 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testUnflushedSeqIdTracking,,1733456728963.b6e4a1e90c27d8aede6b74e9f56c8255. after waiting 0 ms
2024-12-06T03:45:35,384 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testUnflushedSeqIdTracking,,1733456728963.b6e4a1e90c27d8aede6b74e9f56c8255.
2024-12-06T03:45:35,385 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testUnflushedSeqIdTracking,,1733456728963.b6e4a1e90c27d8aede6b74e9f56c8255.
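Annotation: the StoreHotnessProtector message above names the configuration property that controls it (hbase.region.store.parallel.put.limit; the default of 0 leaves it disabled, as this run shows). A minimal sketch of enabling it in a test Configuration; only the property name comes from the log itself, and the limit value 10 is an arbitrary illustration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class HotnessProtectorSketch {
        public static void main(String[] args) {
            Configuration conf = HBaseConfiguration.create();
            // 0 (the default seen in "parallelPutToStoreThreadLimit=0") disables the
            // protector; any value > 0 enables parallel-put throttling per store.
            conf.setInt("hbase.region.store.parallel.put.limit", 10);
            System.out.println(conf.getInt("hbase.region.store.parallel.put.limit", 0));
        }
    }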
2024-12-06T03:45:35,386 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for b6e4a1e90c27d8aede6b74e9f56c8255: Waiting for close lock at 1733456735383Disabling compacts and flushes for region at 1733456735383Disabling writes for close at 1733456735384 (+1 ms)Writing region close event to WAL at 1733456735385 (+1 ms)Closed at 1733456735385
2024-12-06T03:45:35,386 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,386 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,387 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,387 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,387 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,390 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741828_1004 (size=875)
2024-12-06T03:45:35,390 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741828_1004 (size=875)
2024-12-06T03:45:35,391 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741828_1004 (size=875)
2024-12-06T03:45:35,394 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/oldWALs
2024-12-06T03:45:35,394 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733456728950)
2024-12-06T03:45:35,401 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=180 (was 164)
Potentially hanging thread: pool-60-thread-1
 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
 java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
 java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
 java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
 java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
 java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
 java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:52178 [Waiting for operation #3]
 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
 java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
 java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
 java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
 app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
 app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
 app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
 app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
 java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
 java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
 java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
 java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
 app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
 app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Monitor thread for TaskMonitor
 java.base@17.0.11/java.lang.Thread.sleep(Native Method)
 app//org.apache.hadoop.hbase.monitoring.TaskMonitor$MonitorRunnable.run(TaskMonitor.java:325)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data4
 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
 java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
 java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
 java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data2
 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
 java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
 java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
 java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: pool-60-thread-2
 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
 java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
 java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
 java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
 java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
 java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
 java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:37556 [Waiting for operation #3]
 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
 java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
 java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
 java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
 app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
 app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
 app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
 app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
 java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
 java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
 java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
 java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
 app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
 app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: org.apache.hadoop.hdfs.PeerCache@75336cd6
 java.base@17.0.11/java.lang.Thread.sleep(Native Method)
 app//org.apache.hadoop.hdfs.PeerCache.run(PeerCache.java:253)
 app//org.apache.hadoop.hdfs.PeerCache.access$000(PeerCache.java:46)
 app//org.apache.hadoop.hdfs.PeerCache$1.run(PeerCache.java:124)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data3
 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
 java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
 java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
 java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data1
 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
 java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
 java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
 java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: HBase-Metrics2-1
 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
 java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
 java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
 java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
 java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
 java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
 java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=417 (was 403) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=346 (was 391), ProcessCount=11 (was 11), AvailableMemoryMB=8788 (was 8811)
2024-12-06T03:45:35,408 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALComparator Thread=180, OpenFileDescriptor=417, MaxFileDescriptor=1048576, SystemLoadAverage=346, ProcessCount=11, AvailableMemoryMB=8786
2024-12-06T03:45:35,425 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741831_1007 (size=7)
2024-12-06T03:45:35,425 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741831_1007 (size=7)
2024-12-06T03:45:35,425 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741831_1007 (size=7)
2024-12-06T03:45:35,427 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8
2024-12-06T03:45:35,427 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:35,428 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:35,434 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
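Annotation: the ResourceChecker lines bracketing each test record resource counts before the test and compare them after, flagging growth such as "Thread=180 (was 164)" as a possible leak. A minimal sketch of that before/after idea, not the actual hbase.ResourceChecker implementation (only thread counts are shown; the class and method names here are invented for illustration):

    public class ResourceCheckSketch {
        public static void main(String[] args) {
            // snapshot before the "test" runs
            int before = Thread.getAllStackTraces().size();
            // ... a test body would execute here, possibly leaking threads ...
            int after = Thread.getAllStackTraces().size();
            // mirror the log's reporting style when the count grew
            System.out.printf("Thread=%d (was %d)%s%n",
                after, before, after > before ? " - Thread LEAK? -" : "");
        }
    }

The real checker additionally tracks open file descriptors, system load, and process count, as the "OpenFileDescriptor=... MaxFileDescriptor=... SystemLoadAverage=..." tail above shows.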
2024-12-06T03:45:35,435 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/d52171d6-3a3d-c641-3c43-670d6e949dc5/testWALComparator, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/d52171d6-3a3d-c641-3c43-670d6e949dc5/oldWALs, maxLogs=1760
2024-12-06T03:45:35,436 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735436
2024-12-06T03:45:35,444 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/d52171d6-3a3d-c641-3c43-670d6e949dc5/testWALComparator/wal.1733456735436
2024-12-06T03:45:35,445 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:34043:34043)]
2024-12-06T03:45:35,445 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(215): Log obtained is: FSHLog wal:(num 1733456735436)
2024-12-06T03:45:35,449 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-06T03:45:35,449 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=.meta, logDir=hdfs://localhost:44217/user/jenkins/test-data/d52171d6-3a3d-c641-3c43-670d6e949dc5/testWALComparator, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/d52171d6-3a3d-c641-3c43-670d6e949dc5/oldWALs, maxLogs=1760
2024-12-06T03:45:35,451 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735451.meta
2024-12-06T03:45:35,459 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/d52171d6-3a3d-c641-3c43-670d6e949dc5/testWALComparator/wal.1733456735451.meta
2024-12-06T03:45:35,460 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:33355:33355)]
2024-12-06T03:45:35,462 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,462 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,463 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,463 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,463 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,467 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741832_1008 (size=93)
2024-12-06T03:45:35,467 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741832_1008 (size=93)
2024-12-06T03:45:35,467 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741832_1008 (size=93)
2024-12-06T03:45:35,472 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/d52171d6-3a3d-c641-3c43-670d6e949dc5/oldWALs
2024-12-06T03:45:35,472 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733456735436)
2024-12-06T03:45:35,473 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,473 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,474 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,474 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,474 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,477 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741833_1009 (size=93)
2024-12-06T03:45:35,478 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741833_1009 (size=93)
2024-12-06T03:45:35,478 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741833_1009 (size=93)
2024-12-06T03:45:35,481 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/d52171d6-3a3d-c641-3c43-670d6e949dc5/oldWALs
2024-12-06T03:45:35,481 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:.meta(num 1733456735451)
2024-12-06T03:45:35,489 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALComparator Thread=190 (was 180) - Thread LEAK? -, OpenFileDescriptor=423 (was 417) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=346 (was 346), ProcessCount=11 (was 11), AvailableMemoryMB=8781 (was 8786)
2024-12-06T03:45:35,495 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=190, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=346, ProcessCount=11, AvailableMemoryMB=8781
2024-12-06T03:45:35,508 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741834_1010 (size=7)
2024-12-06T03:45:35,509 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741834_1010 (size=7)
2024-12-06T03:45:35,509 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741834_1010 (size=7)
2024-12-06T03:45:35,510 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8
2024-12-06T03:45:35,511 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:35,512 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:35,515 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(383): testFindMemStoresEligibleForFlush
2024-12-06T03:45:35,533 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
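Annotation: testWALComparator above exercises ordering of WAL files whose names embed a creation timestamp (wal.1733456735436 and wal.1733456735451.meta in this run). A hypothetical illustration of ordering such names by the embedded timestamp; this is not the HBase comparator itself, and the parsing rule (second dot-separated field) is an assumption based only on the names seen in this log:

    import java.util.Arrays;
    import java.util.Comparator;

    public class WalNameOrderSketch {
        public static void main(String[] args) {
            // "wal.<ts>" or "wal.<ts>.meta": take the timestamp after the first dot
            Comparator<String> byTimestamp = Comparator.comparingLong(name ->
                Long.parseLong(name.split("\\.")[1]));
            String[] wals = {"wal.1733456735451.meta", "wal.1733456735436"};
            Arrays.sort(wals, byTimestamp);
            System.out.println(Arrays.toString(wals)); // oldest WAL first
        }
    }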
2024-12-06T03:45:35,534 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs, maxLogs=1
2024-12-06T03:45:35,535 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735535
2024-12-06T03:45:35,543 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735535
2024-12-06T03:45:35,547 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:40207:40207)]
2024-12-06T03:45:35,551 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735550
2024-12-06T03:45:35,561 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,561 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,561 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,561 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,561 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,562 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735535 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735550
2024-12-06T03:45:35,563 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:33355:33355)]
2024-12-06T03:45:35,563 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735535 is not closed yet, will try archiving it next time
2024-12-06T03:45:35,565 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741835_1011 (size=283)
2024-12-06T03:45:35,565 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741835_1011 (size=283)
2024-12-06T03:45:35,567 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735566
2024-12-06T03:45:35,567 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741835_1011 (size=283)
2024-12-06T03:45:35,577 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,577 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,577 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,578 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,578 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,578 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735550 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735566
2024-12-06T03:45:35,581 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741836_1012 (size=283)
2024-12-06T03:45:35,581 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741836_1012 (size=283)
2024-12-06T03:45:35,581 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741836_1012 (size=283)
2024-12-06T03:45:35,582 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:33355:33355)]
2024-12-06T03:45:35,583 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 37efde565a2ab827101a46ef7039d386[cf1]
2024-12-06T03:45:35,585 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-06T03:45:35,585 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 37efde565a2ab827101a46ef7039d386[cf1]
2024-12-06T03:45:35,586 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 37efde565a2ab827101a46ef7039d386[cf1]
2024-12-06T03:45:35,587 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735587
2024-12-06T03:45:35,595 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,595 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,595 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,596 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,596 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,596 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735566 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735587
2024-12-06T03:45:35,597 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:40207:40207)]
2024-12-06T03:45:35,597 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735566 is not closed yet, will try archiving it next time
2024-12-06T03:45:35,598 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735535 to hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs/wal.1733456735535
2024-12-06T03:45:35,598 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-06T03:45:35,599 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741837_1013 (size=283)
2024-12-06T03:45:35,599 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735599
2024-12-06T03:45:35,599 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741837_1013 (size=283)
2024-12-06T03:45:35,600 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741837_1013 (size=283)
2024-12-06T03:45:35,602 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735550 to hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs/wal.1733456735550
2024-12-06T03:45:35,604 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735566 to hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs/wal.1733456735566
2024-12-06T03:45:35,609 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,609 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,609 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,610 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,610 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,610 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735587 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735599
2024-12-06T03:45:35,612 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741838_1014 (size=93)
2024-12-06T03:45:35,613 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741838_1014 (size=93)
2024-12-06T03:45:35,614 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043)]
2024-12-06T03:45:35,614 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741838_1014 (size=93)
2024-12-06T03:45:35,614 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735587 is not closed yet, will try archiving it next time
2024-12-06T03:45:35,614 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735587 to hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs/wal.1733456735587
2024-12-06T03:45:35,614 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-06T03:45:35,617 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735617
2024-12-06T03:45:35,627 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,627 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,628 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,628 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,628 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,628 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735599 with entries=4, filesize=465 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735617
2024-12-06T03:45:35,629 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:33355:33355)]
2024-12-06T03:45:35,629 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735599 is not closed yet, will try archiving it next time
2024-12-06T03:45:35,630 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-06T03:45:35,631 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741839_1015 (size=473)
2024-12-06T03:45:35,632 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741839_1015 (size=473)
2024-12-06T03:45:35,633 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735632
2024-12-06T03:45:35,635 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741839_1015 (size=473)
2024-12-06T03:45:35,644 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,644 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,644 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,644 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,644 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,644 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735617 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735632
2024-12-06T03:45:35,646 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:33355:33355)]
2024-12-06T03:45:35,646 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735617 is not closed yet, will try archiving it next time
2024-12-06T03:45:35,647 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 37efde565a2ab827101a46ef7039d386[cf1],45efa203c00e664c920dd3aa6ee7dcc1[cf1]
2024-12-06T03:45:35,647 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 37efde565a2ab827101a46ef7039d386[cf1],45efa203c00e664c920dd3aa6ee7dcc1[cf1]
2024-12-06T03:45:35,647 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741840_1016 (size=283)
2024-12-06T03:45:35,648 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735647
2024-12-06T03:45:35,648 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741840_1016 (size=283)
2024-12-06T03:45:35,648 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741840_1016 (size=283)
2024-12-06T03:45:35,649 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735599 to hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs/wal.1733456735599
2024-12-06T03:45:35,651 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735617 to hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs/wal.1733456735617
2024-12-06T03:45:35,656 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,656 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,656 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,657 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,657 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,657 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735632 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735647
2024-12-06T03:45:35,658 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:33355:33355)]
2024-12-06T03:45:35,658 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735632 is not closed yet, will try archiving it next time
2024-12-06T03:45:35,658 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-06T03:45:35,660 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741841_1017 (size=93)
2024-12-06T03:45:35,660 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741841_1017 (size=93)
2024-12-06T03:45:35,660 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741841_1017 (size=93)
2024-12-06T03:45:35,661 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735632 to hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs/wal.1733456735632
2024-12-06T03:45:35,762 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735761
2024-12-06T03:45:35,775 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,775 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,775 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,775 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,776 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,776 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735647 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735761
2024-12-06T03:45:35,777 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:33355:33355)]
2024-12-06T03:45:35,777 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735647 is not closed yet, will try archiving it next time
2024-12-06T03:45:35,777 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-06T03:45:35,778 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735777
2024-12-06T03:45:35,778 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741842_1018 (size=283)
2024-12-06T03:45:35,779 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741842_1018 (size=283)
2024-12-06T03:45:35,779 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741842_1018 (size=283)
2024-12-06T03:45:35,780 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735647 to hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs/wal.1733456735647
2024-12-06T03:45:35,787 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,787 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,787 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,788 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,788 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,788 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735761 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735777
2024-12-06T03:45:35,789 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:40207:40207)]
2024-12-06T03:45:35,789 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735761 is not closed yet, will try archiving it next time
2024-12-06T03:45:35,791 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741843_1019 (size=93)
2024-12-06T03:45:35,792 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741843_1019 (size=93)
2024-12-06T03:45:35,792 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741843_1019 (size=93)
2024-12-06T03:45:35,793 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735761 to hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs/wal.1733456735761
2024-12-06T03:45:35,796 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735795
2024-12-06T03:45:35,806 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,806 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,806 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,806 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,806 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,807 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735777 with entries=6, filesize=709 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735795
2024-12-06T03:45:35,808 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043)]
2024-12-06T03:45:35,808 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735777 is not closed yet, will try archiving it next time
2024-12-06T03:45:35,809 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741844_1020 (size=717)
2024-12-06T03:45:35,810 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735809
2024-12-06T03:45:35,810 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741844_1020 (size=717)
2024-12-06T03:45:35,811 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741844_1020 (size=717)
2024-12-06T03:45:35,819 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,819 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,819 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,819 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,820 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,820 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735795 with entries=2, filesize=293 B; new WAL /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735809
2024-12-06T03:45:35,821 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:33355:33355)]
2024-12-06T03:45:35,821 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44217/user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/testFindMemStoresEligibleForFlush/wal.1733456735795 is not closed yet, will try archiving it next time
2024-12-06T03:45:35,821 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 3ccc21a7eabc196fd0c042157a651f7e[cf1,cf3,cf2]
2024-12-06T03:45:35,822 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-06T03:45:35,822 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 3ccc21a7eabc196fd0c042157a651f7e[cf3,cf2]
2024-12-06T03:45:35,823 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741845_1021 (size=301)
2024-12-06T03:45:35,823 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741845_1021 (size=301)
2024-12-06T03:45:35,824 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741845_1021 (size=301)
2024-12-06T03:45:35,825 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,825 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,825 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,825 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,825 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,828 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741846_1022 (size=93)
2024-12-06T03:45:35,828 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741846_1022 (size=93)
2024-12-06T03:45:35,828 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741846_1022 (size=93)
2024-12-06T03:45:35,835 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 3 WAL file(s) to /user/jenkins/test-data/1c8296e3-3019-adfd-673f-e117ed3a9a59/oldWALs
2024-12-06T03:45:35,835 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733456735809)
2024-12-06T03:45:35,842 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=195 (was 190) - Thread LEAK? -, OpenFileDescriptor=423 (was 423), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=346 (was 346), ProcessCount=11 (was 11), AvailableMemoryMB=8767 (was 8781)
2024-12-06T03:45:35,849 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=195, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=346, ProcessCount=11, AvailableMemoryMB=8767
2024-12-06T03:45:35,861 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741847_1023 (size=7)
2024-12-06T03:45:35,862 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741847_1023 (size=7)
2024-12-06T03:45:35,862 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741847_1023 (size=7)
2024-12-06T03:45:35,864 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8
2024-12-06T03:45:35,864 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:35,866 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:35,872 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
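Annotation: the repeated "Too many WALs; count=2, max=1; forcing (partial) flush" records in testFindMemStoresEligibleForFlush follow from the maxLogs=1 cap in that test's WAL configuration line: once the count of live WAL files exceeds the cap, the WAL asks the regions whose un-flushed edits pin the oldest file to flush, so the file can be archived to oldWALs (visible in the WAL-Archive-0 records above). A sketch of lowering the cap the way this test appears to; the property name hbase.regionserver.maxlogs is an assumption taken from HBase's standard configuration, not from this log:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class MaxLogsSketch {
        public static void main(String[] args) {
            Configuration conf = HBaseConfiguration.create();
            // Keep at most one live WAL; any roll beyond that forces memstore
            // flushes so the oldest WAL becomes archivable, as seen above.
            conf.setInt("hbase.regionserver.maxlogs", 1);
            System.out.println(conf.getInt("hbase.regionserver.maxlogs", 32));
        }
    }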
2024-12-06T03:45:35,872 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/21ed3223-7bc4-0f45-5b37-ae9d5392b219/testRollWriterForClosedWAL, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/21ed3223-7bc4-0f45-5b37-ae9d5392b219/testRollWriterForClosedWAL, maxLogs=1760
2024-12-06T03:45:35,873 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735873
2024-12-06T03:45:35,881 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/21ed3223-7bc4-0f45-5b37-ae9d5392b219/testRollWriterForClosedWAL/wal.1733456735873
2024-12-06T03:45:35,886 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043)]
2024-12-06T03:45:35,889 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,889 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,889 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,890 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,890 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:35,893 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741848_1024 (size=93)
2024-12-06T03:45:35,893 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741848_1024 (size=93)
2024-12-06T03:45:35,893 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741848_1024 (size=93)
2024-12-06T03:45:35,896 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/21ed3223-7bc4-0f45-5b37-ae9d5392b219/testRollWriterForClosedWAL
2024-12-06T03:45:35,896 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733456735873)
2024-12-06T03:45:35,905 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=200 (was 195) - Thread LEAK? -, OpenFileDescriptor=423 (was 423), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=346 (was 346), ProcessCount=11 (was 11), AvailableMemoryMB=8767 (was 8767)
2024-12-06T03:45:35,912 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=202, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=346, ProcessCount=11, AvailableMemoryMB=8767
2024-12-06T03:45:35,926 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741849_1025 (size=7)
2024-12-06T03:45:35,926 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741849_1025 (size=7)
2024-12-06T03:45:35,927 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741849_1025 (size=7)
2024-12-06T03:45:35,929 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8
2024-12-06T03:45:35,929 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:35,931 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:35,940 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
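Annotation: each "WAL configuration" record pairs blocksize=2 MB with rollsize=1 MB, i.e. the roll threshold appears to be derived from the block size by a multiplier. A worked sketch of that arithmetic; the 0.5 multiplier is inferred from the observed 2 MB -> 1 MB ratio in this log, not stated in it:

    public class RollSizeSketch {
        public static void main(String[] args) {
            long blocksize = 2L * 1024 * 1024;               // blocksize=2 MB from the log
            double multiplier = 0.5;                         // inferred: rollsize / blocksize
            long rollsize = (long) (blocksize * multiplier); // threshold for rolling the writer
            System.out.println(rollsize);                    // 1048576 bytes = 1 MB, matching rollsize=1 MB
        }
    }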
2024-12-06T03:45:35,941 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/testMaxFlushedSequenceIdGoBackwards, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/oldWALs, maxLogs=1760 2024-12-06T03:45:35,942 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456735942 2024-12-06T03:45:35,950 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/testMaxFlushedSequenceIdGoBackwards/wal.1733456735942 2024-12-06T03:45:35,950 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:33355:33355)] 2024-12-06T03:45:35,952 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 0dd1ad34cd934bde547838838ccf4b7f, NAME => 'table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495 2024-12-06T03:45:35,961 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741851_1027 (size=40) 2024-12-06T03:45:35,962 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741851_1027 (size=40) 2024-12-06T03:45:35,962 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741851_1027 (size=40) 2024-12-06T03:45:35,963 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-06T03:45:35,965 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:35,966 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 
9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 0dd1ad34cd934bde547838838ccf4b7f columnFamilyName a 2024-12-06T03:45:35,967 DEBUG [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-06T03:45:35,967 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] regionserver.HStore(327): Store=0dd1ad34cd934bde547838838ccf4b7f/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-06T03:45:35,967 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:35,969 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 0dd1ad34cd934bde547838838ccf4b7f columnFamilyName b 2024-12-06T03:45:35,969 DEBUG [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-06T03:45:35,970 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] regionserver.HStore(327): Store=0dd1ad34cd934bde547838838ccf4b7f/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-06T03:45:35,970 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:35,971 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/table/0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:35,972 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/table/0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:35,972 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/2ccbf3ac-72b3-56ae-df78-723bb965f00a/data/default/table/0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:35,973 DEBUG [Time-limited test {}] regionserver.HRegion(1048): 
stopping wal replay for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:35,973 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:35,975 DEBUG [Time-limited test {}] regionserver.FlushLargeStoresPolicy(65): No hbase.hregion.percolumnfamilyflush.size.lower.bound set in table table descriptor;using region.getMemStoreFlushHeapSize/# of families (64.0 M)) instead. 2024-12-06T03:45:35,976 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:35,980 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44217/user/jenkins/test-data/2ccbf3ac-72b3-56ae-df78-723bb965f00a/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-06T03:45:35,980 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 0dd1ad34cd934bde547838838ccf4b7f; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=74327887, jitterRate=0.10757182538509369}}}, FlushLargeStoresPolicy{flushSizeLowerBound=67108864} 2024-12-06T03:45:35,982 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 0dd1ad34cd934bde547838838ccf4b7f: Writing region info on filesystem at 1733456735963Initializing all the Stores at 1733456735964 (+1 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733456735964Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733456735964Cleaning up temporary data from old regions at 1733456735973 (+9 ms)Region opened successfully at 1733456735982 (+9 ms) 2024-12-06T03:45:35,983 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 0dd1ad34cd934bde547838838ccf4b7f, disabling compactions & flushes 2024-12-06T03:45:35,983 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. 2024-12-06T03:45:35,983 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. 2024-12-06T03:45:35,983 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. after waiting 0 ms 2024-12-06T03:45:35,983 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. 2024-12-06T03:45:35,983 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. 
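
The "creating {ENCODED => 0dd1ad34...}" record above builds the region from a table descriptor with two column families, 'a' and 'b', each with VERSIONS => '1'. A sketch of constructing an equivalent descriptor with HBase's public client builders; the table and family names are taken from the log, everything else is a plain default:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public final class TwoFamilyDescriptor {
  // Families 'a' and 'b' with max one version, matching the descriptor logged
  // for table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f.
  static TableDescriptor build() {
    return TableDescriptorBuilder.newBuilder(TableName.valueOf("table"))
        .setColumnFamily(
            ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("a"))
                .setMaxVersions(1).build())
        .setColumnFamily(
            ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("b"))
                .setMaxVersions(1).build())
        .build();
  }
}
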
2024-12-06T03:45:35,984 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 0dd1ad34cd934bde547838838ccf4b7f: Waiting for close lock at 1733456735983Disabling compacts and flushes for region at 1733456735983Disabling writes for close at 1733456735983Writing region close event to WAL at 1733456735983Closed at 1733456735983 2024-12-06T03:45:36,338 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 0dd1ad34cd934bde547838838ccf4b7f, NAME => 'table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f.', STARTKEY => '', ENDKEY => ''} 2024-12-06T03:45:36,355 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,356 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-06T03:45:36,358 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,359 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,362 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,363 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 0dd1ad34cd934bde547838838ccf4b7f columnFamilyName a 2024-12-06T03:45:36,363 DEBUG [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-06T03:45:36,364 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] regionserver.HStore(327): Store=0dd1ad34cd934bde547838838ccf4b7f/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-06T03:45:36,365 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,366 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, 
offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 0dd1ad34cd934bde547838838ccf4b7f columnFamilyName b 2024-12-06T03:45:36,366 DEBUG [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-06T03:45:36,367 INFO [StoreOpener-0dd1ad34cd934bde547838838ccf4b7f-1 {}] regionserver.HStore(327): Store=0dd1ad34cd934bde547838838ccf4b7f/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-06T03:45:36,367 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,368 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/table/0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,369 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/table/0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,372 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/2ccbf3ac-72b3-56ae-df78-723bb965f00a/data/default/table/0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,373 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,373 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,376 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,377 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 0dd1ad34cd934bde547838838ccf4b7f; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=74994431, jitterRate=0.11750410497188568}}}, org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL$FlushSpecificStoresPolicy@32a97fb 2024-12-06T03:45:36,377 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for 0dd1ad34cd934bde547838838ccf4b7f 2024-12-06T03:45:36,379 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 0dd1ad34cd934bde547838838ccf4b7f: Running coprocessor pre-open hook at 1733456736359Writing region info on filesystem at 1733456736359Initializing all the Stores at 1733456736361 (+2 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', 
BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733456736361Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733456736361Cleaning up temporary data from old regions at 1733456736373 (+12 ms)Running coprocessor post-open hooks at 1733456736378 (+5 ms)Region opened successfully at 1733456736379 (+1 ms) 2024-12-06T03:45:39,396 INFO [pool-78-thread-1 {}] regionserver.HRegion(2902): Flushing 0dd1ad34cd934bde547838838ccf4b7f 2/2 column families, dataSize=96 B heapSize=896 B 2024-12-06T03:45:41,873 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties 2024-12-06T03:45:42,422 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/a/da936fee357644098faeea7cbf064823 is 28, key is a/a:a/1733456736386/Put/seqid=0 2024-12-06T03:45:42,430 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741852_1028 (size=4945) 2024-12-06T03:45:42,431 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741852_1028 (size=4945) 2024-12-06T03:45:42,431 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741852_1028 (size=4945) 2024-12-06T03:45:42,431 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/a/da936fee357644098faeea7cbf064823 2024-12-06T03:45:42,456 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/b/6de831dda9de4f7e8954d2238e8306a3 is 28, key is a/b:b/1733456736386/Put/seqid=0 2024-12-06T03:45:42,463 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741853_1029 (size=4945) 2024-12-06T03:45:42,464 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741853_1029 (size=4945) 2024-12-06T03:45:42,464 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741853_1029 (size=4945) 2024-12-06T03:45:42,465 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), 
to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/b/6de831dda9de4f7e8954d2238e8306a3 2024-12-06T03:45:42,476 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/a/da936fee357644098faeea7cbf064823 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/a/da936fee357644098faeea7cbf064823 2024-12-06T03:45:42,486 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/a/da936fee357644098faeea7cbf064823, entries=1, sequenceid=6, filesize=4.8 K 2024-12-06T03:45:42,488 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/b/6de831dda9de4f7e8954d2238e8306a3 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/b/6de831dda9de4f7e8954d2238e8306a3 2024-12-06T03:45:42,499 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/b/6de831dda9de4f7e8954d2238e8306a3, entries=1, sequenceid=6, filesize=4.8 K 2024-12-06T03:45:42,501 INFO [pool-78-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~96 B/96, heapSize ~864 B/864, currentSize=0 B/0 for 0dd1ad34cd934bde547838838ccf4b7f in 3105ms, sequenceid=6, compaction requested=false 2024-12-06T03:45:42,501 DEBUG [pool-78-thread-1 {}] regionserver.HRegion(2603): Flush status journal for 0dd1ad34cd934bde547838838ccf4b7f: 2024-12-06T03:45:42,501 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(676): Flush result:FLUSHED_NO_COMPACTION_NEEDED 2024-12-06T03:45:42,501 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(677): Flush succeeded:true 2024-12-06T03:45:42,506 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 0dd1ad34cd934bde547838838ccf4b7f 1/2 column families, dataSize=48 B heapSize=704 B; a={dataSize=24 B, heapSize=352 B, offHeapSize=0 B} 2024-12-06T03:45:42,512 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/a/e01dddbd9c2942dca74c09299b4306b7 is 28, key is a/a:a/1733456736386/Put/seqid=0 2024-12-06T03:45:42,520 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741854_1030 (size=4945) 2024-12-06T03:45:42,521 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to 
blk_1073741854_1030 (size=4945) 2024-12-06T03:45:42,521 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741854_1030 (size=4945) 2024-12-06T03:45:42,521 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=10 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/a/e01dddbd9c2942dca74c09299b4306b7 2024-12-06T03:45:42,531 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/a/e01dddbd9c2942dca74c09299b4306b7 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/a/e01dddbd9c2942dca74c09299b4306b7 2024-12-06T03:45:42,540 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/a/e01dddbd9c2942dca74c09299b4306b7, entries=1, sequenceid=10, filesize=4.8 K 2024-12-06T03:45:42,542 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=24 B/24 for 0dd1ad34cd934bde547838838ccf4b7f in 36ms, sequenceid=10, compaction requested=false 2024-12-06T03:45:42,542 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for 0dd1ad34cd934bde547838838ccf4b7f: 2024-12-06T03:45:42,544 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 0dd1ad34cd934bde547838838ccf4b7f, disabling compactions & flushes 2024-12-06T03:45:42,544 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. 2024-12-06T03:45:42,544 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. 2024-12-06T03:45:42,544 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. after waiting 0 ms 2024-12-06T03:45:42,544 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. 
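
The flush records above (DefaultStoreFlusher writing a ~4.8 K HFile per family into .tmp, then committing it into the family directory) follow small single-cell puts on keys a/a:a and a/b:b. A hedged sketch of the client-side put-then-flush cycle that drives this; conn is an assumed, already-open Connection to the test cluster, and note that the "1/2 column families" flush in the log comes from the test's flush policy, which this sketch does not reproduce:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public final class PutThenFlush {
  static void run(Connection conn) throws Exception {
    TableName name = TableName.valueOf("table");
    try (Table t = conn.getTable(name)) {
      // One small cell per family, like the a/a:a and a/b:b keys logged above.
      Put p = new Put(Bytes.toBytes("a"));
      p.addColumn(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("v"));
      p.addColumn(Bytes.toBytes("b"), Bytes.toBytes("b"), Bytes.toBytes("v"));
      t.put(p);
    }
    try (Admin admin = conn.getAdmin()) {
      admin.flush(name); // triggers the memstore flush recorded above
    }
  }
}
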
2024-12-06T03:45:42,544 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 0dd1ad34cd934bde547838838ccf4b7f 2/2 column families, dataSize=24 B heapSize=608 B 2024-12-06T03:45:42,551 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/b/7859440b0170413b91c86a733084b786 is 28, key is a/b:b/1733456736386/Put/seqid=0 2024-12-06T03:45:42,558 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741855_1031 (size=4945) 2024-12-06T03:45:42,558 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741855_1031 (size=4945) 2024-12-06T03:45:42,558 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741855_1031 (size=4945) 2024-12-06T03:45:42,559 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=13 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/b/7859440b0170413b91c86a733084b786 2024-12-06T03:45:42,570 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/.tmp/b/7859440b0170413b91c86a733084b786 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/b/7859440b0170413b91c86a733084b786 2024-12-06T03:45:42,580 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/b/7859440b0170413b91c86a733084b786, entries=1, sequenceid=13, filesize=4.8 K 2024-12-06T03:45:42,582 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 0dd1ad34cd934bde547838838ccf4b7f in 38ms, sequenceid=13, compaction requested=false 2024-12-06T03:45:42,589 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44217/user/jenkins/test-data/2ccbf3ac-72b3-56ae-df78-723bb965f00a/data/default/table/0dd1ad34cd934bde547838838ccf4b7f/recovered.edits/16.seqid, newMaxSeqId=16, maxSeqId=1 2024-12-06T03:45:42,590 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. 
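
Closing the region flushes whatever remains in the memstore and writes the recovered.edits/16.seqid marker from the WALSplitUtil(410) record above; the close journal that follows itemizes each step. In test code this teardown is typically one helper call; a sketch assuming the region came from HBaseTestingUtil.createRegionAndWAL (as the stack trace further below shows) and that the paired close helper is available:

import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.regionserver.HRegion;

public final class CloseRegion {
  // Flushes remaining edits, writes the close event to the WAL, then closes
  // the WAL itself so its last file can be archived to oldWALs.
  static void shutDown(HRegion region) throws Exception {
    HBaseTestingUtil.closeRegionAndWAL(region);
  }
}
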
2024-12-06T03:45:42,590 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 0dd1ad34cd934bde547838838ccf4b7f: Waiting for close lock at 1733456742544Running coprocessor pre-close hooks at 1733456742544Disabling compacts and flushes for region at 1733456742544Disabling writes for close at 1733456742544Obtaining lock to block concurrent updates at 1733456742544Preparing flush snapshotting stores in 0dd1ad34cd934bde547838838ccf4b7f at 1733456742544Finished memstore snapshotting table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f., syncing WAL and waiting on mvcc, flushsize=dataSize=24, getHeapSize=576, getOffHeapSize=0, getCellsCount=1 at 1733456742545 (+1 ms)Flushing stores of table,,1733456735951.0dd1ad34cd934bde547838838ccf4b7f. at 1733456742546 (+1 ms)Flushing 0dd1ad34cd934bde547838838ccf4b7f/b: creating writer at 1733456742546Flushing 0dd1ad34cd934bde547838838ccf4b7f/b: appending metadata at 1733456742550 (+4 ms)Flushing 0dd1ad34cd934bde547838838ccf4b7f/b: closing flushed file at 1733456742550Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@6c6cf8e1: reopening flushed file at 1733456742568 (+18 ms)Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 0dd1ad34cd934bde547838838ccf4b7f in 38ms, sequenceid=13, compaction requested=false at 1733456742582 (+14 ms)Writing region close event to WAL at 1733456742583 (+1 ms)Running coprocessor post-close hooks at 1733456742589 (+6 ms)Closed at 1733456742590 (+1 ms) 2024-12-06T03:45:42,590 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:42,590 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:42,590 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:42,591 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:42,591 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:42,593 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741850_1026 (size=2357) 2024-12-06T03:45:42,594 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741850_1026 (size=2357) 2024-12-06T03:45:42,594 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741850_1026 (size=2357) 2024-12-06T03:45:42,596 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/oldWALs 2024-12-06T03:45:42,596 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733456735942) 2024-12-06T03:45:42,604 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=208 (was 202) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:56044 [Waiting for operation #5] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) 
app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:44152 [Waiting for operation #9] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:51950 [Waiting for operation #5] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Timer for 'HBase' metrics system java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.util.TimerThread.mainLoop(Timer.java:563) 
java.base@17.0.11/java.util.TimerThread.run(Timer.java:516) - Thread LEAK? -, OpenFileDescriptor=425 (was 423) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=334 (was 346), ProcessCount=11 (was 11), AvailableMemoryMB=8711 (was 8767) 2024-12-06T03:45:42,611 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=208, OpenFileDescriptor=425, MaxFileDescriptor=1048576, SystemLoadAverage=334, ProcessCount=11, AvailableMemoryMB=8710 2024-12-06T03:45:42,622 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741856_1032 (size=7) 2024-12-06T03:45:42,623 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741856_1032 (size=7) 2024-12-06T03:45:42,624 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741856_1032 (size=7) 2024-12-06T03:45:42,627 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8 2024-12-06T03:45:42,627 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:42,629 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:42,636 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(244): No decryptEncryptedDataEncryptionKey method in DFSClient, should be hadoop version with HDFS-12396 java.lang.NoSuchMethodException: org.apache.hadoop.hdfs.DFSClient.decryptEncryptedDataEncryptionKey(org.apache.hadoop.fs.FileEncryptionInfo) at java.lang.Class.getDeclaredMethod(Class.java:2675) ~[?:?] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelperWithoutHDFS12396(FanOutOneBlockAsyncDFSOutputSaslHelper.java:183) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelper(FanOutOneBlockAsyncDFSOutputSaslHelper.java:242) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.<clinit>(FanOutOneBlockAsyncDFSOutputSaslHelper.java:253) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at java.lang.Class.forName0(Native Method) ~[?:?] at java.lang.Class.forName(Class.java:375) ~[?:?] at org.apache.hadoop.hbase.wal.AsyncFSWALProvider.load(AsyncFSWALProvider.java:150) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.getProviderClass(WALFactory.java:174) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:262) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:214) ~[classes/:?] 
at org.apache.hadoop.hbase.HBaseTestingUtil.createWal(HBaseTestingUtil.java:2160) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2205) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2169) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFlushSequenceIdIsGreaterThanAllEditsInHFile(AbstractTestFSWAL.java:425) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
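
The NoSuchMethodException trace above is not a failure: FanOutOneBlockAsyncDFSOutputSaslHelper probes DFSClient reflectively and falls back when the running Hadoop predates HDFS-12396, logging the miss at DEBUG. A minimal sketch of that capability-probe pattern; the method name comes from the trace, the surrounding class is illustrative:

import java.lang.reflect.Method;

public final class Hdfs12396Probe {
  // Returns true when DFSClient exposes the HDFS-12396 decryption hook; the
  // SASL helper catches NoSuchMethodException and uses the pre-12396 path.
  static boolean hasDecryptHook(Class<?> dfsClientClass,
                                Class<?> fileEncryptionInfoClass) {
    try {
      Method m = dfsClientClass.getDeclaredMethod(
          "decryptEncryptedDataEncryptionKey", fileEncryptionInfoClass);
      return m != null;
    } catch (NoSuchMethodException expectedOnOlderHadoop) {
      return false;
    }
  }
}
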
2024-12-06T03:45:42,639 INFO [Time-limited test {}] wal.WALFactory(196): Instantiating WALProvider of type class org.apache.hadoop.hbase.wal.AsyncFSWALProvider 2024-12-06T03:45:42,643 DEBUG [Time-limited test {}] channel.MultithreadEventLoopGroup(44): -Dio.netty.eventLoopThreads: 16 2024-12-06T03:45:42,652 DEBUG [Time-limited test {}] nio.NioEventLoop(110): -Dio.netty.noKeySetOptimization: false 2024-12-06T03:45:42,652 DEBUG [Time-limited test {}] nio.NioEventLoop(111): -Dio.netty.selectorAutoRebuildThreshold: 512 2024-12-06T03:45:42,663 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor defaultMonitorName 2024-12-06T03:45:42,667 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-06T03:45:42,667 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=hregion-92628914, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/WALs/hregion-92628914, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/oldWALs, maxLogs=1760 2024-12-06T03:45:42,681 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputHelper(524): When create output stream for /user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/WALs/hregion-92628914/hregion-92628914.1733456742668, exclude list is [], retry=0 2024-12-06T03:45:42,691 DEBUG [Time-limited test {}] channel.DefaultChannelId(84): -Dio.netty.processId: 23992 (auto-detected) 2024-12-06T03:45:42,693 DEBUG [Time-limited test {}] channel.DefaultChannelId(106): -Dio.netty.machineId: 02:42:ac:ff:fe:11:00:02 (auto-detected) 2024-12-06T03:45:42,711 DEBUG [AsyncFSWAL-1-3 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:45811,DS-288cf1c6-00d8-4cf8-94c0-185e22eab264,DISK] 2024-12-06T03:45:42,711 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:38345,DS-41a6a1a0-6244-4204-8f34-4cceaad1cf4d,DISK] 2024-12-06T03:45:42,711 DEBUG [AsyncFSWAL-1-2 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:37139,DS-1bb302a9-c053-4e8f-b6c8-978d965dd803,DISK] 2024-12-06T03:45:42,714 DEBUG [AsyncFSWAL-1-3 {}] asyncfs.ProtobufDecoder(117): Hadoop 3.3 and above shades protobuf. 
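
The WALFactory(196) record above shows the provider choice is configuration-driven: this run instantiates AsyncFSWALProvider, whereas the earlier tests used the FSHLog-based provider. A sketch of steering that choice; the "hbase.wal.provider" key is HBase's, the wrapper class is illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public final class WalProviderConfig {
  static Configuration asyncFsWal() {
    Configuration conf = HBaseConfiguration.create();
    // "asyncfs" selects AsyncFSWALProvider (the Netty fan-out writer set up
    // above); "filesystem" would select the classic FSHLog provider instead.
    conf.set("hbase.wal.provider", "asyncfs");
    return conf;
  }
}
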
2024-12-06T03:45:42,741 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/WALs/hregion-92628914/hregion-92628914.1733456742668 2024-12-06T03:45:42,742 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new AsyncFSWAL writer with pipeline: [(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:40207:40207)] 2024-12-06T03:45:42,742 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => f53ce5050b24b3a626c30ab99d909679, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testFlushSequenceIdIsGreaterThanAllEditsInHFile', {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 2024-12-06T03:45:42,750 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741858_1034 (size=82) 2024-12-06T03:45:42,751 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741858_1034 (size=82) 2024-12-06T03:45:42,751 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741858_1034 (size=82) 2024-12-06T03:45:42,753 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-06T03:45:42,755 INFO [StoreOpener-f53ce5050b24b3a626c30ab99d909679-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,758 INFO [StoreOpener-f53ce5050b24b3a626c30ab99d909679-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region f53ce5050b24b3a626c30ab99d909679 columnFamilyName f 2024-12-06T03:45:42,758 DEBUG [StoreOpener-f53ce5050b24b3a626c30ab99d909679-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-06T03:45:42,759 INFO [StoreOpener-f53ce5050b24b3a626c30ab99d909679-1 {}] regionserver.HStore(327): Store=f53ce5050b24b3a626c30ab99d909679/f, 
memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-06T03:45:42,759 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,760 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,761 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,761 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,762 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,762 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,764 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,766 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-06T03:45:42,767 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened f53ce5050b24b3a626c30ab99d909679; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=66546759, jitterRate=-0.008376017212867737}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-12-06T03:45:42,769 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for f53ce5050b24b3a626c30ab99d909679: Writing region info on filesystem at 1733456742753Initializing all the Stores at 1733456742755 (+2 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733456742755Cleaning up temporary data from old regions at 1733456742762 (+7 ms)Region opened successfully at 1733456742769 (+7 ms) 2024-12-06T03:45:42,769 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing f53ce5050b24b3a626c30ab99d909679, disabling compactions & flushes 2024-12-06T03:45:42,769 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679. 2024-12-06T03:45:42,770 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679. 
2024-12-06T03:45:42,770 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679. after waiting 0 ms 2024-12-06T03:45:42,770 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679. 2024-12-06T03:45:42,770 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679. 2024-12-06T03:45:42,770 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for f53ce5050b24b3a626c30ab99d909679: Waiting for close lock at 1733456742769Disabling compacts and flushes for region at 1733456742769Disabling writes for close at 1733456742770 (+1 ms)Writing region close event to WAL at 1733456742770Closed at 1733456742770 2024-12-06T03:45:42,775 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741857_1033 (size=93) 2024-12-06T03:45:42,775 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741857_1033 (size=93) 2024-12-06T03:45:42,775 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741857_1033 (size=93) 2024-12-06T03:45:42,778 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/oldWALs 2024-12-06T03:45:42,778 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: AsyncFSWAL hregion-92628914:(num 1733456742668) 2024-12-06T03:45:42,781 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
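
The records above complete one full WAL lifecycle for factory id hregion-92628914: a writer is created, the region appends through it, and on close the last file is moved to oldWALs. A hedged sketch of that cycle through the public WALFactory API; conf and regionInfo are assumed inputs, and the factory id is taken from the log:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;

public final class WalLifecycle {
  static void cycle(Configuration conf, RegionInfo regionInfo) throws Exception {
    WALFactory factory = new WALFactory(conf, "hregion-92628914");
    WAL wal = factory.getWAL(regionInfo); // AsyncFSWAL under the asyncfs provider
    // ... the region appends and syncs edits through this WAL ...
    factory.close(); // closes the WAL and archives its last file to oldWALs
  }
}
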
2024-12-06T03:45:42,781 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/testFlushSequenceIdIsGreaterThanAllEditsInHFile, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/testFlushSequenceIdIsGreaterThanAllEditsInHFile, maxLogs=1760 2024-12-06T03:45:42,782 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456742782 2024-12-06T03:45:42,789 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/testFlushSequenceIdIsGreaterThanAllEditsInHFile/wal.1733456742782 2024-12-06T03:45:42,790 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new writer with pipeline: [(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:33355:33355)] 2024-12-06T03:45:42,794 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:42,796 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => f53ce5050b24b3a626c30ab99d909679, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679.', STARTKEY => '', ENDKEY => ''} 2024-12-06T03:45:42,796 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-06T03:45:42,796 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,796 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,800 INFO [StoreOpener-f53ce5050b24b3a626c30ab99d909679-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region f53ce5050b24b3a626c30ab99d909679 2024-12-06T03:45:42,802 INFO [StoreOpener-f53ce5050b24b3a626c30ab99d909679-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region f53ce5050b24b3a626c30ab99d909679 columnFamilyName f 2024-12-06T03:45:42,802 DEBUG [StoreOpener-f53ce5050b24b3a626c30ab99d909679-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-06T03:45:42,803 INFO 
2024-12-06T03:45:42,803 INFO [StoreOpener-f53ce5050b24b3a626c30ab99d909679-1 {}] regionserver.HStore(327): Store=f53ce5050b24b3a626c30ab99d909679/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-06T03:45:42,803 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for f53ce5050b24b3a626c30ab99d909679
2024-12-06T03:45:42,804 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679
2024-12-06T03:45:42,805 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679
2024-12-06T03:45:42,807 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679
2024-12-06T03:45:42,807 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for f53ce5050b24b3a626c30ab99d909679
2024-12-06T03:45:42,807 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for f53ce5050b24b3a626c30ab99d909679
2024-12-06T03:45:42,810 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for f53ce5050b24b3a626c30ab99d909679
2024-12-06T03:45:42,811 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened f53ce5050b24b3a626c30ab99d909679; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=63496854, jitterRate=-0.05382314324378967}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-06T03:45:42,815 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for f53ce5050b24b3a626c30ab99d909679: Writing region info on filesystem at 1733456742797Initializing all the Stores at 1733456742799 (+2 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733456742799Cleaning up temporary data from old regions at 1733456742807 (+8 ms)Region opened successfully at 1733456742814 (+7 ms)
2024-12-06T03:45:42,829 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.log.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir so I do NOT create it in target/test-data/f7b9789f-100e-03f7-a0c9-2b6572d90917
2024-12-06T03:45:42,829 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.log.dir property value differs in configuration and system: Configuration=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/../logs while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir Erasing configuration value by system value.
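The sequence above (instantiate the store, replay recovered edits, clean temporary data, write the seq-id marker, log the open journal) is the standard region-open path. A hedged sketch of re-opening a closed region against the same WAL; the openHRegion overload shown is from memory of the HBase 2.x API and should be treated as illustrative:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.wal.WAL;

    final class OpenSketch {
      // Re-open a previously closed region: replays any recovered edits found
      // under the region directory and emits the "Region open journal" above.
      static HRegion reopen(Configuration conf, FileSystem fs, Path rootDir,
          RegionInfo info, TableDescriptor htd, WAL wal) throws IOException {
        return HRegion.openHRegion(conf, fs, rootDir, info, htd, wal);
      }
    }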
2024-12-06T03:45:42,829 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.tmp.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.tmp.dir so I do NOT create it in target/test-data/f7b9789f-100e-03f7-a0c9-2b6572d90917
2024-12-06T03:45:42,829 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.tmp.dir property value differs in configuration and system: Configuration=/tmp/hadoop-jenkins while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.tmp.dir Erasing configuration value by system value.
2024-12-06T03:45:42,829 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f7b9789f-100e-03f7-a0c9-2b6572d90917
2024-12-06T03:45:42,854 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing f53ce5050b24b3a626c30ab99d909679 1/1 column families, dataSize=1.14 KB heapSize=2.13 KB
2024-12-06T03:45:42,954 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:43,055 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:43,156 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:43,257 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:43,358 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:43,459 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:43,559 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:43,661 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:43,761 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:43,862 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:43,963 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
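The flush that starts here ("Flushing ... 1/1 column families, dataSize=1.14 KB") is driven by the test after it loads ten edits; the repeated "Sleeping before appending 100ms" lines come from the test's instrumented WAL subclass (AbstractTestFSWAL$1), which delays each append so the flush outruns the WAL. A minimal sketch of producing that load and flush (row, family and qualifier names mirror the log but the code is illustrative):

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.util.Bytes;

    final class FlushSketch {
      static void loadAndFlush(HRegion region) throws IOException {
        byte[] row = Bytes.toBytes("testFlushSequenceIdIsGreaterThanAllEditsInHFile");
        byte[] family = Bytes.toBytes("f");
        // Ten edits, qualifiers x0..x9, matching "entries=10" further down.
        for (int i = 0; i < 10; i++) {
          region.put(new Put(row).addColumn(family, Bytes.toBytes("x" + i), Bytes.toBytes(i)));
        }
        // Force a memstore flush; the resulting HFile is tagged with a flush
        // sequence id at least as large as every edit it contains.
        region.flush(true);
      }
    }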
2024-12-06T03:45:43,990 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679/.tmp/f/c2637b180ba74a0e8cc5a459ca340aef is 121, key is testFlushSequenceIdIsGreaterThanAllEditsInHFile/f:x0/1733456742829/Put/seqid=0
2024-12-06T03:45:43,997 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741860_1036 (size=6333)
2024-12-06T03:45:43,997 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741860_1036 (size=6333)
2024-12-06T03:45:43,997 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741860_1036 (size=6333)
2024-12-06T03:45:43,998 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=1.14 KB at sequenceid=23 (bloomFilter=true), to=hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679/.tmp/f/c2637b180ba74a0e8cc5a459ca340aef
2024-12-06T03:45:44,009 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679/.tmp/f/c2637b180ba74a0e8cc5a459ca340aef as hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679/f/c2637b180ba74a0e8cc5a459ca340aef
2024-12-06T03:45:44,020 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/f53ce5050b24b3a626c30ab99d909679/f/c2637b180ba74a0e8cc5a459ca340aef, entries=10, sequenceid=23, filesize=6.2 K
2024-12-06T03:45:44,121 DEBUG [FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-06T03:45:44,126 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~1.14 KB/1170, heapSize ~2.11 KB/2160, currentSize=0 B/0 for f53ce5050b24b3a626c30ab99d909679 in 1271ms, sequenceid=23, compaction requested=false
2024-12-06T03:45:44,126 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for f53ce5050b24b3a626c30ab99d909679:
2024-12-06T03:45:44,126 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing f53ce5050b24b3a626c30ab99d909679, disabling compactions & flushes
2024-12-06T03:45:44,126 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679.
2024-12-06T03:45:44,127 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679.
2024-12-06T03:45:44,127 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679. after waiting 0 ms
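The "Committing ... .tmp/f/... as .../f/..." pair above is the two-step flush commit: the new HFile is written under the region's .tmp directory, then renamed into the column-family directory so readers never see a partial file. HBase does this through HRegionFileSystem, but the underlying primitive is just an HDFS rename, roughly:

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    final class CommitSketch {
      // Illustrative only: promote a fully written file out of .tmp in one
      // rename, relying on HDFS renames being atomic per file.
      static void commit(FileSystem fs, Path tmpFile, Path finalFile) throws IOException {
        if (!fs.rename(tmpFile, finalFile)) {
          throw new IOException("Failed to commit " + tmpFile + " to " + finalFile);
        }
      }
    }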
2024-12-06T03:45:44,127 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679.
2024-12-06T03:45:44,130 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733456742631.f53ce5050b24b3a626c30ab99d909679.
2024-12-06T03:45:44,130 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for f53ce5050b24b3a626c30ab99d909679: Waiting for close lock at 1733456744126Disabling compacts and flushes for region at 1733456744126Disabling writes for close at 1733456744127 (+1 ms)Writing region close event to WAL at 1733456744129 (+2 ms)Closed at 1733456744129
2024-12-06T03:45:44,131 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:44,131 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:44,131 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:44,132 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:44,132 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:44,136 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741859_1035 (size=16537)
2024-12-06T03:45:44,136 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741859_1035 (size=16537)
2024-12-06T03:45:44,137 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741859_1035 (size=16537)
2024-12-06T03:45:44,141 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/0b7f2860-7a71-8046-79b1-f8413752b86d/testFlushSequenceIdIsGreaterThanAllEditsInHFile
2024-12-06T03:45:44,141 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: wal:(num 1733456742782)
2024-12-06T03:45:44,151 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=216 (was 208)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-432277904_22 at /127.0.0.1:56044 [Waiting for operation #6] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: AsyncFSWAL-1-3 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146) app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-432277904_22 at /127.0.0.1:44152 [Waiting for operation #12] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: AsyncFSWAL-1-1 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146) app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: AsyncFSWAL-1-2 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146) app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
- Thread LEAK? -, OpenFileDescriptor=457 (was 425) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=334 (was 334), ProcessCount=11 (was 11), AvailableMemoryMB=8683 (was 8710)
2024-12-06T03:45:44,158 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=216, OpenFileDescriptor=457, MaxFileDescriptor=1048576, SystemLoadAverage=334, ProcessCount=11, AvailableMemoryMB=8683
2024-12-06T03:45:44,169 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741861_1037 (size=7)
2024-12-06T03:45:44,170 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741861_1037 (size=7)
2024-12-06T03:45:44,170 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741861_1037 (size=7)
2024-12-06T03:45:44,172 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8
2024-12-06T03:45:44,172 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:44,174 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-06T03:45:44,178 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
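The ResourceChecker "before:" line above opens testFailedToCreateWALIfParentRenamed, whose log follows. The test expects writer creation to fail once the WAL's parent directory has been renamed, because the writer creates its file with createNonRecursive, which refuses to recreate a missing parent. A bare-FileSystem sketch of that failure mode (paths illustrative, not the test's actual code):

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    final class ParentRenamedSketch {
      static void reproduce(FileSystem fs) throws IOException {
        Path dir = new Path("/test-data/testFailedToCreateWALIfParentRenamed");
        fs.mkdirs(dir);
        fs.rename(dir, new Path("/test-data/renamed"));
        // Expected to throw FileNotFoundException ("Parent directory doesn't
        // exist"): createNonRecursive will not recreate the renamed parent.
        fs.createNonRecursive(new Path(dir, "wal.1"), true, 4096, (short) 3,
            fs.getDefaultBlockSize(dir), null).close();
      }
    }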
2024-12-06T03:45:44,178 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/oldWALs, maxLogs=1760 2024-12-06T03:45:44,179 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456744179 2024-12-06T03:45:44,187 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744179 2024-12-06T03:45:44,188 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:34043:34043)] 2024-12-06T03:45:44,189 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456744188 2024-12-06T03:45:44,197 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456744189 2024-12-06T03:45:44,202 WARN [Time-limited test {}] wal.AbstractProtobufLogWriter(199): Init output failed, path=hdfs://localhost:44217/user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744189 java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?] at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?] 
at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?] at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?] at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?] at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy42.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] 
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy43.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 41 more 2024-12-06T03:45:44,203 DEBUG [Time-limited test {}] wal.FSHLogProvider(93): Error instantiating log writer. java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?] at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?] at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?] at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?] at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?] 
at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?] at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at 
org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy42.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] 
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy43.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] 
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] 
at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 41 more 2024-12-06T03:45:44,217 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=243 (was 216) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:56120 [Receiving block BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method)
java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:56134 [Receiving block BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:38345, 127.0.0.1:37139] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) 
app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:44242 [Receiving block BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:38345, 127.0.0.1:45811] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) 
app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:52028 [Receiving block BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:52018 [Receiving block BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) 
app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: ResponseProcessor for block BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:45811] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) 
app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:44256 [Receiving block BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: FSHLog-0-hdfs://localhost:44217/user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8-prefix:default java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) 
java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744179 block BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:37139] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744188 block BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: ResponseProcessor for block BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) 
app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) - Thread LEAK? -, OpenFileDescriptor=483 (was 457) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=334 (was 334), ProcessCount=11 (was 11), AvailableMemoryMB=8673 (was 8683) 2024-12-06T03:45:44,226 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=243, OpenFileDescriptor=483, MaxFileDescriptor=1048576, SystemLoadAverage=334, ProcessCount=11, AvailableMemoryMB=8672 2024-12-06T03:45:44,236 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741864_1040 (size=7) 2024-12-06T03:45:44,236 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741864_1040 (size=7) 2024-12-06T03:45:44,236 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741864_1040 (size=7) 2024-12-06T03:45:44,238 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8 2024-12-06T03:45:44,238 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:44,240 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:44,246 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-12-06T03:45:44,246 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/7184f35b-ce66-3f6c-7cf3-3e840917403c/testWALCoprocessorLoaded, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/7184f35b-ce66-3f6c-7cf3-3e840917403c/oldWALs, maxLogs=1760 2024-12-06T03:45:44,246 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456744246 2024-12-06T03:45:44,287 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/7184f35b-ce66-3f6c-7cf3-3e840917403c/testWALCoprocessorLoaded/wal.1733456744246 2024-12-06T03:45:44,288 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:34043:34043)] 2024-12-06T03:45:44,290 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,290 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,290 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,290 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,290 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,294 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741865_1041 (size=93) 2024-12-06T03:45:44,294 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741865_1041 (size=93) 2024-12-06T03:45:44,295 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741865_1041 (size=93) 2024-12-06T03:45:44,299 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/7184f35b-ce66-3f6c-7cf3-3e840917403c/oldWALs 2024-12-06T03:45:44,299 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733456744246) 2024-12-06T03:45:44,308 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=248 (was 243) - Thread LEAK? -, OpenFileDescriptor=495 (was 483) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=334 (was 334), ProcessCount=11 (was 11), AvailableMemoryMB=8667 (was 8672) 2024-12-06T03:45:44,315 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=248, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=334, ProcessCount=11, AvailableMemoryMB=8666 2024-12-06T03:45:44,325 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741866_1042 (size=7) 2024-12-06T03:45:44,325 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741866_1042 (size=7) 2024-12-06T03:45:44,325 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741866_1042 (size=7) 2024-12-06T03:45:44,327 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8 2024-12-06T03:45:44,327 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:44,329 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:44,334 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-06T03:45:44,334 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/e27c772d-1a26-f7b3-1ec7-44a6988cbc29/testSyncNoAppend, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/e27c772d-1a26-f7b3-1ec7-44a6988cbc29/testSyncNoAppend, maxLogs=1760 2024-12-06T03:45:44,334 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456744334 2024-12-06T03:45:44,341 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/e27c772d-1a26-f7b3-1ec7-44a6988cbc29/testSyncNoAppend/wal.1733456744334 2024-12-06T03:45:44,342 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40207:40207),(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:34043:34043)] 2024-12-06T03:45:44,343 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,343 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,343 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,343 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,343 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,345 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741867_1043 (size=93) 2024-12-06T03:45:44,346 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741867_1043 (size=93) 2024-12-06T03:45:44,346 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to 
blk_1073741867_1043 (size=93) 2024-12-06T03:45:44,348 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/e27c772d-1a26-f7b3-1ec7-44a6988cbc29/testSyncNoAppend 2024-12-06T03:45:44,349 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733456744334) 2024-12-06T03:45:44,357 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=253 (was 248) - Thread LEAK? -, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=334 (was 334), ProcessCount=11 (was 11), AvailableMemoryMB=8661 (was 8666) 2024-12-06T03:45:44,365 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=253, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=334, ProcessCount=11, AvailableMemoryMB=8661 2024-12-06T03:45:44,377 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741868_1044 (size=7) 2024-12-06T03:45:44,378 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741868_1044 (size=7) 2024-12-06T03:45:44,378 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741868_1044 (size=7) 2024-12-06T03:45:44,380 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8 2024-12-06T03:45:44,380 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:44,381 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:44,386 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-12-06T03:45:44,386 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/99b770df-be78-c39f-488f-2bec937f3026/testWriteEntryCanBeNull, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/99b770df-be78-c39f-488f-2bec937f3026/testWriteEntryCanBeNull, maxLogs=1760 2024-12-06T03:45:44,387 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456744387 2024-12-06T03:45:44,393 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/99b770df-be78-c39f-488f-2bec937f3026/testWriteEntryCanBeNull/wal.1733456744387 2024-12-06T03:45:44,394 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:40207:40207)] 2024-12-06T03:45:44,394 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,394 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,395 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,395 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,395 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-06T03:45:44,397 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741869_1045 (size=93) 2024-12-06T03:45:44,397 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741869_1045 (size=93) 2024-12-06T03:45:44,398 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741869_1045 (size=93) 2024-12-06T03:45:44,400 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/99b770df-be78-c39f-488f-2bec937f3026/testWriteEntryCanBeNull 2024-12-06T03:45:44,401 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733456744387) 2024-12-06T03:45:44,412 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=258 (was 253) - Thread LEAK? 
-, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=334 (was 334), ProcessCount=11 (was 11), AvailableMemoryMB=8655 (was 8661) 2024-12-06T03:45:44,420 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=258, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=334, ProcessCount=11, AvailableMemoryMB=8654 2024-12-06T03:45:44,430 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741870_1046 (size=7) 2024-12-06T03:45:44,430 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741870_1046 (size=7) 2024-12-06T03:45:44,430 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741870_1046 (size=7) 2024-12-06T03:45:44,431 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8 with version=8 2024-12-06T03:45:44,432 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:44,433 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-06T03:45:44,439 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-12-06T03:45:44,439 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/testUnflushedSeqIdTrackingWithAsyncWal, archiveDir=hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/oldWALs, maxLogs=1760 2024-12-06T03:45:44,440 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733456744440 2024-12-06T03:45:44,448 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/testUnflushedSeqIdTrackingWithAsyncWal/wal.1733456744440 2024-12-06T03:45:44,449 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:34043:34043),(127.0.0.1/127.0.0.1:33355:33355),(127.0.0.1/127.0.0.1:40207:40207)] 2024-12-06T03:45:44,449 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => b3de6b4010ebf1d9de6449c3dc08e663, NAME => 'table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495 2024-12-06T03:45:44,461 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741872_1048 (size=40) 2024-12-06T03:45:44,461 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741872_1048 (size=40) 2024-12-06T03:45:44,461 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741872_1048 (size=40) 2024-12-06T03:45:44,462 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-06T03:45:44,463 INFO [StoreOpener-b3de6b4010ebf1d9de6449c3dc08e663-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,465 INFO [StoreOpener-b3de6b4010ebf1d9de6449c3dc08e663-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 
b3de6b4010ebf1d9de6449c3dc08e663 columnFamilyName b 2024-12-06T03:45:44,465 DEBUG [StoreOpener-b3de6b4010ebf1d9de6449c3dc08e663-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-06T03:45:44,466 INFO [StoreOpener-b3de6b4010ebf1d9de6449c3dc08e663-1 {}] regionserver.HStore(327): Store=b3de6b4010ebf1d9de6449c3dc08e663/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-06T03:45:44,466 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,467 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,467 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,468 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/ac338920-7362-e99e-fa3a-c4d2b6ec6bea/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,468 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,468 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,471 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,474 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44217/user/jenkins/test-data/ac338920-7362-e99e-fa3a-c4d2b6ec6bea/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-06T03:45:44,475 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened b3de6b4010ebf1d9de6449c3dc08e663; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=71166760, jitterRate=0.06046736240386963}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-12-06T03:45:44,477 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for b3de6b4010ebf1d9de6449c3dc08e663: Writing region info on filesystem at 1733456744462Initializing all the Stores at 1733456744463 (+1 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733456744463Cleaning up temporary data from old regions at 1733456744468 (+5 ms)Region opened successfully at 1733456744477 (+9 ms) 2024-12-06T03:45:44,477 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing b3de6b4010ebf1d9de6449c3dc08e663, disabling compactions & flushes 2024-12-06T03:45:44,477 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region 
table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. 2024-12-06T03:45:44,477 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. 2024-12-06T03:45:44,477 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. after waiting 0 ms 2024-12-06T03:45:44,477 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. 2024-12-06T03:45:44,478 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. 2024-12-06T03:45:44,478 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for b3de6b4010ebf1d9de6449c3dc08e663: Waiting for close lock at 1733456744477Disabling compacts and flushes for region at 1733456744477Disabling writes for close at 1733456744477Writing region close event to WAL at 1733456744478 (+1 ms)Closed at 1733456744478 2024-12-06T03:45:44,479 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => b3de6b4010ebf1d9de6449c3dc08e663, NAME => 'table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663.', STARTKEY => '', ENDKEY => ''} 2024-12-06T03:45:44,481 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,481 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-06T03:45:44,481 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,481 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,483 INFO [StoreOpener-b3de6b4010ebf1d9de6449c3dc08e663-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,484 INFO [StoreOpener-b3de6b4010ebf1d9de6449c3dc08e663-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region b3de6b4010ebf1d9de6449c3dc08e663 columnFamilyName b 2024-12-06T03:45:44,485 DEBUG [StoreOpener-b3de6b4010ebf1d9de6449c3dc08e663-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-06T03:45:44,485 INFO [StoreOpener-b3de6b4010ebf1d9de6449c3dc08e663-1 {}] regionserver.HStore(327): 
Store=b3de6b4010ebf1d9de6449c3dc08e663/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-06T03:45:44,485 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,486 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,486 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,488 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44217/user/jenkins/test-data/ac338920-7362-e99e-fa3a-c4d2b6ec6bea/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,488 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,488 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,491 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,492 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened b3de6b4010ebf1d9de6449c3dc08e663; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=61775590, jitterRate=-0.07947197556495667}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-12-06T03:45:44,492 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for b3de6b4010ebf1d9de6449c3dc08e663 2024-12-06T03:45:44,493 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for b3de6b4010ebf1d9de6449c3dc08e663: Running coprocessor pre-open hook at 1733456744481Writing region info on filesystem at 1733456744481Initializing all the Stores at 1733456744483 (+2 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733456744483Cleaning up temporary data from old regions at 1733456744488 (+5 ms)Running coprocessor post-open hooks at 1733456744492 (+4 ms)Region opened successfully at 1733456744493 (+1 ms) 2024-12-06T03:45:45,271 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor 2024-12-06T03:45:45,272 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor Metrics about HBase WALObservers 2024-12-06T03:45:45,275 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=TableRequests_Namespace_default_table_table 2024-12-06T03:45:45,275 INFO 
[HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=TableRequests_Namespace_default_table_table Metrics about Tables on a single HBase RegionServer 2024-12-06T03:45:47,500 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1722): Closing b3de6b4010ebf1d9de6449c3dc08e663, disabling compactions & flushes 2024-12-06T03:45:47,500 INFO [pool-106-thread-1 {}] regionserver.HRegion(1755): Closing region table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. 2024-12-06T03:45:47,501 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. 2024-12-06T03:45:47,501 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1843): Acquired close lock on table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. after waiting 0 ms 2024-12-06T03:45:47,501 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1853): Updates disabled for region table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. 2024-12-06T03:45:47,503 INFO [pool-106-thread-1 {}] regionserver.HRegion(2902): Flushing b3de6b4010ebf1d9de6449c3dc08e663 1/1 column families, dataSize=48 B heapSize=448 B 2024-12-06T03:45:48,095 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties 2024-12-06T03:45:50,522 DEBUG [pool-106-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663/.tmp/b/d73727b531d3436b9711f4f0fa074d2e is 28, key is b/b:b/1733456744495/Put/seqid=0 2024-12-06T03:45:50,529 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741873_1049 (size=4945) 2024-12-06T03:45:50,529 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741873_1049 (size=4945) 2024-12-06T03:45:50,529 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741873_1049 (size=4945) 2024-12-06T03:45:50,529 INFO [pool-106-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663/.tmp/b/d73727b531d3436b9711f4f0fa074d2e 2024-12-06T03:45:50,538 DEBUG [pool-106-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663/.tmp/b/d73727b531d3436b9711f4f0fa074d2e as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663/b/d73727b531d3436b9711f4f0fa074d2e 2024-12-06T03:45:50,547 INFO [pool-106-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663/b/d73727b531d3436b9711f4f0fa074d2e, entries=1, sequenceid=6, filesize=4.8 K 
2024-12-06T03:45:50,548 INFO [pool-106-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for b3de6b4010ebf1d9de6449c3dc08e663 in 3046ms, sequenceid=6, compaction requested=false
2024-12-06T03:45:50,554 DEBUG [pool-106-thread-1 {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44217/user/jenkins/test-data/ac338920-7362-e99e-fa3a-c4d2b6ec6bea/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663/recovered.edits/9.seqid, newMaxSeqId=9, maxSeqId=1
2024-12-06T03:45:50,554 INFO [pool-106-thread-1 {}] regionserver.HRegion(1973): Closed table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663.
2024-12-06T03:45:50,555 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1676): Region close journal for b3de6b4010ebf1d9de6449c3dc08e663:
    Waiting for close lock at 1733456747500
    Running coprocessor pre-close hooks at 1733456747500
    Disabling compacts and flushes for region at 1733456747500
    Disabling writes for close at 1733456747501 (+1 ms)
    Obtaining lock to block concurrent updates at 1733456747503 (+2 ms)
    Preparing flush snapshotting stores in b3de6b4010ebf1d9de6449c3dc08e663 at 1733456747503
    Finished memstore snapshotting table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663., syncing WAL and waiting on mvcc, flushsize=dataSize=48, getHeapSize=432, getOffHeapSize=0, getCellsCount=2 at 1733456747505 (+2 ms)
    Flushing stores of table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. at 1733456750501 (+2996 ms)
    Flushing b3de6b4010ebf1d9de6449c3dc08e663/b: creating writer at 1733456750502 (+1 ms)
    Flushing b3de6b4010ebf1d9de6449c3dc08e663/b: appending metadata at 1733456750522 (+20 ms)
    Flushing b3de6b4010ebf1d9de6449c3dc08e663/b: closing flushed file at 1733456750522
    Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@239e2089: reopening flushed file at 1733456750537 (+15 ms)
    Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for b3de6b4010ebf1d9de6449c3dc08e663 in 3046ms, sequenceid=6, compaction requested=false at 1733456750548 (+11 ms)
    Writing region close event to WAL at 1733456750549 (+1 ms)
    Running coprocessor post-close hooks at 1733456750554 (+5 ms)
    Closed at 1733456750554
2024-12-06T03:45:50,555 INFO [pool-106-thread-1 {}] wal.AbstractTestFSWAL(620): Close result:{[B@62c903aa=[/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/data/default/table/b3de6b4010ebf1d9de6449c3dc08e663/b/d73727b531d3436b9711f4f0fa074d2e]}
2024-12-06T03:45:50,555 WARN [Time-limited test {}] regionserver.HRegion(1707): Region table,,1733456744449.b3de6b4010ebf1d9de6449c3dc08e663. already closed
2024-12-06T03:45:50,555 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for b3de6b4010ebf1d9de6449c3dc08e663: Waiting for close lock at 1733456750555
2024-12-06T03:45:50,555 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:50,556 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:50,556 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:50,556 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:50,556 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-06T03:45:50,558 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:45811 is added to blk_1073741871_1047 (size=1206)
2024-12-06T03:45:50,558 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:38345 is added to blk_1073741871_1047 (size=1206)
2024-12-06T03:45:50,558 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37139 is added to blk_1073741871_1047 (size=1206)
2024-12-06T03:45:50,561 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/0d3f5dd1-f109-eb1d-70fd-920ba3d0a2a8/oldWALs
2024-12-06T03:45:50,561 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733456744440)
2024-12-06T03:45:50,569 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=262 (was 258)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:52118 [Waiting for operation #3]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:44342 [Waiting for operation #3]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=503 (was 495) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=297 (was 334), ProcessCount=11 (was 11), AvailableMemoryMB=8633 (was 8654)
2024-12-06T03:45:50,569 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1019): Shutting down minicluster
2024-12-06T03:45:50,570 WARN [PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039, type=LAST_IN_PIPELINE {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-1167662243-172.17.0.2-1733456725090:1073741863
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-06T03:45:50,571 WARN [ResponseProcessor for block BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039
java.io.IOException: Bad response ERROR for BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039 from datanode DatanodeInfoWithStorage[127.0.0.1:45811,DS-288cf1c6-00d8-4cf8-94c0-185e22eab264,DISK]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-06T03:45:50,571 WARN [DataStreamer for file /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744188 block BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039 {}] hdfs.DataStreamer(1731): Error Recovery for BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039 in pipeline [DatanodeInfoWithStorage[127.0.0.1:37139,DS-1bb302a9-c053-4e8f-b6c8-978d965dd803,DISK], DatanodeInfoWithStorage[127.0.0.1:38345,DS-41a6a1a0-6244-4204-8f34-4cceaad1cf4d,DISK], DatanodeInfoWithStorage[127.0.0.1:45811,DS-288cf1c6-00d8-4cf8-94c0-185e22eab264,DISK]]: datanode 2(DatanodeInfoWithStorage[127.0.0.1:45811,DS-288cf1c6-00d8-4cf8-94c0-185e22eab264,DISK]) is bad.
2024-12-06T03:45:50,571 WARN [PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:38345, 127.0.0.1:45811] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-06T03:45:50,572 WARN [PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:45811] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-06T03:45:50,572 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:52028 [Receiving block BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039] {}] datanode.BlockReceiver(316): Block 1073741863 has not released the reserved bytes. Releasing 2097067 bytes as part of close.
2024-12-06T03:45:50,576 WARN [DataStreamer for file /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744188 block BP-1167662243-172.17.0.2-1733456725090:blk_1073741863_1039 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744188 (inode 16549) Holder DFSClient_NONMAPREDUCE_2007928288_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    ... (the preceding five reflection/proxy frames repeat verbatim many more times) ...
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    ... (the preceding six reflection/proxy frames repeat verbatim many more times) ...
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-06T03:45:50,576 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744188 with renewLeaseKey: DEFAULT_16549
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744188 (inode 16549) Holder DFSClient_NONMAPREDUCE_2007928288_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    ... (the preceding five reflection/proxy frames repeat verbatim many more times) ...
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    ... (the preceding six reflection/proxy frames repeat verbatim many more times) ...
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-06T03:45:50,578 WARN [PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038, type=LAST_IN_PIPELINE {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-1167662243-172.17.0.2-1733456725090:1073741862
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-06T03:45:50,579 WARN [ResponseProcessor for block BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038
java.io.IOException: Bad response ERROR for BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038 from datanode DatanodeInfoWithStorage[127.0.0.1:37139,DS-1bb302a9-c053-4e8f-b6c8-978d965dd803,DISK]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-06T03:45:50,579 WARN [DataStreamer for file /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744179 block BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038 {}] hdfs.DataStreamer(1731): Error Recovery for BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038 in pipeline [DatanodeInfoWithStorage[127.0.0.1:45811,DS-288cf1c6-00d8-4cf8-94c0-185e22eab264,DISK], DatanodeInfoWithStorage[127.0.0.1:38345,DS-41a6a1a0-6244-4204-8f34-4cceaad1cf4d,DISK], DatanodeInfoWithStorage[127.0.0.1:37139,DS-1bb302a9-c053-4e8f-b6c8-978d965dd803,DISK]]: datanode 2(DatanodeInfoWithStorage[127.0.0.1:37139,DS-1bb302a9-c053-4e8f-b6c8-978d965dd803,DISK]) is bad.
2024-12-06T03:45:50,579 WARN [PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:38345, 127.0.0.1:37139] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-06T03:45:50,579 WARN [PacketResponder: BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:37139] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-06T03:45:50,580 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_2007928288_22 at /127.0.0.1:52018 [Receiving block BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038] {}] datanode.BlockReceiver(316): Block 1073741862 has not released the reserved bytes. Releasing 2097067 bytes as part of close.
2024-12-06T03:45:50,580 WARN [DataStreamer for file /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744179 block BP-1167662243-172.17.0.2-1733456725090:blk_1073741862_1038 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744179 (inode 16548) Holder DFSClient_NONMAPREDUCE_2007928288_22 does not have any open files.
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] 
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
... [the five frames from GeneratedMethodAccessor79.invoke through $Proxy46.getAdditionalDatanode repeat verbatim many more times; duplicates elided] ...
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-06T03:45:50,581 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744179 with renewLeaseKey: DEFAULT_16548
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/e5a19304-67ea-2b04-eb15-e37c022c6ef8/testFailedToCreateWALIfParentRenamed/wal.1733456744179 (inode 16548) Holder DFSClient_NONMAPREDUCE_2007928288_22 does not have any open files.
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
... [the five frames from GeneratedMethodAccessor79.invoke through $Proxy46.getAdditionalDatanode repeat verbatim many more times; duplicates elided] ...
at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
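The long runs of Method.invoke → HFileSystem$1.invoke → jdk.proxy2.$Proxy46.getAdditionalDatanode frames above are the signature of a JDK dynamic proxy sitting between the DFS client and the namenode protocol: each proxy layer adds one more pass through the same reflective frames. A minimal, self-contained sketch of that wrapping pattern follows; the NamenodeProtocol interface and the wrap() helper are illustrative stand-ins and not HBase's actual HFileSystem code.

```java
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;

/** Hypothetical stand-in for the HDFS client protocol method seen in the trace. */
interface NamenodeProtocol {
  String getAdditionalDatanode(String src);
}

public class ProxyWrapDemo {

  /** Wraps a delegate in a reflective proxy, the same JDK mechanism HFileSystem$1 uses. */
  static NamenodeProtocol wrap(NamenodeProtocol delegate) {
    InvocationHandler handler = (proxy, method, args) ->
        // A real wrapper would post-process the result here (e.g. reorder
        // datanode locations); this sketch only forwards the call, which is
        // exactly what produces the Method.invoke frames in a stack trace.
        method.invoke(delegate, args);
    return (NamenodeProtocol) Proxy.newProxyInstance(
        NamenodeProtocol.class.getClassLoader(),
        new Class<?>[] {NamenodeProtocol.class},
        handler);
  }

  public static void main(String[] args) {
    NamenodeProtocol base = src -> "datanode-for-" + src;

    // Stacking the wrapper three times: every layer adds one more
    // invoke()/getAdditionalDatanode() pair to any stack trace taken
    // inside the call, mirroring the repeated frames in the log.
    NamenodeProtocol wrapped = wrap(wrap(wrap(base)));
    System.out.println(wrapped.getAdditionalDatanode("/user/jenkins/wal"));
  }
}
```

Under this reading, the depth of the repeated cycle would track how many proxy layers ended up stacked on the same delegate; the log alone does not confirm the exact count or why the layering occurred.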
2024-12-06T03:45:50,626 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@4b2b884e{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-06T03:45:50,630 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@fa4aa4c{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-06T03:45:50,630 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-06T03:45:50,631 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@47ddd06a{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-06T03:45:50,631 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@4c49fcd3{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir/,STOPPED}
2024-12-06T03:45:50,634 WARN [BP-1167662243-172.17.0.2-1733456725090 heartbeating to localhost/127.0.0.1:44217 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-06T03:45:50,634 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-06T03:45:50,634 WARN [BP-1167662243-172.17.0.2-1733456725090 heartbeating to localhost/127.0.0.1:44217 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-1167662243-172.17.0.2-1733456725090 (Datanode Uuid 410826ed-3178-4c9f-a460-11a642ea1bbd) service to localhost/127.0.0.1:44217
2024-12-06T03:45:50,634 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-06T03:45:50,635 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data5/current/BP-1167662243-172.17.0.2-1733456725090 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-06T03:45:50,635 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data6/current/BP-1167662243-172.17.0.2-1733456725090 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-06T03:45:50,636 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-06T03:45:50,638 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@28637041{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-06T03:45:50,638 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@5e18b7fd{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-06T03:45:50,638 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-06T03:45:50,639 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@243038a3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-06T03:45:50,639 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@443ad5c2{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir/,STOPPED}
2024-12-06T03:45:50,640 WARN [BP-1167662243-172.17.0.2-1733456725090 heartbeating to localhost/127.0.0.1:44217 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-06T03:45:50,640 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-06T03:45:50,640 WARN [BP-1167662243-172.17.0.2-1733456725090 heartbeating to localhost/127.0.0.1:44217 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-1167662243-172.17.0.2-1733456725090 (Datanode Uuid 4c1a5a00-593f-49d0-bbf8-1b1cdbe1fea7) service to localhost/127.0.0.1:44217
2024-12-06T03:45:50,640 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-06T03:45:50,641 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data3/current/BP-1167662243-172.17.0.2-1733456725090 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-06T03:45:50,641 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data4/current/BP-1167662243-172.17.0.2-1733456725090 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-06T03:45:50,641 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-06T03:45:50,643 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@411b19f7{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-06T03:45:50,644 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@64bb503e{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-06T03:45:50,644 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-06T03:45:50,644 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@43a917ce{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-06T03:45:50,644 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@208945{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir/,STOPPED}
2024-12-06T03:45:50,645 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-06T03:45:50,645 WARN [BP-1167662243-172.17.0.2-1733456725090 heartbeating to localhost/127.0.0.1:44217 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-06T03:45:50,645 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-06T03:45:50,645 WARN [BP-1167662243-172.17.0.2-1733456725090 heartbeating to localhost/127.0.0.1:44217 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-1167662243-172.17.0.2-1733456725090 (Datanode Uuid 92ae2001-b2d7-4d4a-a2c3-b58f1ba3d3d7) service to localhost/127.0.0.1:44217
2024-12-06T03:45:50,646 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data1/current/BP-1167662243-172.17.0.2-1733456725090 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-06T03:45:50,646 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/cluster_61344cbb-c52a-a3c4-d6a2-c987d343d9ba/data/data2/current/BP-1167662243-172.17.0.2-1733456725090 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-06T03:45:50,646 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-06T03:45:50,653 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@5599def{hdfs,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-12-06T03:45:50,654 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@493ba8a1{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-06T03:45:50,654 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-06T03:45:50,654 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@61b73bb3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-06T03:45:50,654 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@746f7db{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/2dd3b850-0e83-952c-7eb4-e501c831b495/hadoop.log.dir/,STOPPED}
2024-12-06T03:45:50,693 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1026): Minicluster is down
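For context, the closing hbase.HBaseTestingUtil(1026): Minicluster is down line is what HBase's test utility prints once the embedded cluster from this run is fully torn down. A rough sketch of the JUnit scaffolding that produces a startup/shutdown sequence like the one above follows; the class name, method names, and datanode count are assumptions, and it presumes HBaseTestingUtil keeps the startMiniDFSCluster/shutdownMiniCluster methods of the older HBaseTestingUtility.

```java
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;

public class MiniClusterScaffold {

  private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();

  @BeforeClass
  public static void setUpCluster() throws Exception {
    // Starts an embedded NameNode plus datanodes; the Jetty
    // "webapps/hdfs" and "webapps/datanode" contexts being stopped
    // in the log above belong to these daemons.
    UTIL.startMiniDFSCluster(3);
  }

  @AfterClass
  public static void tearDownCluster() throws Exception {
    // Interrupts heartbeat/block-pool threads, stops the Jetty
    // handlers, and finally logs "Minicluster is down".
    UTIL.shutdownMiniCluster();
  }
}
```

The interleaved WARN/ERROR lines during shutdown (interrupted heartbeats, ending command processors, sleep-interrupted refresh threads) are the expected side effects of this teardown interrupting the datanodes' background threads, not independent failures.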