2024-12-10 11:00:40,529 main DEBUG Apache Log4j Core 2.17.2 initializing configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-12-10 11:00:40,540 main DEBUG Took 0.009341 seconds to load 1 plugins from package org.apache.hadoop.hbase.logging
2024-12-10 11:00:40,540 main DEBUG PluginManager 'Core' found 129 plugins
2024-12-10 11:00:40,540 main DEBUG PluginManager 'Level' found 0 plugins
2024-12-10 11:00:40,541 main DEBUG PluginManager 'Lookup' found 16 plugins
2024-12-10 11:00:40,542 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,548 main DEBUG PluginManager 'TypeConverter' found 26 plugins
2024-12-10 11:00:40,559 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.util.MBeans", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,561 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,561 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.logging.TestJul2Slf4j", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,561 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,562 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.zookeeper", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,562 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,563 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,563 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,563 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,564 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,564 main DEBUG LoggerConfig$Builder(additivity="false", level="WARN", levelAndRefs="null", name="org.apache.directory", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,565 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,565 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.ipc.FailedServers", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,565 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,566 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsConfig", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,566 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,566 main DEBUG LoggerConfig$Builder(additivity="null", level="INFO", levelAndRefs="null", name="org.apache.hadoop.hbase.ScheduledChore", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,567 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,567 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.regionserver.RSRpcServices", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,567 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,568 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,568 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,568 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,569 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-12-10 11:00:40,569 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hbase.thirdparty.io.netty.channel", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,569 main DEBUG Building Plugin[name=root, class=org.apache.logging.log4j.core.config.LoggerConfig$RootLogger].
2024-12-10 11:00:40,570 main DEBUG LoggerConfig$RootLogger$Builder(additivity="null", level="null", levelAndRefs="INFO,Console", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-12-10 11:00:40,572 main DEBUG Building Plugin[name=loggers, class=org.apache.logging.log4j.core.config.LoggersPlugin].
2024-12-10 11:00:40,573 main DEBUG createLoggers(={org.apache.hadoop.metrics2.util.MBeans, org.apache.hadoop.hbase.logging.TestJul2Slf4j, org.apache.zookeeper, org.apache.hadoop.metrics2.impl.MetricsSinkAdapter, org.apache.hadoop.metrics2.impl.MetricsSystemImpl, org.apache.directory, org.apache.hadoop.hbase.ipc.FailedServers, org.apache.hadoop.metrics2.impl.MetricsConfig, org.apache.hadoop.hbase.ScheduledChore, org.apache.hadoop.hbase.regionserver.RSRpcServices, org.apache.hadoop, org.apache.hadoop.hbase, org.apache.hbase.thirdparty.io.netty.channel, root})
2024-12-10 11:00:40,573 main DEBUG Building Plugin[name=layout, class=org.apache.logging.log4j.core.layout.PatternLayout].
2024-12-10 11:00:40,574 main DEBUG PatternLayout$Builder(pattern="%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n", PatternSelector=null, Configuration(PropertiesConfig), Replace=null, charset="null", alwaysWriteExceptions="null", disableAnsi="null", noConsoleNoAnsi="null", header="null", footer="null")
2024-12-10 11:00:40,575 main DEBUG PluginManager 'Converter' found 47 plugins
2024-12-10 11:00:40,582 main DEBUG Building Plugin[name=appender, class=org.apache.hadoop.hbase.logging.HBaseTestAppender].
2024-12-10 11:00:40,585 main DEBUG HBaseTestAppender$Builder(target="SYSTEM_ERR", maxSize="1G", bufferedIo="null", bufferSize="null", immediateFlush="null", ignoreExceptions="null", PatternLayout(%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n), name="Console", Configuration(PropertiesConfig), Filter=null, ={})
2024-12-10 11:00:40,586 main DEBUG Starting HBaseTestOutputStreamManager SYSTEM_ERR
2024-12-10 11:00:40,586 main DEBUG Building Plugin[name=appenders, class=org.apache.logging.log4j.core.config.AppendersPlugin].
2024-12-10 11:00:40,587 main DEBUG createAppenders(={Console})
2024-12-10 11:00:40,588 main DEBUG Configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f initialized
2024-12-10 11:00:40,588 main DEBUG Starting configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-12-10 11:00:40,588 main DEBUG Started configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f OK.
2024-12-10 11:00:40,588 main DEBUG Shutting down OutputStreamManager SYSTEM_OUT.false.false-1
2024-12-10 11:00:40,589 main DEBUG OutputStream closed
2024-12-10 11:00:40,589 main DEBUG Shut down OutputStreamManager SYSTEM_OUT.false.false-1, all resources released: true
2024-12-10 11:00:40,589 main DEBUG Appender DefaultConsole-1 stopped with status true
2024-12-10 11:00:40,589 main DEBUG Stopped org.apache.logging.log4j.core.config.DefaultConfiguration@61001b64 OK
2024-12-10 11:00:40,654 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6
2024-12-10 11:00:40,656 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=StatusLogger
2024-12-10 11:00:40,657 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=ContextSelector
2024-12-10 11:00:40,658 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=
2024-12-10 11:00:40,658 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.directory
2024-12-10 11:00:40,658 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
2024-12-10 11:00:40,659 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.zookeeper
2024-12-10 11:00:40,659 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.logging.TestJul2Slf4j
2024-12-10 11:00:40,659 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSystemImpl
2024-12-10 11:00:40,660 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.util.MBeans
2024-12-10 11:00:40,660 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase
2024-12-10 11:00:40,660 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop
2024-12-10 11:00:40,660 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ipc.FailedServers
2024-12-10 11:00:40,661 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.regionserver.RSRpcServices
2024-12-10 11:00:40,661 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsConfig
2024-12-10 11:00:40,661 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hbase.thirdparty.io.netty.channel
2024-12-10 11:00:40,661 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ScheduledChore
2024-12-10 11:00:40,662 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Appenders,name=Console
2024-12-10 11:00:40,664 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-12-10 11:00:40,665 main DEBUG Reconfiguration complete for context[name=1dbd16a6] at URI jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-logging/target/hbase-logging-4.0.0-alpha-1-SNAPSHOT-tests.jar!/log4j2.properties (org.apache.logging.log4j.core.LoggerContext@40db2a24) with optional ClassLoader: null
2024-12-10 11:00:40,665 main DEBUG Shutdown hook enabled. Registering a new one.
2024-12-10 11:00:40,666 main DEBUG LoggerContext[name=1dbd16a6, org.apache.logging.log4j.core.LoggerContext@40db2a24] started OK.
2024-12-10T11:00:40,934 DEBUG [main {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82
2024-12-10 11:00:40,937 main DEBUG AsyncLogger.ThreadNameStrategy=UNCACHED (user specified null, default is UNCACHED)
2024-12-10 11:00:40,937 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
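For reference, the LoggerConfig$Builder and HBaseTestAppender$Builder calls above correspond to a log4j2.properties along the following lines. This is a sketch reconstructed from the logged builder arguments, not the actual file from the test jar; the property-key names and the HBaseTestAppender plugin name are assumptions based on standard Log4j 2 properties syntax, and only three of the fourteen loggers are shown:

    # Reconstructed sketch; levels and appender settings taken from the builder lines above
    logger.zookeeper.name = org.apache.zookeeper
    logger.zookeeper.level = ERROR
    logger.hadoop.name = org.apache.hadoop
    logger.hadoop.level = WARN
    logger.hbase.name = org.apache.hadoop.hbase
    logger.hbase.level = DEBUG
    # ...eleven further loggers, as listed in createLoggers() above...
    rootLogger = INFO,Console
    appender.console.type = HBaseTestAppender
    appender.console.name = Console
    appender.console.target = SYSTEM_ERR
    appender.console.maxSize = 1G
    appender.console.layout.type = PatternLayout
    appender.console.layout.pattern = %d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n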
2024-12-10T11:00:40,946 INFO [main {}] hbase.HBaseClassTestRule(94): Test class org.apache.hadoop.hbase.regionserver.wal.TestFSHLog timeout: 13 mins
2024-12-10T11:00:40,967 INFO [Time-limited test {}] hbase.HBaseZKTestingUtil(84): Created new mini-cluster data directory: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70, deleteOnExit=true
2024-12-10T11:00:40,968 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting test.cache.data to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/test.cache.data in system properties and HBase conf
2024-12-10T11:00:40,969 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.tmp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.tmp.dir in system properties and HBase conf
2024-12-10T11:00:40,970 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.log.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir in system properties and HBase conf
2024-12-10T11:00:40,970 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.local.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/mapreduce.cluster.local.dir in system properties and HBase conf
2024-12-10T11:00:40,971 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.temp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/mapreduce.cluster.temp.dir in system properties and HBase conf
2024-12-10T11:00:40,971 INFO [Time-limited test {}] hbase.HBaseTestingUtil(738): read short circuit is OFF
2024-12-10T11:00:41,072 WARN [Time-limited test {}] util.NativeCodeLoader(60): Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2024-12-10T11:00:41,156 DEBUG [Time-limited test {}] fs.HFileSystem(310): The file system is not a DistributedFileSystem. Skipping on block location reordering
2024-12-10T11:00:41,159 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-labels.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/yarn.node-labels.fs-store.root-dir in system properties and HBase conf
2024-12-10T11:00:41,160 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-attribute.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/yarn.node-attribute.fs-store.root-dir in system properties and HBase conf
2024-12-10T11:00:41,160 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.log-dirs to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/yarn.nodemanager.log-dirs in system properties and HBase conf
2024-12-10T11:00:41,161 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-12-10T11:00:41,161 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.active-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/yarn.timeline-service.entity-group-fs-store.active-dir in system properties and HBase conf
2024-12-10T11:00:41,162 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.done-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/yarn.timeline-service.entity-group-fs-store.done-dir in system properties and HBase conf
2024-12-10T11:00:41,162 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-12-10T11:00:41,162 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/dfs.journalnode.edits.dir in system properties and HBase conf
2024-12-10T11:00:41,163 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.datanode.shared.file.descriptor.paths to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/dfs.datanode.shared.file.descriptor.paths in system properties and HBase conf
2024-12-10T11:00:41,163 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting nfs.dump.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/nfs.dump.dir in system properties and HBase conf
2024-12-10T11:00:41,164 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting java.io.tmpdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/java.io.tmpdir in system properties and HBase conf
2024-12-10T11:00:41,164 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/dfs.journalnode.edits.dir in system properties and HBase conf
2024-12-10T11:00:41,164 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.provided.aliasmap.inmemory.leveldb.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/dfs.provided.aliasmap.inmemory.leveldb.dir in system properties and HBase conf
2024-12-10T11:00:41,165 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting fs.s3a.committer.staging.tmp.path to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/fs.s3a.committer.staging.tmp.path in system properties and HBase conf
2024-12-10T11:00:41,641 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-12-10T11:00:41,969 WARN [Time-limited test {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
2024-12-10T11:00:42,043 INFO [Time-limited test {}] log.Log(170): Logging initialized @2210ms to org.eclipse.jetty.util.log.Slf4jLog
2024-12-10T11:00:42,117 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-10T11:00:42,180 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-10T11:00:42,200 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-10T11:00:42,200 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-10T11:00:42,201 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-12-10T11:00:42,214 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-10T11:00:42,216 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@5ea63753{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir/,AVAILABLE}
2024-12-10T11:00:42,217 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@7d70b283{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-10T11:00:42,405 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@23572bf7{hdfs,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/java.io.tmpdir/jetty-localhost-45901-hadoop-hdfs-3_4_1-tests_jar-_-any-4886474131486666056/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-12-10T11:00:42,416 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@69a5d44d{HTTP/1.1, (http/1.1)}{localhost:45901}
2024-12-10T11:00:42,416 INFO [Time-limited test {}] server.Server(415): Started @2584ms
2024-12-10T11:00:42,444 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-12-10T11:00:42,803 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-10T11:00:42,811 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-10T11:00:42,812 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-10T11:00:42,812 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-10T11:00:42,813 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-12-10T11:00:42,813 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@4d1f1349{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir/,AVAILABLE}
2024-12-10T11:00:42,814 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@d548ecc{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-10T11:00:42,938 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@16bc1793{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/java.io.tmpdir/jetty-localhost-41685-hadoop-hdfs-3_4_1-tests_jar-_-any-6979374521284272102/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-10T11:00:42,939 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@292b61eb{HTTP/1.1, (http/1.1)}{localhost:41685}
2024-12-10T11:00:42,939 INFO [Time-limited test {}] server.Server(415): Started @3108ms
2024-12-10T11:00:42,998 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-12-10T11:00:43,123 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-10T11:00:43,130 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-10T11:00:43,131 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-10T11:00:43,131 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-10T11:00:43,131 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-12-10T11:00:43,132 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@58722aa{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir/,AVAILABLE}
2024-12-10T11:00:43,132 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@6949af4f{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-10T11:00:43,252 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@478a8ea{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/java.io.tmpdir/jetty-localhost-32773-hadoop-hdfs-3_4_1-tests_jar-_-any-1876857136213525197/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-10T11:00:43,253 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@8a88f04{HTTP/1.1, (http/1.1)}{localhost:32773}
2024-12-10T11:00:43,253 INFO [Time-limited test {}] server.Server(415): Started @3421ms
2024-12-10T11:00:43,255 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-12-10T11:00:43,296 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-12-10T11:00:43,301 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-12-10T11:00:43,302 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-12-10T11:00:43,303 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-12-10T11:00:43,303 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-12-10T11:00:43,306 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@56b1d6f5{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir/,AVAILABLE}
2024-12-10T11:00:43,307 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@71ef7580{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-12-10T11:00:43,442 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@4932d2e3{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/java.io.tmpdir/jetty-localhost-38361-hadoop-hdfs-3_4_1-tests_jar-_-any-4521510180223366708/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-10T11:00:43,443 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@6613ef7b{HTTP/1.1, (http/1.1)}{localhost:38361}
2024-12-10T11:00:43,443 INFO [Time-limited test {}] server.Server(415): Started @3611ms
2024-12-10T11:00:43,445 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
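The one "hdfs" and three "datanode" web contexts above are the embedded mini HDFS cluster that HBaseZKTestingUtil/HBaseTestingUtil stands up for the test. A minimal standalone sketch of the same idea, using Hadoop's MiniDFSCluster test harness directly (the classes and methods are real Hadoop test APIs, but this is a simplified illustration, not HBase's actual setup code, and the base directory here is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.HdfsConfiguration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class MiniHdfsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new HdfsConfiguration();
        // Hypothetical base dir; HBaseTestingUtil derives its own under target/test-data.
        conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, "/tmp/minidfs-sketch");
        // Three datanodes, matching the three datanode web contexts in the log above.
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
            .numDataNodes(3)
            .build();
        try {
          FileSystem fs = cluster.getFileSystem();
          System.out.println("mini cluster up at " + fs.getUri());
        } finally {
          cluster.shutdown();
        }
      }
    }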
2024-12-10T11:00:43,449 WARN [Thread-106 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data1/current/BP-622500072-172.17.0.2-1733828441734/current, will proceed with Du for space computation calculation,
2024-12-10T11:00:43,449 WARN [Thread-105 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data3/current/BP-622500072-172.17.0.2-1733828441734/current, will proceed with Du for space computation calculation,
2024-12-10T11:00:43,449 WARN [Thread-108 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data2/current/BP-622500072-172.17.0.2-1733828441734/current, will proceed with Du for space computation calculation,
2024-12-10T11:00:43,450 WARN [Thread-107 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data4/current/BP-622500072-172.17.0.2-1733828441734/current, will proceed with Du for space computation calculation,
2024-12-10T11:00:43,493 WARN [Thread-58 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-10T11:00:43,501 WARN [Thread-81 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-10T11:00:43,573 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x610b8a02befbb1fb with lease ID 0x7e0f255f3ccd7: Processing first storage report for DS-75516e36-daad-4b45-b0c0-0db5f534803e from datanode DatanodeRegistration(127.0.0.1:39069, datanodeUuid=f4751df2-89e4-4cad-9294-95ab54da39f2, infoPort=36879, infoSecurePort=0, ipcPort=39699, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734)
2024-12-10T11:00:43,575 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x610b8a02befbb1fb with lease ID 0x7e0f255f3ccd7: from storage DS-75516e36-daad-4b45-b0c0-0db5f534803e node DatanodeRegistration(127.0.0.1:39069, datanodeUuid=f4751df2-89e4-4cad-9294-95ab54da39f2, infoPort=36879, infoSecurePort=0, ipcPort=39699, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734), blocks: 0, hasStaleStorage: true, processing time: 2 msecs, invalidatedBlocks: 0
2024-12-10T11:00:43,575 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x640f82d3a69ad48c with lease ID 0x7e0f255f3ccd8: Processing first storage report for DS-02e043e0-225a-4869-b5f6-49f0d0f9039c from datanode DatanodeRegistration(127.0.0.1:34987, datanodeUuid=bdf59f75-0b7a-4ef6-95c4-e5947fef9ef8, infoPort=38683, infoSecurePort=0, ipcPort=36773, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734)
2024-12-10T11:00:43,576 WARN [Thread-139 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data5/current/BP-622500072-172.17.0.2-1733828441734/current, will proceed with Du for space computation calculation,
2024-12-10T11:00:43,576 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x640f82d3a69ad48c with lease ID 0x7e0f255f3ccd8: from storage DS-02e043e0-225a-4869-b5f6-49f0d0f9039c node DatanodeRegistration(127.0.0.1:34987, datanodeUuid=bdf59f75-0b7a-4ef6-95c4-e5947fef9ef8, infoPort=38683, infoSecurePort=0, ipcPort=36773, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-10T11:00:43,576 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x610b8a02befbb1fb with lease ID 0x7e0f255f3ccd7: Processing first storage report for DS-38d88c86-6c5f-4672-81c6-f22e4f6bfdb9 from datanode DatanodeRegistration(127.0.0.1:39069, datanodeUuid=f4751df2-89e4-4cad-9294-95ab54da39f2, infoPort=36879, infoSecurePort=0, ipcPort=39699, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734)
2024-12-10T11:00:43,576 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x610b8a02befbb1fb with lease ID 0x7e0f255f3ccd7: from storage DS-38d88c86-6c5f-4672-81c6-f22e4f6bfdb9 node DatanodeRegistration(127.0.0.1:39069, datanodeUuid=f4751df2-89e4-4cad-9294-95ab54da39f2, infoPort=36879, infoSecurePort=0, ipcPort=39699, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-12-10T11:00:43,576 WARN [Thread-140 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data6/current/BP-622500072-172.17.0.2-1733828441734/current, will proceed with Du for space computation calculation,
2024-12-10T11:00:43,577 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x640f82d3a69ad48c with lease ID 0x7e0f255f3ccd8: Processing first storage report for DS-d027e858-8597-4fd1-b647-3f8713a850c4 from datanode DatanodeRegistration(127.0.0.1:34987, datanodeUuid=bdf59f75-0b7a-4ef6-95c4-e5947fef9ef8, infoPort=38683, infoSecurePort=0, ipcPort=36773, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734)
2024-12-10T11:00:43,577 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x640f82d3a69ad48c with lease ID 0x7e0f255f3ccd8: from storage DS-d027e858-8597-4fd1-b647-3f8713a850c4 node DatanodeRegistration(127.0.0.1:34987, datanodeUuid=bdf59f75-0b7a-4ef6-95c4-e5947fef9ef8, infoPort=38683, infoSecurePort=0, ipcPort=36773, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734), blocks: 0, hasStaleStorage: false, processing time: 1 msecs, invalidatedBlocks: 0
2024-12-10T11:00:43,606 WARN [Thread-121 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-12-10T11:00:43,612 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x4400a3692c4e3f94 with lease ID 0x7e0f255f3ccd9: Processing first storage report for DS-a7373fce-bbbd-4059-870f-74703d77f511 from datanode DatanodeRegistration(127.0.0.1:34489, datanodeUuid=c1c9f96c-5c9e-4cb6-a75f-4a166c4b5d88, infoPort=40781, infoSecurePort=0, ipcPort=36763, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734)
2024-12-10T11:00:43,612 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x4400a3692c4e3f94 with lease ID 0x7e0f255f3ccd9: from storage DS-a7373fce-bbbd-4059-870f-74703d77f511 node DatanodeRegistration(127.0.0.1:34489, datanodeUuid=c1c9f96c-5c9e-4cb6-a75f-4a166c4b5d88, infoPort=40781, infoSecurePort=0, ipcPort=36763, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
2024-12-10T11:00:43,612 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x4400a3692c4e3f94 with lease ID 0x7e0f255f3ccd9: Processing first storage report for DS-a6b8274a-ae07-4c74-b79a-145503036c62 from datanode DatanodeRegistration(127.0.0.1:34489, datanodeUuid=c1c9f96c-5c9e-4cb6-a75f-4a166c4b5d88, infoPort=40781, infoSecurePort=0, ipcPort=36763, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734)
2024-12-10T11:00:43,613 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x4400a3692c4e3f94 with lease ID 0x7e0f255f3ccd9: from storage DS-a6b8274a-ae07-4c74-b79a-145503036c62 node DatanodeRegistration(127.0.0.1:34489, datanodeUuid=c1c9f96c-5c9e-4cb6-a75f-4a166c4b5d88, infoPort=40781, infoSecurePort=0, ipcPort=36763, storageInfo=lv=-57;cid=testClusterID;nsid=479420424;c=1733828441734), blocks: 0, hasStaleStorage: false, processing time: 1 msecs, invalidatedBlocks: 0
2024-12-10T11:00:43,837 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(631): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82
2024-12-10T11:00:43,849 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=155, OpenFileDescriptor=391, MaxFileDescriptor=1048576, SystemLoadAverage=1412, ProcessCount=11, AvailableMemoryMB=4647
2024-12-10T11:00:43,869 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:43,871 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:44,099 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741825_1001 (size=7)
2024-12-10T11:00:44,100 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741825_1001 (size=7)
2024-12-10T11:00:44,100 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741825_1001 (size=7)
2024-12-10T11:00:44,504 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8
2024-12-10T11:00:44,505 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:44,507 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:44,516 DEBUG [Time-limited test {}] util.ClassSize(228): Using Unsafe to estimate memory layout
2024-12-10T11:00:44,536 INFO [Time-limited test {}] metrics.MetricRegistriesLoader(60): Loaded MetricRegistries class org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl
2024-12-10T11:00:44,537 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-10T11:00:44,550 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/testSyncRunnerIndexOverflow, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/oldWALs, maxLogs=1760
2024-12-10T11:00:44,601 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828444592
2024-12-10T11:00:44,654 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/testSyncRunnerIndexOverflow/wal.1733828444592
2024-12-10T11:00:44,704 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:40781:40781)]
2024-12-10T11:00:44,766 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:44,766 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:44,766 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:44,767 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:44,767 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:44,772 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741826_1002 (size=1293)
2024-12-10T11:00:44,772 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741826_1002 (size=1293)
2024-12-10T11:00:44,773 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741826_1002 (size=1293)
2024-12-10T11:00:44,781 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/oldWALs
2024-12-10T11:00:44,783 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733828444592)
2024-12-10T11:00:44,792 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=162 (was 155)
Potentially hanging thread: sync.0
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: sync.2
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: LeaseRenewer:jenkins@localhost:44515
    java.base@17.0.11/java.lang.Thread.sleep(Native Method)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:441)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$800(LeaseRenewer.java:77)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:336)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: sync.3
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: sync.1
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: sync.4
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: weak-ref-cleaner-strictcontextstorage
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:155)
    java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:176)
    app//io.opentelemetry.context.StrictContextStorage$PendingScopes.run(StrictContextStorage.java:269)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=403 (was 391) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1412 (was 1412), ProcessCount=11 (was 11), AvailableMemoryMB=4600 (was 4647)
2024-12-10T11:00:44,799 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=162, OpenFileDescriptor=403, MaxFileDescriptor=1048576, SystemLoadAverage=1412, ProcessCount=11, AvailableMemoryMB=4600
2024-12-10T11:00:44,822 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741827_1003 (size=7)
2024-12-10T11:00:44,822 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741827_1003 (size=7)
2024-12-10T11:00:44,823 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741827_1003 (size=7)
2024-12-10T11:00:44,825 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8
2024-12-10T11:00:44,826 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:44,828 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:44,838 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-10T11:00:44,839 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/testUnflushedSeqIdTracking, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/oldWALs, maxLogs=1760
2024-12-10T11:00:44,841 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828444841
2024-12-10T11:00:44,852 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/testUnflushedSeqIdTracking/wal.1733828444841
2024-12-10T11:00:44,857 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781)]
2024-12-10T11:00:44,860 INFO [Time-limited test {}] regionserver.ChunkCreator(472): data poolSizePercentage is less than 0. So not using pool
2024-12-10T11:00:44,860 INFO [Time-limited test {}] regionserver.ChunkCreator(472): index poolSizePercentage is less than 0. So not using pool
2024-12-10T11:00:44,883 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 991d6ba103d209471bcdc51686911762, NAME => 'testUnflushedSeqIdTracking,,1733828444858.991d6ba103d209471bcdc51686911762.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testUnflushedSeqIdTracking', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82
2024-12-10T11:00:44,904 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741829_1005 (size=61)
2024-12-10T11:00:44,904 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741829_1005 (size=61)
2024-12-10T11:00:44,904 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741829_1005 (size=61)
2024-12-10T11:00:44,910 INFO [Time-limited test {}] throttle.StoreHotnessProtector(112): StoreHotnessProtector is disabled. Set hbase.region.store.parallel.put.limit > 0 to enable, which may help mitigate load under heavy write pressure.
2024-12-10T11:00:44,914 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testUnflushedSeqIdTracking,,1733828444858.991d6ba103d209471bcdc51686911762.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-10T11:00:44,960 INFO [StoreOpener-991d6ba103d209471bcdc51686911762-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 991d6ba103d209471bcdc51686911762
2024-12-10T11:00:44,989 INFO [StoreOpener-991d6ba103d209471bcdc51686911762-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 991d6ba103d209471bcdc51686911762 columnFamilyName b
2024-12-10T11:00:44,994 DEBUG [StoreOpener-991d6ba103d209471bcdc51686911762-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-10T11:00:44,998 INFO [StoreOpener-991d6ba103d209471bcdc51686911762-1 {}] regionserver.HStore(327): Store=991d6ba103d209471bcdc51686911762/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-10T11:00:45,000 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 991d6ba103d209471bcdc51686911762
2024-12-10T11:00:45,004 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testUnflushedSeqIdTracking/991d6ba103d209471bcdc51686911762
2024-12-10T11:00:45,005 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testUnflushedSeqIdTracking/991d6ba103d209471bcdc51686911762
2024-12-10T11:00:45,006 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/39919b2c-24ca-6993-51d3-4066b391f8f5/data/default/testUnflushedSeqIdTracking/991d6ba103d209471bcdc51686911762
2024-12-10T11:00:45,009 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 991d6ba103d209471bcdc51686911762
2024-12-10T11:00:45,009 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 991d6ba103d209471bcdc51686911762
2024-12-10T11:00:45,016 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 991d6ba103d209471bcdc51686911762
2024-12-10T11:00:45,021 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44515/user/jenkins/test-data/39919b2c-24ca-6993-51d3-4066b391f8f5/data/default/testUnflushedSeqIdTracking/991d6ba103d209471bcdc51686911762/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-12-10T11:00:45,023 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 991d6ba103d209471bcdc51686911762; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=60072871, jitterRate=-0.10484446585178375}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-10T11:00:45,034 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 991d6ba103d209471bcdc51686911762: Writing region info on filesystem at 1733828444934Initializing all the Stores at 1733828444937 (+3 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733828444937Cleaning up temporary data from old regions at 1733828445009 (+72 ms)Region opened successfully at 1733828445032 (+23 ms)
2024-12-10T11:00:48,060 INFO [pool-60-thread-2 {}] regionserver.HRegion(2902): Flushing 991d6ba103d209471bcdc51686911762 1/1 column families, dataSize=24 B heapSize=352 B
2024-12-10T11:00:51,221 DEBUG [pool-60-thread-2 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/testUnflushedSeqIdTracking/991d6ba103d209471bcdc51686911762/.tmp/b/6ba0cb0a3a7d4af1be56e82f7fc6b82f is 28, key is b/b:b/1733828445053/Put/seqid=0
2024-12-10T11:00:51,241 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741830_1006 (size=4945)
2024-12-10T11:00:51,242 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741830_1006 (size=4945)
2024-12-10T11:00:51,242 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741830_1006 (size=4945)
2024-12-10T11:00:51,244 INFO [pool-60-thread-2 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=4 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/testUnflushedSeqIdTracking/991d6ba103d209471bcdc51686911762/.tmp/b/6ba0cb0a3a7d4af1be56e82f7fc6b82f
2024-12-10T11:00:51,338 DEBUG [pool-60-thread-2 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/testUnflushedSeqIdTracking/991d6ba103d209471bcdc51686911762/.tmp/b/6ba0cb0a3a7d4af1be56e82f7fc6b82f as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/testUnflushedSeqIdTracking/991d6ba103d209471bcdc51686911762/b/6ba0cb0a3a7d4af1be56e82f7fc6b82f
2024-12-10T11:00:51,353 INFO [pool-60-thread-2 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/testUnflushedSeqIdTracking/991d6ba103d209471bcdc51686911762/b/6ba0cb0a3a7d4af1be56e82f7fc6b82f, entries=1, sequenceid=4, filesize=4.8 K
2024-12-10T11:00:51,362 INFO [pool-60-thread-2 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 991d6ba103d209471bcdc51686911762 in 3303ms, sequenceid=4, compaction requested=false
2024-12-10T11:00:51,363 DEBUG [pool-60-thread-2 {}] regionserver.HRegion(2603): Flush status journal for 991d6ba103d209471bcdc51686911762:
2024-12-10T11:00:51,363 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(193): Flush result:FLUSHED_NO_COMPACTION_NEEDED
2024-12-10T11:00:51,364 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(194): Flush succeeded:true
2024-12-10T11:00:51,364 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 991d6ba103d209471bcdc51686911762, disabling compactions & flushes
2024-12-10T11:00:51,364 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testUnflushedSeqIdTracking,,1733828444858.991d6ba103d209471bcdc51686911762.
2024-12-10T11:00:51,364 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testUnflushedSeqIdTracking,,1733828444858.991d6ba103d209471bcdc51686911762.
2024-12-10T11:00:51,365 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testUnflushedSeqIdTracking,,1733828444858.991d6ba103d209471bcdc51686911762. after waiting 0 ms
2024-12-10T11:00:51,365 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testUnflushedSeqIdTracking,,1733828444858.991d6ba103d209471bcdc51686911762.
2024-12-10T11:00:51,367 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testUnflushedSeqIdTracking,,1733828444858.991d6ba103d209471bcdc51686911762.
2024-12-10T11:00:51,367 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 991d6ba103d209471bcdc51686911762: Waiting for close lock at 1733828451364Disabling compacts and flushes for region at 1733828451364Disabling writes for close at 1733828451365 (+1 ms)Writing region close event to WAL at 1733828451367 (+2 ms)Closed at 1733828451367
2024-12-10T11:00:51,368 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,369 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,369 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,369 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,369 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,373 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741828_1004 (size=875)
2024-12-10T11:00:51,373 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741828_1004 (size=875)
2024-12-10T11:00:51,375 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741828_1004 (size=875)
2024-12-10T11:00:51,381 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/oldWALs
2024-12-10T11:00:51,381 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733828444841)
2024-12-10T11:00:51,394 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=177 (was 162)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data3
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: pool-60-thread-2
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:33402 [Waiting for operation #5]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data4
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: org.apache.hadoop.hdfs.PeerCache@765de4c3
    java.base@17.0.11/java.lang.Thread.sleep(Native Method)
    app//org.apache.hadoop.hdfs.PeerCache.run(PeerCache.java:253)
    app//org.apache.hadoop.hdfs.PeerCache.access$000(PeerCache.java:46)
    app//org.apache.hadoop.hdfs.PeerCache$1.run(PeerCache.java:124)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data1
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: pool-60-thread-1
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data2
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: HBase-Metrics2-1
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
    java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Monitor thread for TaskMonitor
    java.base@17.0.11/java.lang.Thread.sleep(Native Method)
    app//org.apache.hadoop.hbase.monitoring.TaskMonitor$MonitorRunnable.run(TaskMonitor.java:325)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=415 (was 403) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1323 (was 1412), ProcessCount=11 (was 11), AvailableMemoryMB=5298 (was 4600) - AvailableMemoryMB LEAK? -
2024-12-10T11:00:51,404 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALComparator Thread=177, OpenFileDescriptor=415, MaxFileDescriptor=1048576, SystemLoadAverage=1323, ProcessCount=11, AvailableMemoryMB=5297
2024-12-10T11:00:51,428 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741831_1007 (size=7)
2024-12-10T11:00:51,429 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741831_1007 (size=7)
2024-12-10T11:00:51,429 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741831_1007 (size=7)
2024-12-10T11:00:51,433 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8
2024-12-10T11:00:51,434 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:51,437 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:51,448 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-10T11:00:51,448 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/c6ee3af7-0fb7-cd76-4785-2801b197fa19/testWALComparator, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/c6ee3af7-0fb7-cd76-4785-2801b197fa19/oldWALs, maxLogs=1760
2024-12-10T11:00:51,450 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828451450
2024-12-10T11:00:51,464 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/c6ee3af7-0fb7-cd76-4785-2801b197fa19/testWALComparator/wal.1733828451450
2024-12-10T11:00:51,470 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:36879:36879)]
2024-12-10T11:00:51,471 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(215): Log obtained is: FSHLog wal:(num 1733828451450)
2024-12-10T11:00:51,475 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-10T11:00:51,476 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=.meta, logDir=hdfs://localhost:44515/user/jenkins/test-data/c6ee3af7-0fb7-cd76-4785-2801b197fa19/testWALComparator, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/c6ee3af7-0fb7-cd76-4785-2801b197fa19/oldWALs, maxLogs=1760
2024-12-10T11:00:51,478 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828451478.meta
2024-12-10T11:00:51,490 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/c6ee3af7-0fb7-cd76-4785-2801b197fa19/testWALComparator/wal.1733828451478.meta
2024-12-10T11:00:51,497 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:38683:38683)]
2024-12-10T11:00:51,502 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,502 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,503 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,503 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,503 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,507 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741832_1008 (size=93)
2024-12-10T11:00:51,507 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741832_1008 (size=93)
2024-12-10T11:00:51,508 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741832_1008 (size=93)
2024-12-10T11:00:51,513 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/c6ee3af7-0fb7-cd76-4785-2801b197fa19/oldWALs
2024-12-10T11:00:51,513 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733828451450)
2024-12-10T11:00:51,517 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,518 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,518 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,518 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,518 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:51,525 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741833_1009 (size=93)
2024-12-10T11:00:51,525 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741833_1009 (size=93)
2024-12-10T11:00:51,526 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741833_1009 (size=93)
2024-12-10T11:00:51,929 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/c6ee3af7-0fb7-cd76-4785-2801b197fa19/oldWALs
2024-12-10T11:00:51,929 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:.meta(num 1733828451478)
2024-12-10T11:00:51,939 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALComparator Thread=187 (was 177) - Thread LEAK? -, OpenFileDescriptor=419 (was 415) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1323 (was 1323), ProcessCount=11 (was 11), AvailableMemoryMB=5287 (was 5297)
2024-12-10T11:00:51,948 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=187, OpenFileDescriptor=419, MaxFileDescriptor=1048576, SystemLoadAverage=1323, ProcessCount=11, AvailableMemoryMB=5287
2024-12-10T11:00:51,955 WARN [IPC Server handler 3 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology
2024-12-10T11:00:51,956 WARN [IPC Server handler 3 on default port 44515 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]})
2024-12-10T11:00:51,956 WARN [IPC Server handler 3 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}
2024-12-10T11:00:51,964 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741834_1010 (size=7)
2024-12-10T11:00:51,964 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741834_1010 (size=7)
2024-12-10T11:00:51,967 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8
2024-12-10T11:00:51,967 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:51,969 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:51,972 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(383): testFindMemStoresEligibleForFlush
2024-12-10T11:00:52,005 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-10T11:00:52,005 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs, maxLogs=1
2024-12-10T11:00:52,007 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452007
2024-12-10T11:00:52,021 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452007
2024-12-10T11:00:52,027 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:36879:36879)]
2024-12-10T11:00:52,034 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452034
2024-12-10T11:00:52,039 WARN [IPC Server handler 4 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology
2024-12-10T11:00:52,039 WARN [IPC Server handler 4 on default port 44515 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]})
2024-12-10T11:00:52,039 WARN [IPC Server handler 4 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}
2024-12-10T11:00:52,049 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:52,050 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:52,050 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:52,050 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:52,051 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:52,051 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452007 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452034
2024-12-10T11:00:52,054 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741835_1011 (size=283)
2024-12-10T11:00:52,055 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741835_1011 (size=283)
2024-12-10T11:00:52,055 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741835_1011 (size=283)
2024-12-10T11:00:52,060 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:36879:36879)]
2024-12-10T11:00:52,062 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452062
2024-12-10T11:00:52,065 WARN [IPC Server handler 2 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology
2024-12-10T11:00:52,065 WARN [IPC Server handler 2 on default port 44515 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]})
2024-12-10T11:00:52,065 WARN [IPC Server handler 2 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}
2024-12-10T11:00:52,073 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:52,074 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:52,074 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:52,074 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:52,075 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:00:52,075 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452034 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452062
2024-12-10T11:00:52,078 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741836_1012 (size=283)
2024-12-10T11:00:52,078 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741836_1012 (size=283)
2024-12-10T11:00:52,085 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:36879:36879)]
2024-12-10T11:00:52,087 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 137bd165453055538dda0e80dbe1e848[cf1]
2024-12-10T11:00:52,089 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-12-10T11:00:52,089 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 137bd165453055538dda0e80dbe1e848[cf1] 2024-12-10T11:00:52,091 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 137bd165453055538dda0e80dbe1e848[cf1] 2024-12-10T11:00:52,092 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452091 2024-12-10T11:00:52,102 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,102 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,102 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,103 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,103 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,103 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452062 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452091 2024-12-10T11:00:52,104 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:36879:36879)] 2024-12-10T11:00:52,105 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452062 is not closed yet, will try archiving it next time 2024-12-10T11:00:52,106 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741837_1013 (size=283) 2024-12-10T11:00:52,106 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741837_1013 (size=283) 2024-12-10T11:00:52,109 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-10T11:00:52,110 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452007 to hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs/wal.1733828452007 2024-12-10T11:00:52,110 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452110 2024-12-10T11:00:52,112 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452034 to hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs/wal.1733828452034 2024-12-10T11:00:52,113 WARN [IPC Server handler 0 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-10T11:00:52,113 WARN [IPC Server 
handler 0 on default port 44515 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-10T11:00:52,113 WARN [IPC Server handler 0 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-10T11:00:52,119 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,119 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,119 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,119 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,120 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,120 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452091 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452110 2024-12-10T11:00:52,123 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:36879:36879)] 2024-12-10T11:00:52,123 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452091 is not closed yet, will try archiving it next time 2024-12-10T11:00:52,123 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452062 to hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs/wal.1733828452062 2024-12-10T11:00:52,124 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-10T11:00:52,126 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741838_1014 (size=93) 2024-12-10T11:00:52,127 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741838_1014 (size=93) 2024-12-10T11:00:52,127 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741838_1014 (size=93) 2024-12-10T11:00:52,128 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452091 to hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs/wal.1733828452091 2024-12-10T11:00:52,230 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452229 2024-12-10T11:00:52,233 WARN [IPC Server 
handler 4 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-10T11:00:52,233 WARN [IPC Server handler 4 on default port 44515 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-10T11:00:52,234 WARN [IPC Server handler 4 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-10T11:00:52,243 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,243 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,244 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,244 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,244 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,244 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452110 with entries=4, filesize=465 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452229 2024-12-10T11:00:52,247 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:36879:36879)] 2024-12-10T11:00:52,247 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452110 is not closed yet, will try archiving it next time 2024-12-10T11:00:52,248 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-10T11:00:52,248 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741839_1015 (size=473) 2024-12-10T11:00:52,248 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741839_1015 (size=473) 2024-12-10T11:00:52,250 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452249 2024-12-10T11:00:52,259 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,259 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,259 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,259 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,259 INFO [sync.4 
{}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,260 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452229 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452249 2024-12-10T11:00:52,262 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781)] 2024-12-10T11:00:52,263 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452229 is not closed yet, will try archiving it next time 2024-12-10T11:00:52,263 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741840_1016 (size=283) 2024-12-10T11:00:52,263 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 02f588659ff818671b134d171afc90c9[cf1],137bd165453055538dda0e80dbe1e848[cf1] 2024-12-10T11:00:52,263 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 02f588659ff818671b134d171afc90c9[cf1],137bd165453055538dda0e80dbe1e848[cf1] 2024-12-10T11:00:52,263 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452263 2024-12-10T11:00:52,266 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741840_1016 (size=283) 2024-12-10T11:00:52,267 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452110 to hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs/wal.1733828452110 2024-12-10T11:00:52,268 WARN [IPC Server handler 0 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-10T11:00:52,268 WARN [IPC Server handler 0 on default port 44515 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-10T11:00:52,268 WARN [IPC Server handler 0 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], 
replicationFallbacks=[ARCHIVE]} 2024-12-10T11:00:52,269 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452229 to hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs/wal.1733828452229 2024-12-10T11:00:52,276 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,276 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,276 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,276 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,276 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,277 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452249 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452263 2024-12-10T11:00:52,279 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781)] 2024-12-10T11:00:52,279 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452249 is not closed yet, will try archiving it next time 2024-12-10T11:00:52,279 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-10T11:00:52,282 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741841_1017 (size=93) 2024-12-10T11:00:52,282 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741841_1017 (size=93) 2024-12-10T11:00:52,283 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741841_1017 (size=93) 2024-12-10T11:00:52,284 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452249 to hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs/wal.1733828452249 2024-12-10T11:00:52,382 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452382 2024-12-10T11:00:52,391 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,392 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,392 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,392 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,392 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,392 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452263 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452382 2024-12-10T11:00:52,393 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: 
[(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781)] 2024-12-10T11:00:52,393 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452263 is not closed yet, will try archiving it next time 2024-12-10T11:00:52,394 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-10T11:00:52,394 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452394 2024-12-10T11:00:52,395 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741842_1018 (size=283) 2024-12-10T11:00:52,395 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741842_1018 (size=283) 2024-12-10T11:00:52,396 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452263 to hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs/wal.1733828452263 2024-12-10T11:00:52,404 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,404 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,404 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,404 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,405 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,405 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452382 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452394 2024-12-10T11:00:52,407 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741843_1019 (size=93) 2024-12-10T11:00:52,408 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741843_1019 (size=93) 2024-12-10T11:00:52,408 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741843_1019 (size=93) 2024-12-10T11:00:52,409 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452382 to hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs/wal.1733828452382 2024-12-10T11:00:52,414 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781)] 2024-12-10T11:00:52,418 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452418 2024-12-10T11:00:52,427 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,427 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,427 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,427 INFO [sync.3 {}] 
wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,427 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,428 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452394 with entries=6, filesize=709 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452418 2024-12-10T11:00:52,429 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:36879:36879)] 2024-12-10T11:00:52,429 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452394 is not closed yet, will try archiving it next time 2024-12-10T11:00:52,430 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741844_1020 (size=717) 2024-12-10T11:00:52,431 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741844_1020 (size=717) 2024-12-10T11:00:52,431 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741844_1020 (size=717) 2024-12-10T11:00:52,432 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452432 2024-12-10T11:00:52,440 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,441 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,441 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,441 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,441 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,441 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452418 with entries=2, filesize=293 B; new WAL /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452432 2024-12-10T11:00:52,442 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:36879:36879)] 2024-12-10T11:00:52,442 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:44515/user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/testFindMemStoresEligibleForFlush/wal.1733828452418 is not closed yet, will try archiving it next time 2024-12-10T11:00:52,442 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 2577b880214328327677d100e4b1c2ed[cf1,cf3,cf2] 2024-12-10T11:00:52,443 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-12-10T11:00:52,443 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 2577b880214328327677d100e4b1c2ed[cf3,cf2] 2024-12-10T11:00:52,444 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,444 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* 
addStoredBlock: 127.0.0.1:39069 is added to blk_1073741845_1021 (size=301) 2024-12-10T11:00:52,444 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,444 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,444 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,444 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741845_1021 (size=301) 2024-12-10T11:00:52,444 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,445 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741845_1021 (size=301) 2024-12-10T11:00:52,447 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741846_1022 (size=93) 2024-12-10T11:00:52,447 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741846_1022 (size=93) 2024-12-10T11:00:52,448 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741846_1022 (size=93) 2024-12-10T11:00:52,454 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 3 WAL file(s) to /user/jenkins/test-data/d866e5c4-c27b-d3d1-f4d0-8f3a5cd92b7f/oldWALs 2024-12-10T11:00:52,454 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733828452432) 2024-12-10T11:00:52,462 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=192 (was 187) - Thread LEAK? -, OpenFileDescriptor=419 (was 419), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1233 (was 1323), ProcessCount=11 (was 11), AvailableMemoryMB=5258 (was 5287) 2024-12-10T11:00:52,469 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=192, OpenFileDescriptor=419, MaxFileDescriptor=1048576, SystemLoadAverage=1233, ProcessCount=11, AvailableMemoryMB=5258 2024-12-10T11:00:52,475 WARN [IPC Server handler 3 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-10T11:00:52,475 WARN [IPC Server handler 3 on default port 44515 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-10T11:00:52,475 WARN [IPC Server handler 3 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, 
storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-10T11:00:52,480 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741847_1023 (size=7) 2024-12-10T11:00:52,480 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741847_1023 (size=7) 2024-12-10T11:00:52,482 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8 2024-12-10T11:00:52,482 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:00:52,485 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:00:52,491 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-10T11:00:52,491 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/5146de01-9a4e-b647-e6db-807ddccbd9cd/testRollWriterForClosedWAL, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/5146de01-9a4e-b647-e6db-807ddccbd9cd/testRollWriterForClosedWAL, maxLogs=1760 2024-12-10T11:00:52,492 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452492 2024-12-10T11:00:52,495 WARN [IPC Server handler 0 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-10T11:00:52,495 WARN [IPC Server handler 0 on default port 44515 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-10T11:00:52,495 WARN [IPC Server handler 0 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-10T11:00:52,500 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/5146de01-9a4e-b647-e6db-807ddccbd9cd/testRollWriterForClosedWAL/wal.1733828452492 2024-12-10T11:00:52,504 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: 
[(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781)] 2024-12-10T11:00:52,509 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,509 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,510 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,510 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,510 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:52,513 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741848_1024 (size=93) 2024-12-10T11:00:52,514 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741848_1024 (size=93) 2024-12-10T11:00:52,517 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/5146de01-9a4e-b647-e6db-807ddccbd9cd/testRollWriterForClosedWAL 2024-12-10T11:00:52,517 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733828452492) 2024-12-10T11:00:52,528 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=197 (was 192) - Thread LEAK? -, OpenFileDescriptor=419 (was 419), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1233 (was 1233), ProcessCount=11 (was 11), AvailableMemoryMB=5255 (was 5258) 2024-12-10T11:00:52,535 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=197, OpenFileDescriptor=419, MaxFileDescriptor=1048576, SystemLoadAverage=1233, ProcessCount=11, AvailableMemoryMB=5255 2024-12-10T11:00:52,540 WARN [IPC Server handler 4 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-10T11:00:52,541 WARN [IPC Server handler 4 on default port 44515 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-10T11:00:52,541 WARN [IPC Server handler 4 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-10T11:00:52,546 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741849_1025 (size=7) 2024-12-10T11:00:52,547 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741849_1025 (size=7) 
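[Editor's note] The repeated "Failed to place enough replicas, still in need of 1 to reach 3" warnings above come from the NameNode's default block placement policy: the test asks for replication factor 3 while one datanode's DISK storage is momentarily unusable (unavailableStorages=[DISK]), so the write pipeline is built with two replicas and the block is completed later (note the delayed addStoredBlock for blk_1073741849_1025 at 11:00:55,537 further down). Below is a minimal sketch of the kind of three-datanode MiniDFSCluster setup this implies; the class name, path, and payload are illustrative assumptions, not taken from the test.

    // Sketch: a 3-datanode MiniDFSCluster with replication=3, as the log implies.
    // Requires the hadoop-hdfs test artifacts; names and values are illustrative.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class MiniClusterSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setInt("dfs.replication", 3); // ask for 3 replicas, like the warnings above
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
        try {
          FileSystem fs = cluster.getFileSystem();
          // If one datanode's DISK storage is briefly unavailable, the write still
          // succeeds on a shorter pipeline and the NameNode emits the
          // BlockPlacementPolicyDefault warning seen in this test output.
          try (FSDataOutputStream out = fs.create(new Path("/probe"), (short) 3)) {
            out.write(new byte[] { 1 });
          }
        } finally {
          cluster.shutdown();
        }
      }
    }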
2024-12-10T11:00:52,548 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8 2024-12-10T11:00:52,548 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:00:52,550 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:00:52,559 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-10T11:00:52,559 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/testMaxFlushedSequenceIdGoBackwards, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/oldWALs, maxLogs=1760 2024-12-10T11:00:52,561 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828452560 2024-12-10T11:00:52,563 WARN [IPC Server handler 2 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-12-10T11:00:52,563 WARN [IPC Server handler 2 on default port 44515 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-12-10T11:00:52,563 WARN [IPC Server handler 2 on default port 44515 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-12-10T11:00:52,568 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/testMaxFlushedSequenceIdGoBackwards/wal.1733828452560 2024-12-10T11:00:52,569 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781)] 2024-12-10T11:00:52,570 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 89440a2786b9c74de0aba51d1b54ef19, NAME => 'table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 
'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82 2024-12-10T11:00:52,581 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741851_1027 (size=40) 2024-12-10T11:00:52,582 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741851_1027 (size=40) 2024-12-10T11:00:52,582 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741851_1027 (size=40) 2024-12-10T11:00:52,583 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-10T11:00:52,585 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:52,587 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 89440a2786b9c74de0aba51d1b54ef19 columnFamilyName a 2024-12-10T11:00:52,587 DEBUG [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-10T11:00:52,588 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] regionserver.HStore(327): Store=89440a2786b9c74de0aba51d1b54ef19/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-10T11:00:52,588 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:52,590 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 
{}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 89440a2786b9c74de0aba51d1b54ef19 columnFamilyName b 2024-12-10T11:00:52,590 DEBUG [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-10T11:00:52,591 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] regionserver.HStore(327): Store=89440a2786b9c74de0aba51d1b54ef19/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-10T11:00:52,591 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:52,592 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/table/89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:52,593 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/table/89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:52,594 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/a1780653-3903-a852-848b-088fc2258559/data/default/table/89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:52,595 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:52,595 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:52,596 DEBUG [Time-limited test {}] regionserver.FlushLargeStoresPolicy(65): No hbase.hregion.percolumnfamilyflush.size.lower.bound set in table table descriptor;using region.getMemStoreFlushHeapSize/# of families (64.0 M)) instead. 
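[Editor's note] The FlushLargeStoresPolicy entry directly above falls back to region.getMemStoreFlushHeapSize divided by the number of families because the table descriptor does not set hbase.hregion.percolumnfamilyflush.size.lower.bound. A table can pin that lower bound explicitly; a minimal sketch follows, with the 16 MB value chosen arbitrarily and the class name invented for illustration.

    // Sketch: pin the per-column-family flush lower bound in the table descriptor.
    // The 16 MB value is an arbitrary example, not taken from this test run.
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    public class PerFamilyFlushBoundSketch {
      static TableDescriptor build() {
        return TableDescriptorBuilder.newBuilder(TableName.valueOf("table"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("a"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("b"))
            // With this set, FlushLargeStoresPolicy skips the fallback logged above.
            .setValue("hbase.hregion.percolumnfamilyflush.size.lower.bound",
                String.valueOf(16L * 1024 * 1024))
            .build();
      }
    }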
2024-12-10T11:00:52,598 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:52,601 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44515/user/jenkins/test-data/a1780653-3903-a852-848b-088fc2258559/data/default/table/89440a2786b9c74de0aba51d1b54ef19/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-12-10T11:00:52,602 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 89440a2786b9c74de0aba51d1b54ef19; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=71600104, jitterRate=0.06692469120025635}}}, FlushLargeStoresPolicy{flushSizeLowerBound=67108864} 2024-12-10T11:00:52,605 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 89440a2786b9c74de0aba51d1b54ef19: Writing region info on filesystem at 1733828452583Initializing all the Stores at 1733828452585 (+2 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733828452585Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733828452585Cleaning up temporary data from old regions at 1733828452595 (+10 ms)Region opened successfully at 1733828452604 (+9 ms) 2024-12-10T11:00:52,605 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 89440a2786b9c74de0aba51d1b54ef19, disabling compactions & flushes 2024-12-10T11:00:52,605 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. 2024-12-10T11:00:52,605 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. 2024-12-10T11:00:52,605 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. after waiting 0 ms 2024-12-10T11:00:52,605 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. 2024-12-10T11:00:52,605 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. 
2024-12-10T11:00:52,606 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 89440a2786b9c74de0aba51d1b54ef19: Waiting for close lock at 1733828452605Disabling compacts and flushes for region at 1733828452605Disabling writes for close at 1733828452605Writing region close event to WAL at 1733828452605Closed at 1733828452605 2024-12-10T11:00:53,093 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 89440a2786b9c74de0aba51d1b54ef19, NAME => 'table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19.', STARTKEY => '', ENDKEY => ''} 2024-12-10T11:00:53,119 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,120 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-12-10T11:00:53,123 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,124 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,130 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,132 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 89440a2786b9c74de0aba51d1b54ef19 columnFamilyName a 2024-12-10T11:00:53,132 DEBUG [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-10T11:00:53,133 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] regionserver.HStore(327): Store=89440a2786b9c74de0aba51d1b54ef19/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-10T11:00:53,133 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,135 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, 
offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 89440a2786b9c74de0aba51d1b54ef19 columnFamilyName b 2024-12-10T11:00:53,135 DEBUG [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-12-10T11:00:53,136 INFO [StoreOpener-89440a2786b9c74de0aba51d1b54ef19-1 {}] regionserver.HStore(327): Store=89440a2786b9c74de0aba51d1b54ef19/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-12-10T11:00:53,137 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,138 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/table/89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,139 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/table/89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,141 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/a1780653-3903-a852-848b-088fc2258559/data/default/table/89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,142 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,142 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,146 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,147 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 89440a2786b9c74de0aba51d1b54ef19; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=66004428, jitterRate=-0.01645737886428833}}}, org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL$FlushSpecificStoresPolicy@5fdb1d04 2024-12-10T11:00:53,148 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for 89440a2786b9c74de0aba51d1b54ef19 2024-12-10T11:00:53,152 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 89440a2786b9c74de0aba51d1b54ef19: Running coprocessor pre-open hook at 1733828453125Writing region info on filesystem at 1733828453125Initializing all the Stores at 1733828453127 (+2 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', 
BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733828453127Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733828453129 (+2 ms)Cleaning up temporary data from old regions at 1733828453143 (+14 ms)Running coprocessor post-open hooks at 1733828453148 (+5 ms)Region opened successfully at 1733828453152 (+4 ms) 2024-12-10T11:00:55,537 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741849_1025 (size=7) 2024-12-10T11:00:56,177 INFO [pool-78-thread-1 {}] regionserver.HRegion(2902): Flushing 89440a2786b9c74de0aba51d1b54ef19 2/2 column families, dataSize=96 B heapSize=896 B 2024-12-10T11:00:58,632 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties 2024-12-10T11:00:59,201 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/a/a4ab8fe4a487471a84696f3147c5b6aa is 28, key is a/a:a/1733828453163/Put/seqid=0 2024-12-10T11:00:59,212 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741852_1028 (size=4945) 2024-12-10T11:00:59,212 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741852_1028 (size=4945) 2024-12-10T11:00:59,214 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/a/a4ab8fe4a487471a84696f3147c5b6aa 2024-12-10T11:00:59,214 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741852_1028 (size=4945) 2024-12-10T11:00:59,248 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/b/46c4f1002c8a4e9889491846c36423dc is 28, key is a/b:b/1733828453163/Put/seqid=0 2024-12-10T11:00:59,256 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741853_1029 (size=4945) 2024-12-10T11:00:59,257 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741853_1029 (size=4945) 2024-12-10T11:00:59,257 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741853_1029 (size=4945) 2024-12-10T11:00:59,258 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), 
to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/b/46c4f1002c8a4e9889491846c36423dc 2024-12-10T11:00:59,268 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/a/a4ab8fe4a487471a84696f3147c5b6aa as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/a/a4ab8fe4a487471a84696f3147c5b6aa 2024-12-10T11:00:59,276 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/a/a4ab8fe4a487471a84696f3147c5b6aa, entries=1, sequenceid=6, filesize=4.8 K 2024-12-10T11:00:59,277 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/b/46c4f1002c8a4e9889491846c36423dc as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/b/46c4f1002c8a4e9889491846c36423dc 2024-12-10T11:00:59,285 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/b/46c4f1002c8a4e9889491846c36423dc, entries=1, sequenceid=6, filesize=4.8 K 2024-12-10T11:00:59,287 INFO [pool-78-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~96 B/96, heapSize ~864 B/864, currentSize=0 B/0 for 89440a2786b9c74de0aba51d1b54ef19 in 3111ms, sequenceid=6, compaction requested=false 2024-12-10T11:00:59,287 DEBUG [pool-78-thread-1 {}] regionserver.HRegion(2603): Flush status journal for 89440a2786b9c74de0aba51d1b54ef19: 2024-12-10T11:00:59,287 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(676): Flush result:FLUSHED_NO_COMPACTION_NEEDED 2024-12-10T11:00:59,287 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(677): Flush succeeded:true 2024-12-10T11:00:59,293 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 89440a2786b9c74de0aba51d1b54ef19 1/2 column families, dataSize=48 B heapSize=704 B; a={dataSize=24 B, heapSize=352 B, offHeapSize=0 B} 2024-12-10T11:00:59,299 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/a/d7f81179a5564581879e779c3a6b42ce is 28, key is a/a:a/1733828453163/Put/seqid=0 2024-12-10T11:00:59,306 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741854_1030 (size=4945) 2024-12-10T11:00:59,307 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to 
blk_1073741854_1030 (size=4945) 2024-12-10T11:00:59,307 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741854_1030 (size=4945) 2024-12-10T11:00:59,308 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=10 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/a/d7f81179a5564581879e779c3a6b42ce 2024-12-10T11:00:59,317 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/a/d7f81179a5564581879e779c3a6b42ce as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/a/d7f81179a5564581879e779c3a6b42ce 2024-12-10T11:00:59,326 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/a/d7f81179a5564581879e779c3a6b42ce, entries=1, sequenceid=10, filesize=4.8 K 2024-12-10T11:00:59,327 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=24 B/24 for 89440a2786b9c74de0aba51d1b54ef19 in 34ms, sequenceid=10, compaction requested=false 2024-12-10T11:00:59,327 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for 89440a2786b9c74de0aba51d1b54ef19: 2024-12-10T11:00:59,329 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 89440a2786b9c74de0aba51d1b54ef19, disabling compactions & flushes 2024-12-10T11:00:59,329 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. 2024-12-10T11:00:59,329 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. 2024-12-10T11:00:59,329 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. after waiting 0 ms 2024-12-10T11:00:59,329 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. 
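[Editor's note] The flush at 11:00:59,293 above covers only 1 of 2 column families ("a"), which is what testMaxFlushedSequenceIdGoBackwards exercises: a store-specific flush must not move the max flushed sequence id backwards. From client code a single-family flush can be requested through Admin; a hedged sketch, assuming a reachable cluster, the table/family names mirrored from this log, and the single-family flush overload present on recent Admin versions.

    // Sketch: request a flush of just one column family, as the test drives internally.
    // Assumes a running cluster; table/family names mirror the log above.
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.util.Bytes;

    public class SingleFamilyFlushSketch {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          admin.flush(TableName.valueOf("table"), Bytes.toBytes("a")); // flush family 'a' only
        }
      }
    }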
2024-12-10T11:00:59,329 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 89440a2786b9c74de0aba51d1b54ef19 2/2 column families, dataSize=24 B heapSize=608 B 2024-12-10T11:00:59,336 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/b/c277373d8db641cb8bed21bdf24a6423 is 28, key is a/b:b/1733828453163/Put/seqid=0 2024-12-10T11:00:59,343 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741855_1031 (size=4945) 2024-12-10T11:00:59,343 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741855_1031 (size=4945) 2024-12-10T11:00:59,344 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741855_1031 (size=4945) 2024-12-10T11:00:59,344 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=13 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/b/c277373d8db641cb8bed21bdf24a6423 2024-12-10T11:00:59,353 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/.tmp/b/c277373d8db641cb8bed21bdf24a6423 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/b/c277373d8db641cb8bed21bdf24a6423 2024-12-10T11:00:59,361 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/89440a2786b9c74de0aba51d1b54ef19/b/c277373d8db641cb8bed21bdf24a6423, entries=1, sequenceid=13, filesize=4.8 K 2024-12-10T11:00:59,362 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 89440a2786b9c74de0aba51d1b54ef19 in 33ms, sequenceid=13, compaction requested=false 2024-12-10T11:00:59,368 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44515/user/jenkins/test-data/a1780653-3903-a852-848b-088fc2258559/data/default/table/89440a2786b9c74de0aba51d1b54ef19/recovered.edits/16.seqid, newMaxSeqId=16, maxSeqId=1 2024-12-10T11:00:59,369 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. 
2024-12-10T11:00:59,369 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 89440a2786b9c74de0aba51d1b54ef19: Waiting for close lock at 1733828459329Running coprocessor pre-close hooks at 1733828459329Disabling compacts and flushes for region at 1733828459329Disabling writes for close at 1733828459329Obtaining lock to block concurrent updates at 1733828459329Preparing flush snapshotting stores in 89440a2786b9c74de0aba51d1b54ef19 at 1733828459329Finished memstore snapshotting table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19., syncing WAL and waiting on mvcc, flushsize=dataSize=24, getHeapSize=576, getOffHeapSize=0, getCellsCount=1 at 1733828459330 (+1 ms)Flushing stores of table,,1733828452570.89440a2786b9c74de0aba51d1b54ef19. at 1733828459331 (+1 ms)Flushing 89440a2786b9c74de0aba51d1b54ef19/b: creating writer at 1733828459331Flushing 89440a2786b9c74de0aba51d1b54ef19/b: appending metadata at 1733828459335 (+4 ms)Flushing 89440a2786b9c74de0aba51d1b54ef19/b: closing flushed file at 1733828459335Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@856d03d: reopening flushed file at 1733828459352 (+17 ms)Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 89440a2786b9c74de0aba51d1b54ef19 in 33ms, sequenceid=13, compaction requested=false at 1733828459363 (+11 ms)Writing region close event to WAL at 1733828459364 (+1 ms)Running coprocessor post-close hooks at 1733828459369 (+5 ms)Closed at 1733828459369 2024-12-10T11:00:59,370 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:59,370 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:59,370 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:59,370 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:59,370 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:00:59,372 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741850_1026 (size=2357) 2024-12-10T11:00:59,373 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741850_1026 (size=2357) 2024-12-10T11:00:59,376 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/oldWALs 2024-12-10T11:00:59,376 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733828452560) 2024-12-10T11:00:59,386 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=207 (was 197) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:34316 [Waiting for operation #9] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) 
java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:55520 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: java.util.concurrent.ThreadPoolExecutor$Worker@3af2172a[State = -1, empty queue] java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:401) java.base@17.0.11/java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:903) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Timer for 'HBase' metrics system java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.util.TimerThread.mainLoop(Timer.java:563) java.base@17.0.11/java.util.TimerThread.run(Timer.java:516) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data6 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) 
java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data5 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:46350 [Waiting for operation #7] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=423 (was 419) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1158 (was 1233), ProcessCount=11 (was 11), AvailableMemoryMB=5199 (was 5255) 2024-12-10T11:00:59,394 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=207, OpenFileDescriptor=423, MaxFileDescriptor=1048576, SystemLoadAverage=1158, ProcessCount=11, AvailableMemoryMB=5198 2024-12-10T11:00:59,406 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741856_1032 (size=7) 2024-12-10T11:00:59,406 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741856_1032 (size=7) 2024-12-10T11:00:59,406 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741856_1032 (size=7) 2024-12-10T11:00:59,407 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8 2024-12-10T11:00:59,408 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:00:59,410 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:00:59,420 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(244): No decryptEncryptedDataEncryptionKey method in DFSClient, should be hadoop version with HDFS-12396 java.lang.NoSuchMethodException: org.apache.hadoop.hdfs.DFSClient.decryptEncryptedDataEncryptionKey(org.apache.hadoop.fs.FileEncryptionInfo) at java.lang.Class.getDeclaredMethod(Class.java:2675) ~[?:?] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelperWithoutHDFS12396(FanOutOneBlockAsyncDFSOutputSaslHelper.java:183) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelper(FanOutOneBlockAsyncDFSOutputSaslHelper.java:242) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.<clinit>(FanOutOneBlockAsyncDFSOutputSaslHelper.java:253) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT] at java.lang.Class.forName0(Native Method) ~[?:?] at java.lang.Class.forName(Class.java:375) ~[?:?] at org.apache.hadoop.hbase.wal.AsyncFSWALProvider.load(AsyncFSWALProvider.java:150) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.getProviderClass(WALFactory.java:174) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:262) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:214) ~[classes/:?] 
at org.apache.hadoop.hbase.HBaseTestingUtil.createWal(HBaseTestingUtil.java:2160) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2205) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2169) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFlushSequenceIdIsGreaterThanAllEditsInHFile(AbstractTestFSWAL.java:425) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
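[Editor's note] The NoSuchMethodException traced above is expected probing, not a failure: while loading AsyncFSWALProvider, HBase reflects for DFSClient.decryptEncryptedDataEncryptionKey to detect whether the Hadoop build carries HDFS-12396, and falls back to the pre-HDFS-12396 crypto helper when the method is absent. Which provider gets loaded at all is a configuration choice; a short sketch of that knob follows, with the value set programmatically here purely for illustration (the class name is invented).

    // Sketch: the configuration key that selects the WAL provider being loaded above.
    // "asyncfs" (AsyncFSWALProvider) and "filesystem" (FSHLog) are the usual values.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class WalProviderSketch {
      static Configuration asyncFsConf() {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.wal.provider", "asyncfs");      // what this test instantiates
        // conf.set("hbase.wal.provider", "filesystem"); // classic FSHLog, as in the earlier tests
        return conf;
      }
    }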
2024-12-10T11:00:59,425 INFO [Time-limited test {}] wal.WALFactory(196): Instantiating WALProvider of type class org.apache.hadoop.hbase.wal.AsyncFSWALProvider 2024-12-10T11:00:59,429 DEBUG [Time-limited test {}] channel.MultithreadEventLoopGroup(44): -Dio.netty.eventLoopThreads: 16 2024-12-10T11:00:59,440 DEBUG [Time-limited test {}] nio.NioEventLoop(110): -Dio.netty.noKeySetOptimization: false 2024-12-10T11:00:59,440 DEBUG [Time-limited test {}] nio.NioEventLoop(111): -Dio.netty.selectorAutoRebuildThreshold: 512 2024-12-10T11:00:59,456 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor defaultMonitorName 2024-12-10T11:00:59,460 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-10T11:00:59,460 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=hregion-19077075, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/WALs/hregion-19077075, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/oldWALs, maxLogs=1760 2024-12-10T11:00:59,476 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputHelper(524): When create output stream for /user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/WALs/hregion-19077075/hregion-19077075.1733828459460, exclude list is [], retry=0 2024-12-10T11:00:59,490 DEBUG [Time-limited test {}] channel.DefaultChannelId(84): -Dio.netty.processId: 26149 (auto-detected) 2024-12-10T11:00:59,493 DEBUG [Time-limited test {}] channel.DefaultChannelId(106): -Dio.netty.machineId: 02:42:ac:ff:fe:11:00:02 (auto-detected) 2024-12-10T11:00:59,514 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:39069,DS-75516e36-daad-4b45-b0c0-0db5f534803e,DISK] 2024-12-10T11:00:59,514 DEBUG [AsyncFSWAL-1-3 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:34987,DS-02e043e0-225a-4869-b5f6-49f0d0f9039c,DISK] 2024-12-10T11:00:59,514 DEBUG [AsyncFSWAL-1-2 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:34489,DS-a7373fce-bbbd-4059-870f-74703d77f511,DISK] 2024-12-10T11:00:59,518 DEBUG [AsyncFSWAL-1-3 {}] asyncfs.ProtobufDecoder(117): Hadoop 3.3 and above shades protobuf. 
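[Editor's note] The "WAL configuration: blocksize=2 MB, rollsize=1 MB, ..., maxLogs=1760" lines in this run show the two roll triggers: a writer is rolled once it reaches rollsize (blocksize times the roll multiplier; 2 MB x 0.5 = 1 MB here), and once the count of un-archived WALs exceeds maxLogs the "Too many WALs; ... forcing (partial) flush" path seen at 11:00:52 kicks in. A hedged sketch of the corresponding site-level keys; the values below are illustrative, not this test's, and the class name is invented.

    // Sketch: the knobs behind "blocksize=..., rollsize=..., maxLogs=..." above.
    // Values here are illustrative only.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class WalRollTuningSketch {
      static Configuration tuned() {
        Configuration conf = HBaseConfiguration.create();
        conf.setLong("hbase.regionserver.hlog.blocksize", 2L * 1024 * 1024); // WAL block size
        conf.setFloat("hbase.regionserver.logroll.multiplier", 0.5f);        // rollsize = blocksize * multiplier
        conf.setInt("hbase.regionserver.maxlogs", 32); // past this, memstores are force-flushed
        return conf;
      }
    }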
2024-12-10T11:00:59,555 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/WALs/hregion-19077075/hregion-19077075.1733828459460
2024-12-10T11:00:59,555 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new AsyncFSWAL writer with pipeline: [(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:38683:38683)]
2024-12-10T11:00:59,555 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => d957ba752c306037ce8176663a8a5103, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testFlushSequenceIdIsGreaterThanAllEditsInHFile', {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041
2024-12-10T11:00:59,566 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741858_1034 (size=82)
2024-12-10T11:00:59,567 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741858_1034 (size=82)
2024-12-10T11:00:59,567 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741858_1034 (size=82)
2024-12-10T11:00:59,568 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-10T11:00:59,570 INFO [StoreOpener-d957ba752c306037ce8176663a8a5103-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,573 INFO [StoreOpener-d957ba752c306037ce8176663a8a5103-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region d957ba752c306037ce8176663a8a5103 columnFamilyName f
2024-12-10T11:00:59,573 DEBUG [StoreOpener-d957ba752c306037ce8176663a8a5103-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-10T11:00:59,574 INFO [StoreOpener-d957ba752c306037ce8176663a8a5103-1 {}] regionserver.HStore(327): Store=d957ba752c306037ce8176663a8a5103/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-10T11:00:59,574 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,575 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,576 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,576 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,577 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,577 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,580 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,583 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-12-10T11:00:59,584 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened d957ba752c306037ce8176663a8a5103; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=71317705, jitterRate=0.06271661818027496}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-10T11:00:59,588 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for d957ba752c306037ce8176663a8a5103: Writing region info on filesystem at 1733828459569Initializing all the Stores at 1733828459570 (+1 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733828459570Cleaning up temporary data from old regions at 1733828459577 (+7 ms)Region opened successfully at 1733828459588 (+11 ms)
2024-12-10T11:00:59,588 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing d957ba752c306037ce8176663a8a5103, disabling compactions & flushes
2024-12-10T11:00:59,588 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.
2024-12-10T11:00:59,588 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.
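The 1.seqid file recorded by WALSplitUtil above is a marker under recovered.edits/ whose numeric filename carries the region's flushed sequence id; on open, the region's next sequence id is computed past the largest such marker (hence newMaxSeqId=1, maxSeqId=-1, then "next sequenceid=2"). A toy sketch of that filename convention, mirroring the idea rather than HBase's WALSplitUtil internals:

import java.util.List;

public class SeqIdMarkers {
    // Given the file names under recovered.edits/, return the largest
    // "<n>.seqid" marker value, or -1 when none exists (maxSeqId=-1 above).
    static long maxMarkerSeqId(List<String> names) {
        long max = -1L;
        for (String name : names) {
            if (name.endsWith(".seqid")) {
                max = Math.max(max, Long.parseLong(
                    name.substring(0, name.length() - ".seqid".length())));
            }
        }
        return max;
    }

    public static void main(String[] args) {
        long max = maxMarkerSeqId(List.of("1.seqid"));
        System.out.println("maxSeqId=" + max + ", next sequenceid=" + (max + 1));
    }
}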
2024-12-10T11:00:59,589 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103. after waiting 0 ms
2024-12-10T11:00:59,589 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.
2024-12-10T11:00:59,589 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.
2024-12-10T11:00:59,589 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for d957ba752c306037ce8176663a8a5103: Waiting for close lock at 1733828459588Disabling compacts and flushes for region at 1733828459588Disabling writes for close at 1733828459589 (+1 ms)Writing region close event to WAL at 1733828459589Closed at 1733828459589
2024-12-10T11:00:59,595 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741857_1033 (size=93)
2024-12-10T11:00:59,596 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741857_1033 (size=93)
2024-12-10T11:00:59,596 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741857_1033 (size=93)
2024-12-10T11:00:59,599 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/oldWALs
2024-12-10T11:00:59,599 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: AsyncFSWAL hregion-19077075:(num 1733828459460)
2024-12-10T11:00:59,602 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
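The close message identifies the WAL by its creation timestamp: "(num 1733828459460)" is the same number embedded in the file name hregion-19077075.1733828459460. A toy reconstruction of that "<prefix>.<creation-time-millis>" naming, inferred from the file names in this log rather than from AbstractFSWAL's code:

public class WalFileName {
    static String walFileName(String prefix, long creationTimeMillis) {
        return prefix + "." + creationTimeMillis; // e.g. hregion-19077075.1733828459460
    }

    public static void main(String[] args) {
        System.out.println(walFileName("hregion-19077075", 1733828459460L));
    }
}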
2024-12-10T11:00:59,602 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/testFlushSequenceIdIsGreaterThanAllEditsInHFile, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/testFlushSequenceIdIsGreaterThanAllEditsInHFile, maxLogs=1760
2024-12-10T11:00:59,603 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828459603
2024-12-10T11:00:59,613 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/testFlushSequenceIdIsGreaterThanAllEditsInHFile/wal.1733828459603
2024-12-10T11:00:59,614 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new writer with pipeline: [(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:38683:38683)]
2024-12-10T11:00:59,618 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:00:59,619 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => d957ba752c306037ce8176663a8a5103, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.', STARTKEY => '', ENDKEY => ''}
2024-12-10T11:00:59,620 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-10T11:00:59,620 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,620 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,622 INFO [StoreOpener-d957ba752c306037ce8176663a8a5103-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,624 INFO [StoreOpener-d957ba752c306037ce8176663a8a5103-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region d957ba752c306037ce8176663a8a5103 columnFamilyName f
2024-12-10T11:00:59,624 DEBUG [StoreOpener-d957ba752c306037ce8176663a8a5103-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-10T11:00:59,625 INFO [StoreOpener-d957ba752c306037ce8176663a8a5103-1 {}] regionserver.HStore(327): Store=d957ba752c306037ce8176663a8a5103/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-10T11:00:59,625 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,626 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,627 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,629 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,629 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,629 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,632 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for d957ba752c306037ce8176663a8a5103
2024-12-10T11:00:59,633 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened d957ba752c306037ce8176663a8a5103; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=73599671, jitterRate=0.09672056138515472}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-10T11:00:59,636 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for d957ba752c306037ce8176663a8a5103: Writing region info on filesystem at 1733828459620Initializing all the Stores at 1733828459622 (+2 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733828459622Cleaning up temporary data from old regions at 1733828459629 (+7 ms)Region opened successfully at 1733828459636 (+7 ms)
2024-12-10T11:00:59,654 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.log.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir so I do NOT create it in target/test-data/ac812df6-73ef-08f5-6f35-f2b2fa2f4b91
2024-12-10T11:00:59,654 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.log.dir property value differs in configuration and system: Configuration=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/../logs while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir Erasing configuration value by system value.
2024-12-10T11:00:59,654 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.tmp.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.tmp.dir so I do NOT create it in target/test-data/ac812df6-73ef-08f5-6f35-f2b2fa2f4b91
2024-12-10T11:00:59,654 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.tmp.dir property value differs in configuration and system: Configuration=/tmp/hadoop-jenkins while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.tmp.dir Erasing configuration value by system value.
2024-12-10T11:00:59,655 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/ac812df6-73ef-08f5-6f35-f2b2fa2f4b91
2024-12-10T11:00:59,683 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing d957ba752c306037ce8176663a8a5103 1/1 column families, dataSize=1.14 KB heapSize=2.13 KB
2024-12-10T11:00:59,783 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:00:59,884 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:00:59,984 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:01:00,085 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:01:00,186 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:01:00,286 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:01:00,387 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:01:00,487 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:01:00,588 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:01:00,688 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:01:00,789 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:01:00,815 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103/.tmp/f/854acc58df3c4a2187cd9c611ac1926d is 121, key is testFlushSequenceIdIsGreaterThanAllEditsInHFile/f:x0/1733828459655/Put/seqid=0
2024-12-10T11:01:00,825 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741860_1036 (size=6333)
2024-12-10T11:01:00,826 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741860_1036 (size=6333)
2024-12-10T11:01:00,826 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741860_1036 (size=6333)
2024-12-10T11:01:00,827 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=1.14 KB at sequenceid=23 (bloomFilter=true), to=hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103/.tmp/f/854acc58df3c4a2187cd9c611ac1926d
2024-12-10T11:01:00,843 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103/.tmp/f/854acc58df3c4a2187cd9c611ac1926d as hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103/f/854acc58df3c4a2187cd9c611ac1926d
2024-12-10T11:01:00,852 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/d957ba752c306037ce8176663a8a5103/f/854acc58df3c4a2187cd9c611ac1926d, entries=10, sequenceid=23, filesize=6.2 K
2024-12-10T11:01:00,953 DEBUG [FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-12-10T11:01:00,958 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~1.14 KB/1170, heapSize ~2.11 KB/2160, currentSize=0 B/0 for d957ba752c306037ce8176663a8a5103 in 1274ms, sequenceid=23, compaction requested=false
2024-12-10T11:01:00,958 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for d957ba752c306037ce8176663a8a5103:
2024-12-10T11:01:00,958 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing d957ba752c306037ce8176663a8a5103, disabling compactions & flushes
2024-12-10T11:01:00,958 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.
2024-12-10T11:01:00,958 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.
2024-12-10T11:01:00,958 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103. after waiting 0 ms
2024-12-10T11:01:00,958 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.
2024-12-10T11:01:00,959 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1733828459412.d957ba752c306037ce8176663a8a5103.
2024-12-10T11:01:00,959 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for d957ba752c306037ce8176663a8a5103: Waiting for close lock at 1733828460958Disabling compacts and flushes for region at 1733828460958Disabling writes for close at 1733828460958Writing region close event to WAL at 1733828460959 (+1 ms)Closed at 1733828460959
2024-12-10T11:01:00,960 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:01:00,960 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:01:00,960 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:01:00,960 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:01:00,961 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:01:00,964 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741859_1035 (size=16537)
2024-12-10T11:01:00,964 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741859_1035 (size=16537)
2024-12-10T11:01:00,964 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741859_1035 (size=16537)
2024-12-10T11:01:00,968 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/1235ab8b-969f-b02b-a9d0-135b36765522/testFlushSequenceIdIsGreaterThanAllEditsInHFile
2024-12-10T11:01:00,968 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: wal:(num 1733828459603)
2024-12-10T11:01:00,979 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=216 (was 207)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-626172146_22 at /127.0.0.1:34316 [Waiting for operation #10]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-626172146_22 at /127.0.0.1:55520 [Waiting for operation #4]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:34372 [Cleaning up]
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    java.base@17.0.11/java.lang.Object.wait(Object.java:338)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.close(BlockReceiver.java:1381)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1017)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: AsyncFSWAL-1-1
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
    app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: AsyncFSWAL-1-3
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
    app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:46350 [Waiting for operation #9]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: AsyncFSWAL-1-2
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879)
    app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
    app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
    app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=457 (was 423) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1158 (was 1158), ProcessCount=11 (was 11), AvailableMemoryMB=5111 (was 5198)
2024-12-10T11:01:00,988 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=215, OpenFileDescriptor=457, MaxFileDescriptor=1048576, SystemLoadAverage=1158, ProcessCount=11, AvailableMemoryMB=5110
2024-12-10T11:01:01,000 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741861_1037 (size=7)
2024-12-10T11:01:01,001 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741861_1037 (size=7)
2024-12-10T11:01:01,001 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741861_1037 (size=7)
2024-12-10T11:01:01,003 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8
2024-12-10T11:01:01,003 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:01:01,004 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-12-10T11:01:01,011 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-12-10T11:01:01,011 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/oldWALs, maxLogs=1760
2024-12-10T11:01:01,012 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828461012
2024-12-10T11:01:01,023 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461012
2024-12-10T11:01:01,032 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:40781:40781)]
2024-12-10T11:01:01,033 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828461033
2024-12-10T11:01:01,047 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828461034
2024-12-10T11:01:01,052 WARN [Time-limited test {}] wal.AbstractProtobufLogWriter(199): Init output failed, path=hdfs://localhost:44515/user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461034
java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?]
    at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?]
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.create(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy45.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?]
    ... 41 more
2024-12-10T11:01:01,054 DEBUG [Time-limited test {}] wal.FSHLogProvider(93): Error instantiating log writer.
java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?]
    at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?]
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.create(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy45.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] [... the same five-frame proxy cycle repeats verbatim many more times; elided ...]
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 41 more 2024-12-10T11:01:01,067 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=242 (was 215) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:46434 [Receiving block BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:34987, 127.0.0.1:34489]
java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:55602 [Receiving block BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: ResponseProcessor for block BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) 
java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461033 block BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:34406 [Receiving block BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:55588 
[Receiving block BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:34489] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) 
app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:34400 [Receiving block BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: FSHLog-0-hdfs://localhost:44515/user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134-prefix:default java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) 
java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:46448 [Receiving block BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461012 block BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:34987] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) 
app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: ResponseProcessor for block BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:39069, 127.0.0.1:34987] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread 
LEAK? -, OpenFileDescriptor=483 (was 457) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1158 (was 1158), ProcessCount=11 (was 11), AvailableMemoryMB=5101 (was 5110) 2024-12-10T11:01:01,077 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=242, OpenFileDescriptor=483, MaxFileDescriptor=1048576, SystemLoadAverage=1158, ProcessCount=11, AvailableMemoryMB=5100 2024-12-10T11:01:01,096 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741864_1040 (size=7) 2024-12-10T11:01:01,096 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741864_1040 (size=7) 2024-12-10T11:01:01,096 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741864_1040 (size=7) 2024-12-10T11:01:01,098 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8 2024-12-10T11:01:01,098 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:01:01,100 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:01:01,105 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
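A note on the testFailedToCreateWALIfParentRenamed failure traced above, before the testWALCoprocessorLoaded output continues: the failure comes down to HDFS's non-recursive create. Once the WAL's parent directory has been renamed away, FSDirectory.verifyParentDir on the NameNode rejects the create with "Parent directory doesn't exist", and the client unwraps the RemoteException into the java.io.FileNotFoundException that FSHLogProvider logs. The following is a minimal sketch of that failure mode, not the test's actual code; it assumes the hadoop-hdfs test artifact (for MiniDFSCluster) is on the classpath, and the path names are illustrative.

import java.io.FileNotFoundException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.MiniDFSCluster;

public class ParentRenamedSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    try {
      FileSystem fs = cluster.getFileSystem();
      Path parent = new Path("/test-data/testFailedToCreateWALIfParentRenamed"); // illustrative
      fs.mkdirs(parent);
      fs.rename(parent, new Path("/test-data/renamed")); // pull the directory out from under the writer
      try {
        // createNonRecursive (the call visible in the trace above) refuses to
        // recreate the missing parent, unlike plain create(), so the NameNode
        // fails the RPC and the client surfaces FileNotFoundException.
        fs.createNonRecursive(new Path(parent, "wal.1"), FsPermission.getFileDefault(),
            false, 4096, (short) 1, 64L * 1024 * 1024, null).close();
      } catch (FileNotFoundException expected) {
        System.out.println("as logged: " + expected.getMessage());
      }
    } finally {
      cluster.shutdown();
    }
  }
}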
2024-12-10T11:01:01,105 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/c4212bfd-92e6-0470-437b-a29be9d63285/testWALCoprocessorLoaded, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/c4212bfd-92e6-0470-437b-a29be9d63285/oldWALs, maxLogs=1760 2024-12-10T11:01:01,106 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828461106 2024-12-10T11:01:01,138 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/c4212bfd-92e6-0470-437b-a29be9d63285/testWALCoprocessorLoaded/wal.1733828461106 2024-12-10T11:01:01,139 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:40781:40781)] 2024-12-10T11:01:01,140 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,141 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,141 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,141 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,141 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,146 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741865_1041 (size=93) 2024-12-10T11:01:01,147 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741865_1041 (size=93) 2024-12-10T11:01:01,147 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741865_1041 (size=93) 2024-12-10T11:01:01,150 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/c4212bfd-92e6-0470-437b-a29be9d63285/oldWALs 2024-12-10T11:01:01,150 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733828461106) 2024-12-10T11:01:01,159 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=247 (was 242) - Thread LEAK? -, OpenFileDescriptor=495 (was 483) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1158 (was 1158), ProcessCount=11 (was 11), AvailableMemoryMB=5092 (was 5100) 2024-12-10T11:01:01,169 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=247, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=1158, ProcessCount=11, AvailableMemoryMB=5092 2024-12-10T11:01:01,183 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741866_1042 (size=7) 2024-12-10T11:01:01,183 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741866_1042 (size=7) 2024-12-10T11:01:01,184 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741866_1042 (size=7) 2024-12-10T11:01:01,185 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8 2024-12-10T11:01:01,185 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:01:01,187 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:01:01,192 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-12-10T11:01:01,193 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/7a2ff50c-545d-5eb4-8caf-dfb34e2bae3c/testSyncNoAppend, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/7a2ff50c-545d-5eb4-8caf-dfb34e2bae3c/testSyncNoAppend, maxLogs=1760 2024-12-10T11:01:01,193 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828461193 2024-12-10T11:01:01,206 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/7a2ff50c-545d-5eb4-8caf-dfb34e2bae3c/testSyncNoAppend/wal.1733828461193 2024-12-10T11:01:01,208 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:40781:40781)] 2024-12-10T11:01:01,208 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor 2024-12-10T11:01:01,209 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor Metrics about HBase WALObservers 2024-12-10T11:01:01,214 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,214 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,214 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,214 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,215 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 
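The testSyncNoAppend run above creates a fresh WAL and closes it almost immediately; the point of the test is that a sync() issued before any append completes instead of blocking (the "interrupted" SyncRunner lines only appear at close). A rough sketch of that scenario against the public WAL API follows; the factory id and region info are illustrative assumptions, and the configuration is presumed to point at a live (mini-)cluster filesystem.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;

public class SyncNoAppendSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create(); // assumes hbase.rootdir etc. are set
    WALFactory walFactory = new WALFactory(conf, "testSyncNoAppend");
    try {
      RegionInfo ri = RegionInfoBuilder.newBuilder(TableName.valueOf("table")).build();
      WAL wal = walFactory.getWAL(ri);
      wal.sync(); // nothing appended yet; expected to return cleanly rather than hang
    } finally {
      walFactory.close(); // interrupts the SyncRunner threads, as logged above
    }
  }
}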
2024-12-10T11:01:01,217 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741867_1043 (size=93) 2024-12-10T11:01:01,218 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741867_1043 (size=93) 2024-12-10T11:01:01,218 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741867_1043 (size=93) 2024-12-10T11:01:01,221 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/7a2ff50c-545d-5eb4-8caf-dfb34e2bae3c/testSyncNoAppend 2024-12-10T11:01:01,221 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733828461193) 2024-12-10T11:01:01,232 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=252 (was 247) - Thread LEAK? -, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1158 (was 1158), ProcessCount=11 (was 11), AvailableMemoryMB=5085 (was 5092) 2024-12-10T11:01:01,241 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=252, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=1158, ProcessCount=11, AvailableMemoryMB=5084 2024-12-10T11:01:01,252 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741868_1044 (size=7) 2024-12-10T11:01:01,252 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741868_1044 (size=7) 2024-12-10T11:01:01,253 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741868_1044 (size=7) 2024-12-10T11:01:01,255 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8 2024-12-10T11:01:01,256 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:01:01,258 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:01:01,264 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
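Each test's "WAL configuration: blocksize=2 MB, rollsize=1 MB, ... maxLogs=1760" line reflects the WAL sizing knobs; AbstractFSWAL derives the roll size from the block size times a roll multiplier. A hedged sketch of how those values could be pinned in configuration (the key names are standard HBase keys, but defaults and the exact derivation vary by version):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class WalSizingSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // 2 MB WAL blocks with a 0.5 multiplier give a 1 MB roll threshold,
    // matching "blocksize=2 MB, rollsize=1 MB" in the log above.
    conf.setLong("hbase.regionserver.hlog.blocksize", 2L * 1024 * 1024);
    conf.setFloat("hbase.regionserver.logroll.multiplier", 0.5f);
    conf.setInt("hbase.regionserver.maxlogs", 1760); // the maxLogs=1760 seen above
    long rollSize = (long) (conf.getLong("hbase.regionserver.hlog.blocksize", 0)
        * conf.getFloat("hbase.regionserver.logroll.multiplier", 0.5f));
    System.out.println("rollsize=" + rollSize + " bytes"); // 1048576
  }
}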
2024-12-10T11:01:01,264 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/205439d6-f3a3-2523-dc8e-8bdff6cb8457/testWriteEntryCanBeNull, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/205439d6-f3a3-2523-dc8e-8bdff6cb8457/testWriteEntryCanBeNull, maxLogs=1760 2024-12-10T11:01:01,265 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828461265 2024-12-10T11:01:01,275 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/205439d6-f3a3-2523-dc8e-8bdff6cb8457/testWriteEntryCanBeNull/wal.1733828461265 2024-12-10T11:01:01,280 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:36879:36879),(127.0.0.1/127.0.0.1:38683:38683)] 2024-12-10T11:01:01,284 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,284 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,285 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,285 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,285 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-12-10T11:01:01,292 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741869_1045 (size=93) 2024-12-10T11:01:01,294 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741869_1045 (size=93) 2024-12-10T11:01:01,294 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741869_1045 (size=93) 2024-12-10T11:01:01,298 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/205439d6-f3a3-2523-dc8e-8bdff6cb8457/testWriteEntryCanBeNull 2024-12-10T11:01:01,298 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733828461265) 2024-12-10T11:01:01,311 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=257 (was 252) - Thread LEAK? 
-, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1158 (was 1158), ProcessCount=11 (was 11), AvailableMemoryMB=5079 (was 5084) 2024-12-10T11:01:01,323 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=257, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=1158, ProcessCount=11, AvailableMemoryMB=5078 2024-12-10T11:01:01,336 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741870_1046 (size=7) 2024-12-10T11:01:01,336 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741870_1046 (size=7) 2024-12-10T11:01:01,337 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741870_1046 (size=7) 2024-12-10T11:01:01,338 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041 with version=8 2024-12-10T11:01:01,338 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:01:01,340 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-12-10T11:01:01,348 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
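The testUnflushedSeqIdTrackingWithAsyncWal output that follows bootstraps a region for a table with a single column family 'b' (the full descriptor is printed in the HRegion(7572) line below). A descriptor like it can be built with the public builder API; this sketch takes only the table name, family, VERSIONS and BLOCKSIZE from the log and leaves everything else at its default.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class TableDescriptorSketch {
  public static void main(String[] args) {
    TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("table"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("b"))
            .setMaxVersions(1)   // VERSIONS => '1'
            .setBlocksize(65536) // BLOCKSIZE => '65536 B (64KB)'
            .build())
        .build();
    System.out.println(td); // prints a descriptor like the one in the log below
  }
}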
2024-12-10T11:01:01,348 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/testUnflushedSeqIdTrackingWithAsyncWal, archiveDir=hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/oldWALs, maxLogs=1760
2024-12-10T11:01:01,350 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1733828461349
2024-12-10T11:01:01,361 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/testUnflushedSeqIdTrackingWithAsyncWal/wal.1733828461349
2024-12-10T11:01:01,365 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40781:40781),(127.0.0.1/127.0.0.1:38683:38683),(127.0.0.1/127.0.0.1:36879:36879)]
2024-12-10T11:01:01,368 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 91fe88475e024706738eb7f8e7a1c2d7, NAME => 'table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82
2024-12-10T11:01:01,398 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741872_1048 (size=40)
2024-12-10T11:01:01,399 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741872_1048 (size=40)
2024-12-10T11:01:01,400 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741872_1048 (size=40)
2024-12-10T11:01:01,401 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-10T11:01:01,403 INFO [StoreOpener-91fe88475e024706738eb7f8e7a1c2d7-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,406 INFO [StoreOpener-91fe88475e024706738eb7f8e7a1c2d7-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 91fe88475e024706738eb7f8e7a1c2d7 columnFamilyName b
2024-12-10T11:01:01,406 DEBUG [StoreOpener-91fe88475e024706738eb7f8e7a1c2d7-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-10T11:01:01,407 INFO [StoreOpener-91fe88475e024706738eb7f8e7a1c2d7-1 {}] regionserver.HStore(327): Store=91fe88475e024706738eb7f8e7a1c2d7/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-10T11:01:01,409 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,417 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/table/91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,418 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/table/91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,419 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/3bf06d2b-8db4-aae0-05b2-e35a9fe6b632/data/default/table/91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,419 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,420 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,422 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,430 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44515/user/jenkins/test-data/3bf06d2b-8db4-aae0-05b2-e35a9fe6b632/data/default/table/91fe88475e024706738eb7f8e7a1c2d7/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-12-10T11:01:01,431 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 91fe88475e024706738eb7f8e7a1c2d7; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=74344489, jitterRate=0.10781921446323395}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-10T11:01:01,434 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 91fe88475e024706738eb7f8e7a1c2d7: Writing region info on filesystem at 1733828461401Initializing all the Stores at 1733828461403 (+2 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733828461403Cleaning up temporary data from old regions at 1733828461420 (+17 ms)Region opened successfully at 1733828461433 (+13 ms)
2024-12-10T11:01:01,434 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 91fe88475e024706738eb7f8e7a1c2d7, disabling compactions & flushes
2024-12-10T11:01:01,434 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.
2024-12-10T11:01:01,434 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.
2024-12-10T11:01:01,434 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7. after waiting 0 ms
2024-12-10T11:01:01,434 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.
2024-12-10T11:01:01,437 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.
2024-12-10T11:01:01,438 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 91fe88475e024706738eb7f8e7a1c2d7: Waiting for close lock at 1733828461434Disabling compacts and flushes for region at 1733828461434Disabling writes for close at 1733828461434Writing region close event to WAL at 1733828461437 (+3 ms)Closed at 1733828461437
2024-12-10T11:01:01,439 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 91fe88475e024706738eb7f8e7a1c2d7, NAME => 'table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.', STARTKEY => '', ENDKEY => ''}
2024-12-10T11:01:01,440 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,440 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-12-10T11:01:01,440 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,441 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,450 INFO [StoreOpener-91fe88475e024706738eb7f8e7a1c2d7-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,452 INFO [StoreOpener-91fe88475e024706738eb7f8e7a1c2d7-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 91fe88475e024706738eb7f8e7a1c2d7 columnFamilyName b
2024-12-10T11:01:01,452 DEBUG [StoreOpener-91fe88475e024706738eb7f8e7a1c2d7-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-12-10T11:01:01,453 INFO [StoreOpener-91fe88475e024706738eb7f8e7a1c2d7-1 {}] regionserver.HStore(327): Store=91fe88475e024706738eb7f8e7a1c2d7/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-12-10T11:01:01,453 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,454 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/table/91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,454 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/data/default/table/91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,456 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:44515/user/jenkins/test-data/3bf06d2b-8db4-aae0-05b2-e35a9fe6b632/data/default/table/91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,457 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,457 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,460 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,461 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 91fe88475e024706738eb7f8e7a1c2d7; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=61437518, jitterRate=-0.08450964093208313}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-12-10T11:01:01,461 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for 91fe88475e024706738eb7f8e7a1c2d7
2024-12-10T11:01:01,463 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 91fe88475e024706738eb7f8e7a1c2d7: Running coprocessor pre-open hook at 1733828461441Writing region info on filesystem at 1733828461441Initializing all the Stores at 1733828461444 (+3 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1733828461444Cleaning up temporary data from old regions at 1733828461457 (+13 ms)Running coprocessor post-open hooks at 1733828461461 (+4 ms)Region opened successfully at 1733828461463 (+2 ms)
2024-12-10T11:01:04,471 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1722): Closing 91fe88475e024706738eb7f8e7a1c2d7, disabling compactions & flushes
2024-12-10T11:01:04,471 INFO [pool-106-thread-1 {}] regionserver.HRegion(1755): Closing region table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.
2024-12-10T11:01:04,471 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.
2024-12-10T11:01:04,471 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1843): Acquired close lock on table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7. after waiting 0 ms
2024-12-10T11:01:04,471 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1853): Updates disabled for region table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.
2024-12-10T11:01:04,472 INFO [pool-106-thread-1 {}] regionserver.HRegion(2902): Flushing 91fe88475e024706738eb7f8e7a1c2d7 1/1 column families, dataSize=48 B heapSize=448 B
2024-12-10T11:01:04,872 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties
2024-12-10T11:01:07,492 DEBUG [pool-106-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/91fe88475e024706738eb7f8e7a1c2d7/.tmp/b/6fea4430e3cb452c85d2c7564130a2cb is 28, key is b/b:b/1733828461466/Put/seqid=0
2024-12-10T11:01:07,502 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741873_1049 (size=4945)
2024-12-10T11:01:07,503 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741873_1049 (size=4945)
2024-12-10T11:01:07,503 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741873_1049 (size=4945)
2024-12-10T11:01:07,504 INFO [pool-106-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/91fe88475e024706738eb7f8e7a1c2d7/.tmp/b/6fea4430e3cb452c85d2c7564130a2cb
2024-12-10T11:01:07,516 DEBUG [pool-106-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/91fe88475e024706738eb7f8e7a1c2d7/.tmp/b/6fea4430e3cb452c85d2c7564130a2cb as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/91fe88475e024706738eb7f8e7a1c2d7/b/6fea4430e3cb452c85d2c7564130a2cb
2024-12-10T11:01:07,529 INFO [pool-106-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/91fe88475e024706738eb7f8e7a1c2d7/b/6fea4430e3cb452c85d2c7564130a2cb, entries=1, sequenceid=6, filesize=4.8 K
2024-12-10T11:01:07,531 INFO [pool-106-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 91fe88475e024706738eb7f8e7a1c2d7 in 3059ms, sequenceid=6, compaction requested=false
2024-12-10T11:01:07,541 DEBUG [pool-106-thread-1 {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:44515/user/jenkins/test-data/3bf06d2b-8db4-aae0-05b2-e35a9fe6b632/data/default/table/91fe88475e024706738eb7f8e7a1c2d7/recovered.edits/9.seqid, newMaxSeqId=9, maxSeqId=1
2024-12-10T11:01:07,542 INFO [pool-106-thread-1 {}] regionserver.HRegion(1973): Closed table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7.
2024-12-10T11:01:07,542 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1676): Region close journal for 91fe88475e024706738eb7f8e7a1c2d7: Waiting for close lock at 1733828464471Running coprocessor pre-close hooks at 1733828464471Disabling compacts and flushes for region at 1733828464471Disabling writes for close at 1733828464471Obtaining lock to block concurrent updates at 1733828464472 (+1 ms)Preparing flush snapshotting stores in 91fe88475e024706738eb7f8e7a1c2d7 at 1733828464472Finished memstore snapshotting table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7., syncing WAL and waiting on mvcc, flushsize=dataSize=48, getHeapSize=432, getOffHeapSize=0, getCellsCount=2 at 1733828464473 (+1 ms)Flushing stores of table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7. at 1733828467473 (+3000 ms)Flushing 91fe88475e024706738eb7f8e7a1c2d7/b: creating writer at 1733828467473Flushing 91fe88475e024706738eb7f8e7a1c2d7/b: appending metadata at 1733828467491 (+18 ms)Flushing 91fe88475e024706738eb7f8e7a1c2d7/b: closing flushed file at 1733828467491Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@5b4ce435: reopening flushed file at 1733828467514 (+23 ms)Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 91fe88475e024706738eb7f8e7a1c2d7 in 3059ms, sequenceid=6, compaction requested=false at 1733828467531 (+17 ms)Writing region close event to WAL at 1733828467532 (+1 ms)Running coprocessor post-close hooks at 1733828467542 (+10 ms)Closed at 1733828467542
2024-12-10T11:01:07,543 INFO [pool-106-thread-1 {}] wal.AbstractTestFSWAL(620): Close result:{[B@291f7180=[/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/data/default/table/91fe88475e024706738eb7f8e7a1c2d7/b/6fea4430e3cb452c85d2c7564130a2cb]}
2024-12-10T11:01:07,543 WARN [Time-limited test {}] regionserver.HRegion(1707): Region table,,1733828461368.91fe88475e024706738eb7f8e7a1c2d7. already closed
2024-12-10T11:01:07,543 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 91fe88475e024706738eb7f8e7a1c2d7: Waiting for close lock at 1733828467543
2024-12-10T11:01:07,544 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:01:07,545 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:01:07,545 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:01:07,545 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:01:07,545 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-12-10T11:01:07,548 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:39069 is added to blk_1073741871_1047 (size=1206)
2024-12-10T11:01:07,549 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34987 is added to blk_1073741871_1047 (size=1206)
2024-12-10T11:01:07,549 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34489 is added to blk_1073741871_1047 (size=1206)
2024-12-10T11:01:07,552 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/d7e4466e-38cd-04a0-4d83-1cdb000f3041/oldWALs
2024-12-10T11:01:07,553 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1733828461349)
2024-12-10T11:01:07,566 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=261 (was 257)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:39280 [Waiting for operation #3]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:42238 [Waiting for operation #3]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
- Thread LEAK? -, OpenFileDescriptor=503 (was 495) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=1041 (was 1158), ProcessCount=11 (was 11), AvailableMemoryMB=4659 (was 5078)
2024-12-10T11:01:07,566 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1019): Shutting down minicluster
2024-12-10T11:01:07,568 WARN [PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039, type=LAST_IN_PIPELINE {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-622500072-172.17.0.2-1733828441734:1073741863
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-10T11:01:07,569 WARN [ResponseProcessor for block BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039
java.io.IOException: Bad response ERROR for BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039 from datanode DatanodeInfoWithStorage[127.0.0.1:34987,DS-02e043e0-225a-4869-b5f6-49f0d0f9039c,DISK]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-10T11:01:07,569 WARN [DataStreamer for file /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461033 block BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039 {}] hdfs.DataStreamer(1731): Error Recovery for BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039 in pipeline [DatanodeInfoWithStorage[127.0.0.1:34489,DS-a7373fce-bbbd-4059-870f-74703d77f511,DISK], DatanodeInfoWithStorage[127.0.0.1:39069,DS-75516e36-daad-4b45-b0c0-0db5f534803e,DISK], DatanodeInfoWithStorage[127.0.0.1:34987,DS-02e043e0-225a-4869-b5f6-49f0d0f9039c,DISK]]: datanode 2(DatanodeInfoWithStorage[127.0.0.1:34987,DS-02e043e0-225a-4869-b5f6-49f0d0f9039c,DISK]) is bad.
2024-12-10T11:01:07,569 WARN [PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:39069, 127.0.0.1:34987] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-10T11:01:07,573 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:46448 [Receiving block BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039] {}] datanode.BlockReceiver(316): Block 1073741863 has not released the reserved bytes. Releasing 2097067 bytes as part of close.
2024-12-10T11:01:07,573 WARN [PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:34987] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-10T11:01:07,574 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:55602 [Receiving block BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039] {}] datanode.BlockReceiver(316): Block 1073741863 has not released the reserved bytes. Releasing 2097067 bytes as part of close.
2024-12-10T11:01:07,577 WARN [DataStreamer for file /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461033 block BP-622500072-172.17.0.2-1733828441734:blk_1073741863_1039 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461033 (inode 16549) Holder DFSClient_NONMAPREDUCE_-980493151_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-10T11:01:07,579 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461033 with renewLeaseKey: DEFAULT_16549
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461033 (inode 16549) Holder DFSClient_NONMAPREDUCE_-980493151_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?] 
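What the repeated invoke/$Proxy46.getAdditionalDatanode frames above show is a java.lang.reflect.Proxy chain: Hadoop's RetryInvocationHandler proxies the namenode protocol (surfacing as $Proxy45), HBase wraps that again in HFileSystem's anonymous InvocationHandler (HFileSystem.java:363, surfacing as $Proxy46), and each layer of wrapping contributes the same reflective frame group to the trace. A minimal, self-contained sketch of that layered-proxy shape (the interface and class names below are illustrative stand-ins, not the real Hadoop/HBase types):

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

public class LayeredProxySketch {

  // Illustrative stand-in for the HDFS ClientProtocol method seen in the trace.
  interface NamenodeProtocol {
    String getAdditionalDatanode(String block);
  }

  // A pass-through InvocationHandler, the same shape as the anonymous handler
  // at HFileSystem.java:363: every call funnels through invoke(), which is why
  // Method.invoke and $ProxyNN frames stack up in the trace above.
  static NamenodeProtocol wrap(NamenodeProtocol delegate, String label) {
    InvocationHandler handler = (proxy, method, args) -> {
      System.out.println(label + " intercepts " + method.getName());
      return method.invoke(delegate, args);
    };
    return (NamenodeProtocol) Proxy.newProxyInstance(
        NamenodeProtocol.class.getClassLoader(),
        new Class<?>[] {NamenodeProtocol.class},
        handler);
  }

  public static void main(String[] args) {
    NamenodeProtocol real = block -> "replacement-datanode-for-" + block;
    // Two layers here (a retry-style layer over a filesystem-style layer);
    // the trace shows the same frame group once per layer of wrapping.
    NamenodeProtocol layered = wrap(wrap(real, "retry layer"), "HFileSystem-style layer");
    System.out.println(layered.getAdditionalDatanode("blk_1073741862_1038"));
  }
}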
2024-12-10T11:01:07,586 WARN [PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:34489] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-622500072-172.17.0.2-1733828441734:1073741862
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-10T11:01:07,587 WARN [ResponseProcessor for block BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038
java.io.IOException: Bad response ERROR for BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038 from datanode DatanodeInfoWithStorage[127.0.0.1:34987,DS-02e043e0-225a-4869-b5f6-49f0d0f9039c,DISK]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-10T11:01:07,587 WARN [DataStreamer for file /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461012 block BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038 {}] hdfs.DataStreamer(1731): Error Recovery for BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038 in pipeline [DatanodeInfoWithStorage[127.0.0.1:39069,DS-75516e36-daad-4b45-b0c0-0db5f534803e,DISK], DatanodeInfoWithStorage[127.0.0.1:34987,DS-02e043e0-225a-4869-b5f6-49f0d0f9039c,DISK], DatanodeInfoWithStorage[127.0.0.1:34489,DS-a7373fce-bbbd-4059-870f-74703d77f511,DISK]]: datanode 1(DatanodeInfoWithStorage[127.0.0.1:34987,DS-02e043e0-225a-4869-b5f6-49f0d0f9039c,DISK]) is bad.
2024-12-10T11:01:07,587 WARN [PacketResponder: BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:34987, 127.0.0.1:34489] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-12-10T11:01:07,588 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_-980493151_22 at /127.0.0.1:55588 [Receiving block BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038] {}] datanode.BlockReceiver(316): Block 1073741862 has not released the reserved bytes. Releasing 2097067 bytes as part of close.
2024-12-10T11:01:07,590 WARN [DataStreamer for file /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461012 block BP-622500072-172.17.0.2-1733828441734:blk_1073741862_1038 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461012 (inode 16548) Holder DFSClient_NONMAPREDUCE_-980493151_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    [... the preceding five-frame GeneratedMethodAccessor79 proxy sequence repeats verbatim; duplicates elided ...]
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-10T11:01:07,591 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461012 with renewLeaseKey: DEFAULT_16548
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/936c8d64-4cdd-076b-f941-5a82aa920134/testFailedToCreateWALIfParentRenamed/wal.1733828461012 (inode 16548) Holder DFSClient_NONMAPREDUCE_-980493151_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    [... remaining namenode, RPC, client, and RetryInvocationHandler frames identical to the DataStreamer Exception stack above; duplicates elided ...]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    [... the preceding five-frame proxy sequence repeats verbatim; duplicates elided ...]
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-12-10T11:01:07,596 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@4932d2e3{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-10T11:01:07,599 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@6613ef7b{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-10T11:01:07,599 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-10T11:01:07,600 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@71ef7580{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-10T11:01:07,600 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@56b1d6f5{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir/,STOPPED}
2024-12-10T11:01:07,606 WARN [BP-622500072-172.17.0.2-1733828441734 heartbeating to localhost/127.0.0.1:44515 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-10T11:01:07,606 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
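Both RemoteExceptions above report the namenode lease check (FSNamesystem.checkLease) refusing getAdditionalDatanode for wal.1733828461012: the WAL's parent directory was renamed while the writer still held the file open, which is the situation the test name testFailedToCreateWALIfParentRenamed describes. A rough sketch of that sequence against the generic Hadoop FileSystem API (the paths and running-cluster setup are assumptions for illustration, not the test's actual code):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class RenameParentUnderWriterSketch {
  public static void main(String[] args) throws Exception {
    // Assumes fs.defaultFS in the Configuration points at a running
    // (mini) HDFS cluster; all paths below are illustrative.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    Path parent = new Path("/tmp/walParent");
    Path wal = new Path(parent, "wal.0000000001");
    fs.mkdirs(parent);

    // Open a writer: the namenode now tracks a lease for this client/path.
    FSDataOutputStream out = fs.create(wal);
    out.write(new byte[] {1, 2, 3});
    out.hflush(); // data reaches the datanode pipeline

    // Rename the parent while the file is still open for write.
    fs.rename(parent, new Path("/tmp/walParentRenamed"));

    // Pipeline recovery and close still reference the original path, so the
    // namenode may reject them with the "File does not exist ... does not
    // have any open files" RemoteException seen in the records above.
    try {
      out.close();
    } catch (Exception e) {
      System.out.println("close failed: " + e.getMessage());
    } finally {
      fs.close();
    }
  }
}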
2024-12-10T11:01:07,606 WARN [BP-622500072-172.17.0.2-1733828441734 heartbeating to localhost/127.0.0.1:44515 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-622500072-172.17.0.2-1733828441734 (Datanode Uuid c1c9f96c-5c9e-4cb6-a75f-4a166c4b5d88) service to localhost/127.0.0.1:44515
2024-12-10T11:01:07,606 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-10T11:01:07,608 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data5/current/BP-622500072-172.17.0.2-1733828441734 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-10T11:01:07,608 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data6/current/BP-622500072-172.17.0.2-1733828441734 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-10T11:01:07,608 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-10T11:01:07,614 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@478a8ea{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-10T11:01:07,614 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@8a88f04{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-10T11:01:07,614 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-10T11:01:07,614 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@6949af4f{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-10T11:01:07,615 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@58722aa{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir/,STOPPED}
2024-12-10T11:01:07,617 WARN [BP-622500072-172.17.0.2-1733828441734 heartbeating to localhost/127.0.0.1:44515 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-10T11:01:07,617 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-10T11:01:07,617 WARN [BP-622500072-172.17.0.2-1733828441734 heartbeating to localhost/127.0.0.1:44515 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-622500072-172.17.0.2-1733828441734 (Datanode Uuid f4751df2-89e4-4cad-9294-95ab54da39f2) service to localhost/127.0.0.1:44515
2024-12-10T11:01:07,617 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-10T11:01:07,617 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data3/current/BP-622500072-172.17.0.2-1733828441734 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-10T11:01:07,618 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data4/current/BP-622500072-172.17.0.2-1733828441734 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-10T11:01:07,618 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-10T11:01:07,622 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@16bc1793{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-12-10T11:01:07,622 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@292b61eb{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-10T11:01:07,622 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-10T11:01:07,622 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@d548ecc{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-10T11:01:07,623 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@4d1f1349{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir/,STOPPED}
2024-12-10T11:01:07,624 WARN [BP-622500072-172.17.0.2-1733828441734 heartbeating to localhost/127.0.0.1:44515 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-12-10T11:01:07,624 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-12-10T11:01:07,624 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-12-10T11:01:07,624 WARN [BP-622500072-172.17.0.2-1733828441734 heartbeating to localhost/127.0.0.1:44515 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-622500072-172.17.0.2-1733828441734 (Datanode Uuid bdf59f75-0b7a-4ef6-95c4-e5947fef9ef8) service to localhost/127.0.0.1:44515
2024-12-10T11:01:07,625 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data1/current/BP-622500072-172.17.0.2-1733828441734 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-10T11:01:07,625 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/cluster_811f9d48-1fbd-6977-8009-5208f6069a70/data/data2/current/BP-622500072-172.17.0.2-1733828441734 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-12-10T11:01:07,625 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-12-10T11:01:07,636 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@23572bf7{hdfs,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-12-10T11:01:07,637 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@69a5d44d{HTTP/1.1, (http/1.1)}{localhost:0}
2024-12-10T11:01:07,637 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-12-10T11:01:07,637 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@7d70b283{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-12-10T11:01:07,637 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@5ea63753{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/b7a52b8c-4e27-89db-865b-861521892b82/hadoop.log.dir/,STOPPED}
2024-12-10T11:01:07,682 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1026): Minicluster is down
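The closing records are routine mini-cluster teardown: each of the three datanodes stops its Jetty web context, heartbeat, and command-processor threads, then the namenode's hdfs web app stops, and HBaseTestingUtil reports the minicluster down. A sketch of the scaffolding that typically brackets such a test (HBaseTestingUtil is the class named in the log; the exact start/stop method names here are an assumption, not verified against this test's source):

import org.apache.hadoop.hbase.HBaseTestingUtil;

public class MiniDfsLifecycleSketch {
  public static void main(String[] args) throws Exception {
    HBaseTestingUtil util = new HBaseTestingUtil();
    // Three datanodes, matching the three "Ending block pool service"
    // records above (Uuids c1c9f96c..., f4751df2..., bdf59f75...).
    util.startMiniDFSCluster(3);
    try {
      // ... a WAL test body would create, roll, and close WALs here ...
    } finally {
      // Tears down the datanodes and then the namenode's web apps and
      // connectors, ending with the "Minicluster is down" record.
      util.shutdownMiniCluster();
    }
  }
}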