2024-11-20 13:55:51,768 main DEBUG Apache Log4j Core 2.17.2 initializing configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-11-20 13:55:51,779 main DEBUG Took 0.009315 seconds to load 1 plugins from package org.apache.hadoop.hbase.logging
2024-11-20 13:55:51,779 main DEBUG PluginManager 'Core' found 129 plugins
2024-11-20 13:55:51,780 main DEBUG PluginManager 'Level' found 0 plugins
2024-11-20 13:55:51,781 main DEBUG PluginManager 'Lookup' found 16 plugins
2024-11-20 13:55:51,782 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,789 main DEBUG PluginManager 'TypeConverter' found 26 plugins
2024-11-20 13:55:51,802 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.util.MBeans", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,804 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,806 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.logging.TestJul2Slf4j", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,806 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,807 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.zookeeper", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,808 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,809 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,809 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,810 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,811 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,812 main DEBUG LoggerConfig$Builder(additivity="false", level="WARN", levelAndRefs="null", name="org.apache.directory", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,812 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,813 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.ipc.FailedServers", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,814 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,815 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsConfig", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,815 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,816 main DEBUG LoggerConfig$Builder(additivity="null", level="INFO", levelAndRefs="null", name="org.apache.hadoop.hbase.ScheduledChore", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,816 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,817 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.regionserver.RSRpcServices", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,817 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,818 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,818 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,819 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,819 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-20 13:55:51,820 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hbase.thirdparty.io.netty.channel", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,820 main DEBUG Building Plugin[name=root, class=org.apache.logging.log4j.core.config.LoggerConfig$RootLogger].
2024-11-20 13:55:51,822 main DEBUG LoggerConfig$RootLogger$Builder(additivity="null", level="null", levelAndRefs="INFO,Console", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-20 13:55:51,824 main DEBUG Building Plugin[name=loggers, class=org.apache.logging.log4j.core.config.LoggersPlugin].
2024-11-20 13:55:51,827 main DEBUG createLoggers(={org.apache.hadoop.metrics2.util.MBeans, org.apache.hadoop.hbase.logging.TestJul2Slf4j, org.apache.zookeeper, org.apache.hadoop.metrics2.impl.MetricsSinkAdapter, org.apache.hadoop.metrics2.impl.MetricsSystemImpl, org.apache.directory, org.apache.hadoop.hbase.ipc.FailedServers, org.apache.hadoop.metrics2.impl.MetricsConfig, org.apache.hadoop.hbase.ScheduledChore, org.apache.hadoop.hbase.regionserver.RSRpcServices, org.apache.hadoop, org.apache.hadoop.hbase, org.apache.hbase.thirdparty.io.netty.channel, root})
2024-11-20 13:55:51,828 main DEBUG Building Plugin[name=layout, class=org.apache.logging.log4j.core.layout.PatternLayout].
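[Editor's note] The logger set assembled above maps one-to-one onto entries in the log4j2.properties file this PropertiesConfiguration was loaded from. A plausible reconstruction of that logger section follows; only the names, levels, and the one additivity=false flag are taken from the builder lines above, while the property key prefixes (logger.mbeans, logger.hbase, ...) are illustrative shorthand, not the file's actual keys:

    logger.mbeans.name = org.apache.hadoop.metrics2.util.MBeans
    logger.mbeans.level = ERROR
    logger.jul2slf4j.name = org.apache.hadoop.hbase.logging.TestJul2Slf4j
    logger.jul2slf4j.level = DEBUG
    logger.zookeeper.name = org.apache.zookeeper
    logger.zookeeper.level = ERROR
    logger.sinkadapter.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
    logger.sinkadapter.level = WARN
    logger.metricssystem.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl
    logger.metricssystem.level = ERROR
    logger.directory.name = org.apache.directory
    logger.directory.level = WARN
    logger.directory.additivity = false
    logger.failedservers.name = org.apache.hadoop.hbase.ipc.FailedServers
    logger.failedservers.level = DEBUG
    logger.metricsconfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig
    logger.metricsconfig.level = WARN
    logger.chore.name = org.apache.hadoop.hbase.ScheduledChore
    logger.chore.level = INFO
    logger.rsrpc.name = org.apache.hadoop.hbase.regionserver.RSRpcServices
    logger.rsrpc.level = DEBUG
    logger.hadoop.name = org.apache.hadoop
    logger.hadoop.level = WARN
    logger.hbase.name = org.apache.hadoop.hbase
    logger.hbase.level = DEBUG
    logger.netty.name = org.apache.hbase.thirdparty.io.netty.channel
    logger.netty.level = DEBUG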
2024-11-20 13:55:51,830 main DEBUG PatternLayout$Builder(pattern="%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n", PatternSelector=null, Configuration(PropertiesConfig), Replace=null, charset="null", alwaysWriteExceptions="null", disableAnsi="null", noConsoleNoAnsi="null", header="null", footer="null")
2024-11-20 13:55:51,830 main DEBUG PluginManager 'Converter' found 47 plugins
2024-11-20 13:55:51,842 main DEBUG Building Plugin[name=appender, class=org.apache.hadoop.hbase.logging.HBaseTestAppender].
2024-11-20 13:55:51,845 main DEBUG HBaseTestAppender$Builder(target="SYSTEM_ERR", maxSize="1G", bufferedIo="null", bufferSize="null", immediateFlush="null", ignoreExceptions="null", PatternLayout(%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n), name="Console", Configuration(PropertiesConfig), Filter=null, ={})
2024-11-20 13:55:51,847 main DEBUG Starting HBaseTestOutputStreamManager SYSTEM_ERR
2024-11-20 13:55:51,848 main DEBUG Building Plugin[name=appenders, class=org.apache.logging.log4j.core.config.AppendersPlugin].
2024-11-20 13:55:51,848 main DEBUG createAppenders(={Console})
2024-11-20 13:55:51,850 main DEBUG Configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f initialized
2024-11-20 13:55:51,850 main DEBUG Starting configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-11-20 13:55:51,850 main DEBUG Started configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f OK.
2024-11-20 13:55:51,851 main DEBUG Shutting down OutputStreamManager SYSTEM_OUT.false.false-1
2024-11-20 13:55:51,852 main DEBUG OutputStream closed
2024-11-20 13:55:51,852 main DEBUG Shut down OutputStreamManager SYSTEM_OUT.false.false-1, all resources released: true
2024-11-20 13:55:51,852 main DEBUG Appender DefaultConsole-1 stopped with status true
2024-11-20 13:55:51,853 main DEBUG Stopped org.apache.logging.log4j.core.config.DefaultConfiguration@61001b64 OK
2024-11-20 13:55:51,926 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6
2024-11-20 13:55:51,928 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=StatusLogger
2024-11-20 13:55:51,929 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=ContextSelector
2024-11-20 13:55:51,930 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=
2024-11-20 13:55:51,931 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.directory
2024-11-20 13:55:51,931 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
2024-11-20 13:55:51,932 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.zookeeper
2024-11-20 13:55:51,932 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.logging.TestJul2Slf4j
2024-11-20 13:55:51,932 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSystemImpl
2024-11-20 13:55:51,933 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.util.MBeans
2024-11-20 13:55:51,933 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase
2024-11-20 13:55:51,933 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop
2024-11-20 13:55:51,934 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ipc.FailedServers
2024-11-20 13:55:51,934 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.regionserver.RSRpcServices
2024-11-20 13:55:51,934 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsConfig
2024-11-20 13:55:51,935 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hbase.thirdparty.io.netty.channel
2024-11-20 13:55:51,935 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ScheduledChore
2024-11-20 13:55:51,936 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Appenders,name=Console
2024-11-20 13:55:51,938 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-11-20 13:55:51,939 main DEBUG Reconfiguration complete for context[name=1dbd16a6] at URI jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-logging/target/hbase-logging-4.0.0-alpha-1-SNAPSHOT-tests.jar!/log4j2.properties (org.apache.logging.log4j.core.LoggerContext@40db2a24) with optional ClassLoader: null
2024-11-20 13:55:51,939 main DEBUG Shutdown hook enabled. Registering a new one.
2024-11-20 13:55:51,940 main DEBUG LoggerContext[name=1dbd16a6, org.apache.logging.log4j.core.LoggerContext@40db2a24] started OK.
2024-11-20T13:55:52,174 DEBUG [main {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b
2024-11-20 13:55:52,177 main DEBUG AsyncLogger.ThreadNameStrategy=UNCACHED (user specified null, default is UNCACHED)
2024-11-20 13:55:52,177 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
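[Editor's note] The appender and root logger built above complete the picture; the "Reconfiguration complete" line confirms the source is the log4j2.properties inside the hbase-logging tests jar. A plausible reconstruction of that fragment, where the "appender.console" key prefix is illustrative but the type (HBase's internal HBaseTestAppender plugin), name, target, maxSize, layout pattern, and the root logger's "INFO,Console" level-and-refs shorthand all come straight from the builder lines:

    appender.console.type = HBaseTestAppender
    appender.console.name = Console
    appender.console.target = SYSTEM_ERR
    appender.console.maxSize = 1G
    appender.console.layout.type = PatternLayout
    appender.console.layout.pattern = %d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n
    rootLogger = INFO,Console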
2024-11-20T13:55:52,186 INFO [main {}] hbase.HBaseClassTestRule(94): Test class org.apache.hadoop.hbase.regionserver.wal.TestFSHLog timeout: 13 mins
2024-11-20T13:55:52,210 INFO [Time-limited test {}] hbase.HBaseZKTestingUtil(84): Created new mini-cluster data directory: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6, deleteOnExit=true
2024-11-20T13:55:52,211 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting test.cache.data to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/test.cache.data in system properties and HBase conf
2024-11-20T13:55:52,211 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.tmp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.tmp.dir in system properties and HBase conf
2024-11-20T13:55:52,212 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.log.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir in system properties and HBase conf
2024-11-20T13:55:52,213 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.local.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/mapreduce.cluster.local.dir in system properties and HBase conf
2024-11-20T13:55:52,214 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.temp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/mapreduce.cluster.temp.dir in system properties and HBase conf
2024-11-20T13:55:52,214 INFO [Time-limited test {}] hbase.HBaseTestingUtil(738): read short circuit is OFF
2024-11-20T13:55:52,298 WARN [Time-limited test {}] util.NativeCodeLoader(60): Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2024-11-20T13:55:52,384 DEBUG [Time-limited test {}] fs.HFileSystem(310): The file system is not a DistributedFileSystem. Skipping on block location reordering
2024-11-20T13:55:52,388 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-labels.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/yarn.node-labels.fs-store.root-dir in system properties and HBase conf
2024-11-20T13:55:52,389 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-attribute.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/yarn.node-attribute.fs-store.root-dir in system properties and HBase conf
2024-11-20T13:55:52,389 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.log-dirs to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/yarn.nodemanager.log-dirs in system properties and HBase conf
2024-11-20T13:55:52,390 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-11-20T13:55:52,390 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.active-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/yarn.timeline-service.entity-group-fs-store.active-dir in system properties and HBase conf
2024-11-20T13:55:52,391 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.done-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/yarn.timeline-service.entity-group-fs-store.done-dir in system properties and HBase conf
2024-11-20T13:55:52,391 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-11-20T13:55:52,392 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/dfs.journalnode.edits.dir in system properties and HBase conf
2024-11-20T13:55:52,392 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.datanode.shared.file.descriptor.paths to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/dfs.datanode.shared.file.descriptor.paths in system properties and HBase conf
2024-11-20T13:55:52,393 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting nfs.dump.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/nfs.dump.dir in system properties and HBase conf
2024-11-20T13:55:52,393 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting java.io.tmpdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/java.io.tmpdir in system properties and HBase conf
2024-11-20T13:55:52,394 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/dfs.journalnode.edits.dir in system properties and HBase conf
2024-11-20T13:55:52,394 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.provided.aliasmap.inmemory.leveldb.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/dfs.provided.aliasmap.inmemory.leveldb.dir in system properties and HBase conf
2024-11-20T13:55:52,395 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting fs.s3a.committer.staging.tmp.path to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/fs.s3a.committer.staging.tmp.path in system properties and HBase conf
2024-11-20T13:55:52,887 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-11-20T13:55:53,509 WARN [Time-limited test {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
2024-11-20T13:55:53,590 INFO [Time-limited test {}] log.Log(170): Logging initialized @2616ms to org.eclipse.jetty.util.log.Slf4jLog
2024-11-20T13:55:53,666 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-20T13:55:53,731 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-11-20T13:55:53,757 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-11-20T13:55:53,758 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-11-20T13:55:53,760 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-11-20T13:55:53,779 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-20T13:55:53,783 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@3cb77234{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir/,AVAILABLE}
2024-11-20T13:55:53,784 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@79c59de9{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-11-20T13:55:53,983 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@637d0fb3{hdfs,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/java.io.tmpdir/jetty-localhost-40581-hadoop-hdfs-3_4_1-tests_jar-_-any-15172055896619528445/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-11-20T13:55:53,990 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@29b30f4f{HTTP/1.1, (http/1.1)}{localhost:40581}
2024-11-20T13:55:53,991 INFO [Time-limited test {}] server.Server(415): Started @3018ms
2024-11-20T13:55:54,025 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-11-20T13:55:54,592 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-20T13:55:54,601 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-11-20T13:55:54,602 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-11-20T13:55:54,602 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-11-20T13:55:54,602 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-11-20T13:55:54,603 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@4b6a27ec{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir/,AVAILABLE}
2024-11-20T13:55:54,604 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@6071838{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-11-20T13:55:54,704 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@6cb5542{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/java.io.tmpdir/jetty-localhost-34637-hadoop-hdfs-3_4_1-tests_jar-_-any-14196300855597491969/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-20T13:55:54,705 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@5ab539e8{HTTP/1.1, (http/1.1)}{localhost:34637}
2024-11-20T13:55:54,705 INFO [Time-limited test {}] server.Server(415): Started @3732ms
2024-11-20T13:55:54,752 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-11-20T13:55:54,860 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-20T13:55:54,867 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-11-20T13:55:54,869 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-11-20T13:55:54,869 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-11-20T13:55:54,869 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-11-20T13:55:54,870 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@30b1edcb{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir/,AVAILABLE}
2024-11-20T13:55:54,871 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@29f30d89{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-11-20T13:55:54,973 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@62fe08ec{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/java.io.tmpdir/jetty-localhost-42589-hadoop-hdfs-3_4_1-tests_jar-_-any-12682832507846129656/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-20T13:55:54,974 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@328d1637{HTTP/1.1, (http/1.1)}{localhost:42589}
2024-11-20T13:55:54,974 INFO [Time-limited test {}] server.Server(415): Started @4001ms
2024-11-20T13:55:54,977 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-11-20T13:55:55,019 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-20T13:55:55,024 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-11-20T13:55:55,026 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-11-20T13:55:55,026 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-11-20T13:55:55,026 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-11-20T13:55:55,027 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@7fb8704f{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir/,AVAILABLE}
2024-11-20T13:55:55,028 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@7585798d{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-11-20T13:55:55,128 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@54549f71{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/java.io.tmpdir/jetty-localhost-46581-hadoop-hdfs-3_4_1-tests_jar-_-any-6974035828665059202/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-20T13:55:55,129 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@6184a766{HTTP/1.1, (http/1.1)}{localhost:46581}
2024-11-20T13:55:55,129 INFO [Time-limited test {}] server.Server(415): Started @4156ms
2024-11-20T13:55:55,131 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
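[Editor's note] The block above is the HDFS mini-cluster coming up: one NameNode webapp (port 40581) followed by three DataNode webapps (34637, 42589, 46581). A hypothetical sketch of the test setup that produces it, using HBase's test utility; the class is HBaseTestingUtil on current master (older branches name it HBaseTestingUtility), and the method names are assumptions for the exact branch:

    import org.apache.hadoop.hbase.HBaseTestingUtil;

    public class MiniDfsSetupSketch {
      public static void main(String[] args) throws Exception {
        HBaseTestingUtil util = new HBaseTestingUtil();
        // Starts the NameNode plus 3 DataNodes; each DataNode brings up the
        // Jetty "logs"/"static" handlers and ServerConnector seen above.
        util.startMiniDFSCluster(3);
        try {
          // WAL tests then run against util.getDFSCluster().getFileSystem().
        } finally {
          util.shutdownMiniDFSCluster();
        }
      }
    }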
2024-11-20T13:55:56,550 WARN [Thread-123 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data2/current/BP-806677084-172.17.0.2-1732110952974/current, will proceed with Du for space computation calculation,
2024-11-20T13:55:56,550 WARN [Thread-122 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data1/current/BP-806677084-172.17.0.2-1732110952974/current, will proceed with Du for space computation calculation,
2024-11-20T13:55:56,550 WARN [Thread-124 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data3/current/BP-806677084-172.17.0.2-1732110952974/current, will proceed with Du for space computation calculation,
2024-11-20T13:55:56,550 WARN [Thread-125 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data4/current/BP-806677084-172.17.0.2-1732110952974/current, will proceed with Du for space computation calculation,
2024-11-20T13:55:56,587 WARN [Thread-136 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data5/current/BP-806677084-172.17.0.2-1732110952974/current, will proceed with Du for space computation calculation,
2024-11-20T13:55:56,589 WARN [Thread-137 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data6/current/BP-806677084-172.17.0.2-1732110952974/current, will proceed with Du for space computation calculation,
2024-11-20T13:55:56,596 WARN [Thread-58 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-11-20T13:55:56,596 WARN [Thread-81 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-11-20T13:55:56,630 WARN [Thread-103 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-11-20T13:55:56,649 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x3c300b6bdb59461b with lease ID 0x853321cc01d5c223: Processing first storage report for DS-462795d0-a958-41d2-8374-ee6bc979008e from datanode DatanodeRegistration(127.0.0.1:42649, datanodeUuid=6829e71d-af75-4b26-9068-535bad204d0c, infoPort=40789, infoSecurePort=0, ipcPort=32857, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974)
2024-11-20T13:55:56,651 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x3c300b6bdb59461b with lease ID 0x853321cc01d5c223: from storage DS-462795d0-a958-41d2-8374-ee6bc979008e node DatanodeRegistration(127.0.0.1:42649, datanodeUuid=6829e71d-af75-4b26-9068-535bad204d0c, infoPort=40789, infoSecurePort=0, ipcPort=32857, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974), blocks: 0, hasStaleStorage: true, processing time: 2 msecs, invalidatedBlocks: 0
2024-11-20T13:55:56,651 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xb2ecb628cb8424cd with lease ID 0x853321cc01d5c222: Processing first storage report for DS-e39fe82d-74f1-44bb-a0fe-1fce0214d083 from datanode DatanodeRegistration(127.0.0.1:40355, datanodeUuid=b8af1ed9-b83f-4048-b76f-83a997761cb7, infoPort=37605, infoSecurePort=0, ipcPort=40125, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974)
2024-11-20T13:55:56,651 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xb2ecb628cb8424cd with lease ID 0x853321cc01d5c222: from storage DS-e39fe82d-74f1-44bb-a0fe-1fce0214d083 node DatanodeRegistration(127.0.0.1:40355, datanodeUuid=b8af1ed9-b83f-4048-b76f-83a997761cb7, infoPort=37605, infoSecurePort=0, ipcPort=40125, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
2024-11-20T13:55:56,652 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xa976a58aa705270c with lease ID 0x853321cc01d5c224: Processing first storage report for DS-6259affe-44bd-460c-8356-5cf6cd2fe296 from datanode DatanodeRegistration(127.0.0.1:37813, datanodeUuid=2dbe0db3-9cba-4e0c-b720-30fac9f070cb, infoPort=36515, infoSecurePort=0, ipcPort=43349, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974)
2024-11-20T13:55:56,652 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xa976a58aa705270c with lease ID 0x853321cc01d5c224: from storage DS-6259affe-44bd-460c-8356-5cf6cd2fe296 node DatanodeRegistration(127.0.0.1:37813, datanodeUuid=2dbe0db3-9cba-4e0c-b720-30fac9f070cb, infoPort=36515, infoSecurePort=0, ipcPort=43349, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
2024-11-20T13:55:56,652 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x3c300b6bdb59461b with lease ID 0x853321cc01d5c223: Processing first storage report for DS-085328d5-7f2f-41f0-96ec-675b15a1529e from datanode DatanodeRegistration(127.0.0.1:42649, datanodeUuid=6829e71d-af75-4b26-9068-535bad204d0c, infoPort=40789, infoSecurePort=0, ipcPort=32857, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974)
2024-11-20T13:55:56,653 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x3c300b6bdb59461b with lease ID 0x853321cc01d5c223: from storage DS-085328d5-7f2f-41f0-96ec-675b15a1529e node DatanodeRegistration(127.0.0.1:42649, datanodeUuid=6829e71d-af75-4b26-9068-535bad204d0c, infoPort=40789, infoSecurePort=0, ipcPort=32857, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-11-20T13:55:56,653 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xb2ecb628cb8424cd with lease ID 0x853321cc01d5c222: Processing first storage report for DS-492b55dc-b364-4831-8dcf-09b82854b9c2 from datanode DatanodeRegistration(127.0.0.1:40355, datanodeUuid=b8af1ed9-b83f-4048-b76f-83a997761cb7, infoPort=37605, infoSecurePort=0, ipcPort=40125, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974)
2024-11-20T13:55:56,653 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xb2ecb628cb8424cd with lease ID 0x853321cc01d5c222: from storage DS-492b55dc-b364-4831-8dcf-09b82854b9c2 node DatanodeRegistration(127.0.0.1:40355, datanodeUuid=b8af1ed9-b83f-4048-b76f-83a997761cb7, infoPort=37605, infoSecurePort=0, ipcPort=40125, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-11-20T13:55:56,654 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xa976a58aa705270c with lease ID 0x853321cc01d5c224: Processing first storage report for DS-b7e6427b-3049-40bb-8e30-2952a1d43c75 from datanode DatanodeRegistration(127.0.0.1:37813, datanodeUuid=2dbe0db3-9cba-4e0c-b720-30fac9f070cb, infoPort=36515, infoSecurePort=0, ipcPort=43349, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974)
2024-11-20T13:55:56,654 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xa976a58aa705270c with lease ID 0x853321cc01d5c224: from storage DS-b7e6427b-3049-40bb-8e30-2952a1d43c75 node DatanodeRegistration(127.0.0.1:37813, datanodeUuid=2dbe0db3-9cba-4e0c-b720-30fac9f070cb, infoPort=36515, infoSecurePort=0, ipcPort=43349, storageInfo=lv=-57;cid=testClusterID;nsid=1897407684;c=1732110952974), blocks: 0, hasStaleStorage: false, processing time: 1 msecs, invalidatedBlocks: 0
2024-11-20T13:55:56,746 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(631): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b
2024-11-20T13:55:56,758 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=160, OpenFileDescriptor=393, MaxFileDescriptor=1048576, SystemLoadAverage=204, ProcessCount=11, AvailableMemoryMB=8114
2024-11-20T13:55:56,775 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:55:56,778 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:55:56,992 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741825_1001 (size=7)
2024-11-20T13:55:56,993 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741825_1001 (size=7)
2024-11-20T13:55:56,993 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741825_1001 (size=7)
2024-11-20T13:55:57,406 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8
2024-11-20T13:55:57,407 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:55:57,409 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:55:57,418 DEBUG [Time-limited test {}] util.ClassSize(228): Using Unsafe to estimate memory layout
2024-11-20T13:55:57,435 INFO [Time-limited test {}] metrics.MetricRegistriesLoader(60): Loaded MetricRegistries class org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl
2024-11-20T13:55:57,437 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-20T13:55:57,445 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/testSyncRunnerIndexOverflow, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/oldWALs, maxLogs=1760
2024-11-20T13:55:57,493 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110957484
2024-11-20T13:55:57,553 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/testSyncRunnerIndexOverflow/wal.1732110957484
2024-11-20T13:55:57,599 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605)]
2024-11-20T13:55:57,656 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:55:57,657 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:55:57,657 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:55:57,657 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:55:57,658 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:55:57,662 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741826_1002 (size=1293)
2024-11-20T13:55:57,663 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741826_1002 (size=1293)
2024-11-20T13:55:57,663 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741826_1002 (size=1293)
2024-11-20T13:55:57,671 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/oldWALs
2024-11-20T13:55:57,675 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732110957484)
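[Editor's note] The testSyncRunnerIndexOverflow block above shows the full FSHLog lifecycle: a writer is created with a three-datanode pipeline, the five sync.N SyncRunner threads are interrupted at close, and the finished file is archived to oldWALs. A minimal sketch of that lifecycle, assuming the simple 2.x-era FSHLog constructor; signatures differ across HBase versions, and TestFSHLog itself goes through its own helper methods:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.regionserver.wal.FSHLog;

    public class WalLifecycleSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path root = new Path("/user/jenkins/test-data");  // hypothetical root dir
        // Creates wal.<timestamp> under <root>/<logDir> and spins up the
        // sync.0..sync.4 SyncRunner threads.
        FSHLog wal = new FSHLog(fs, root, "testSyncRunnerIndexOverflow", conf);
        try {
          wal.sync();   // a sync request handed off to one of the SyncRunners
        } finally {
          wal.close();  // interrupts the runners and archives files to oldWALs
        }
      }
    }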
2024-11-20T13:55:57,687 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=167 (was 160)
Potentially hanging thread: sync.3
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: sync.1
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: LeaseRenewer:jenkins@localhost:39513
    java.base@17.0.11/java.lang.Thread.sleep(Native Method)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:441)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$800(LeaseRenewer.java:77)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:336)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: sync.4
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: sync.0
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: weak-ref-cleaner-strictcontextstorage
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:155)
    java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:176)
    app//io.opentelemetry.context.StrictContextStorage$PendingScopes.run(StrictContextStorage.java:269)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: sync.2
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
 - Thread LEAK? -, OpenFileDescriptor=403 (was 393) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=204 (was 204), ProcessCount=11 (was 11), AvailableMemoryMB=8057 (was 8114)
2024-11-20T13:55:57,697 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=167, OpenFileDescriptor=403, MaxFileDescriptor=1048576, SystemLoadAverage=204, ProcessCount=11, AvailableMemoryMB=8054
2024-11-20T13:55:57,725 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741827_1003 (size=7)
2024-11-20T13:55:57,725 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741827_1003 (size=7)
2024-11-20T13:55:57,725 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741827_1003 (size=7)
2024-11-20T13:55:57,728 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8
2024-11-20T13:55:57,729 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:55:57,731 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:55:57,741 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-20T13:55:57,741 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/testUnflushedSeqIdTracking, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/oldWALs, maxLogs=1760
2024-11-20T13:55:57,743 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110957743
2024-11-20T13:55:57,762 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/testUnflushedSeqIdTracking/wal.1732110957743
2024-11-20T13:55:57,766 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:40789:40789)]
2024-11-20T13:55:57,770 INFO [Time-limited test {}] regionserver.ChunkCreator(472): data poolSizePercentage is less than 0. So not using pool
2024-11-20T13:55:57,770 INFO [Time-limited test {}] regionserver.ChunkCreator(472): index poolSizePercentage is less than 0. So not using pool
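[Editor's note] The next block creates a one-region table for testUnflushedSeqIdTracking with a single column family 'b' on top of the WAL just opened. In test code this is typically a few builder calls plus a static helper; a sketch under the assumption that the branch's helper is HBaseTestingUtil.createRegionAndWAL (helper and class names vary by branch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseTestingUtil;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.client.RegionInfoBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.regionserver.HRegion;

    public class RegionSetupSketch {
      static HRegion openTestRegion(Configuration conf, Path rootDir) throws Exception {
        // Table 'testUnflushedSeqIdTracking' with one family 'b', matching the
        // descriptor echoed in the HRegion(7572) "creating" line below.
        TableDescriptor td = TableDescriptorBuilder
            .newBuilder(TableName.valueOf("testUnflushedSeqIdTracking"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("b"))
            .build();
        RegionInfo ri = RegionInfoBuilder.newBuilder(td.getTableName()).build();
        // Static test helper (assumed, see lead-in): creates the region
        // directory, region info file, and a WAL for the region.
        return HBaseTestingUtil.createRegionAndWAL(ri, rootDir, conf, td);
      }
    }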
So not using pool 2024-11-20T13:55:57,791 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 4a8d4a2dff55c644c33b492062b899da, NAME => 'testUnflushedSeqIdTracking,,1732110957769.4a8d4a2dff55c644c33b492062b899da.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testUnflushedSeqIdTracking', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b 2024-11-20T13:55:57,815 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741829_1005 (size=61) 2024-11-20T13:55:57,815 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741829_1005 (size=61) 2024-11-20T13:55:57,816 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741829_1005 (size=61) 2024-11-20T13:55:57,821 INFO [Time-limited test {}] throttle.StoreHotnessProtector(112): StoreHotnessProtector is disabled. Set hbase.region.store.parallel.put.limit > 0 to enable, which may help mitigate load under heavy write pressure. 2024-11-20T13:55:57,826 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testUnflushedSeqIdTracking,,1732110957769.4a8d4a2dff55c644c33b492062b899da.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-20T13:55:57,875 INFO [StoreOpener-4a8d4a2dff55c644c33b492062b899da-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 4a8d4a2dff55c644c33b492062b899da 2024-11-20T13:55:57,916 INFO [StoreOpener-4a8d4a2dff55c644c33b492062b899da-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 4a8d4a2dff55c644c33b492062b899da columnFamilyName b 2024-11-20T13:55:57,924 DEBUG [StoreOpener-4a8d4a2dff55c644c33b492062b899da-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-20T13:55:57,929 INFO [StoreOpener-4a8d4a2dff55c644c33b492062b899da-1 {}] regionserver.HStore(327): Store=4a8d4a2dff55c644c33b492062b899da/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-20T13:55:57,932 DEBUG [Time-limited 
test {}] regionserver.HRegion(1038): replaying wal for 4a8d4a2dff55c644c33b492062b899da 2024-11-20T13:55:57,937 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testUnflushedSeqIdTracking/4a8d4a2dff55c644c33b492062b899da 2024-11-20T13:55:57,938 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testUnflushedSeqIdTracking/4a8d4a2dff55c644c33b492062b899da 2024-11-20T13:55:57,940 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4d0956f7-6457-131a-3209-66dac248acaf/data/default/testUnflushedSeqIdTracking/4a8d4a2dff55c644c33b492062b899da 2024-11-20T13:55:57,944 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 4a8d4a2dff55c644c33b492062b899da 2024-11-20T13:55:57,944 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 4a8d4a2dff55c644c33b492062b899da 2024-11-20T13:55:57,954 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 4a8d4a2dff55c644c33b492062b899da 2024-11-20T13:55:57,961 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:39513/user/jenkins/test-data/4d0956f7-6457-131a-3209-66dac248acaf/data/default/testUnflushedSeqIdTracking/4a8d4a2dff55c644c33b492062b899da/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-11-20T13:55:57,963 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 4a8d4a2dff55c644c33b492062b899da; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=65032785, jitterRate=-0.03093598783016205}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-11-20T13:55:57,979 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 4a8d4a2dff55c644c33b492062b899da: Writing region info on filesystem at 1732110957849Initializing all the Stores at 1732110957852 (+3 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732110957853 (+1 ms)Cleaning up temporary data from old regions at 1732110957945 (+92 ms)Region opened successfully at 1732110957977 (+32 ms) 2024-11-20T13:56:01,012 INFO [pool-60-thread-2 {}] regionserver.HRegion(2902): Flushing 4a8d4a2dff55c644c33b492062b899da 1/1 column families, dataSize=24 B heapSize=352 B 2024-11-20T13:56:04,103 DEBUG [pool-60-thread-2 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/testUnflushedSeqIdTracking/4a8d4a2dff55c644c33b492062b899da/.tmp/b/108e5a76bc8a486d847e9d9ff4721bac is 28, key is b/b:b/1732110957999/Put/seqid=0 2024-11-20T13:56:04,117 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741830_1006 (size=4945) 2024-11-20T13:56:04,117 INFO [Block report processor {}] 
blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741830_1006 (size=4945) 2024-11-20T13:56:04,118 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741830_1006 (size=4945) 2024-11-20T13:56:04,119 INFO [pool-60-thread-2 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=4 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/testUnflushedSeqIdTracking/4a8d4a2dff55c644c33b492062b899da/.tmp/b/108e5a76bc8a486d847e9d9ff4721bac 2024-11-20T13:56:04,200 DEBUG [pool-60-thread-2 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/testUnflushedSeqIdTracking/4a8d4a2dff55c644c33b492062b899da/.tmp/b/108e5a76bc8a486d847e9d9ff4721bac as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/testUnflushedSeqIdTracking/4a8d4a2dff55c644c33b492062b899da/b/108e5a76bc8a486d847e9d9ff4721bac 2024-11-20T13:56:04,214 INFO [pool-60-thread-2 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/testUnflushedSeqIdTracking/4a8d4a2dff55c644c33b492062b899da/b/108e5a76bc8a486d847e9d9ff4721bac, entries=1, sequenceid=4, filesize=4.8 K 2024-11-20T13:56:04,224 INFO [pool-60-thread-2 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 4a8d4a2dff55c644c33b492062b899da in 3215ms, sequenceid=4, compaction requested=false 2024-11-20T13:56:04,225 DEBUG [pool-60-thread-2 {}] regionserver.HRegion(2603): Flush status journal for 4a8d4a2dff55c644c33b492062b899da: 2024-11-20T13:56:04,225 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(193): Flush result:FLUSHED_NO_COMPACTION_NEEDED 2024-11-20T13:56:04,225 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(194): Flush succeeded:true 2024-11-20T13:56:04,226 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 4a8d4a2dff55c644c33b492062b899da, disabling compactions & flushes 2024-11-20T13:56:04,226 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testUnflushedSeqIdTracking,,1732110957769.4a8d4a2dff55c644c33b492062b899da. 2024-11-20T13:56:04,227 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testUnflushedSeqIdTracking,,1732110957769.4a8d4a2dff55c644c33b492062b899da. 2024-11-20T13:56:04,227 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testUnflushedSeqIdTracking,,1732110957769.4a8d4a2dff55c644c33b492062b899da. after waiting 0 ms 2024-11-20T13:56:04,227 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testUnflushedSeqIdTracking,,1732110957769.4a8d4a2dff55c644c33b492062b899da. 2024-11-20T13:56:04,229 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testUnflushedSeqIdTracking,,1732110957769.4a8d4a2dff55c644c33b492062b899da. 
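The entries above trace the full flush path for this region: the memstore for family b is written to a temporary HFile under .tmp, committed into the family directory, and the region is then closed. Below is a minimal client-side sketch that exercises the same path against a running cluster; the table and family names are taken from the log, while the connection setup and cell value are illustrative, not from the test:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class FlushSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // The log notes StoreHotnessProtector is disabled; per its own message it
    // can be enabled with, e.g.:
    // conf.setInt("hbase.region.store.parallel.put.limit", 10);  // value illustrative
    TableName tn = TableName.valueOf("testUnflushedSeqIdTracking");
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(tn);
         Admin admin = conn.getAdmin()) {
      // One cell in family 'b', mirroring the "key is b/b:b/..." entry above.
      Put put = new Put(Bytes.toBytes("b"));
      put.addColumn(Bytes.toBytes("b"), Bytes.toBytes("b"), Bytes.toBytes("v"));
      table.put(put);
      // Flush the memstore to an HFile (memstore -> .tmp file -> committed store file).
      admin.flush(tn);
    }
  }
}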
2024-11-20T13:56:04,230 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 4a8d4a2dff55c644c33b492062b899da: Waiting for close lock at 1732110964226Disabling compacts and flushes for region at 1732110964226Disabling writes for close at 1732110964227 (+1 ms)Writing region close event to WAL at 1732110964229 (+2 ms)Closed at 1732110964229 2024-11-20T13:56:04,231 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,231 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,231 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,231 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,232 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,235 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741828_1004 (size=875) 2024-11-20T13:56:04,236 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741828_1004 (size=875) 2024-11-20T13:56:04,236 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741828_1004 (size=875) 2024-11-20T13:56:04,239 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/oldWALs 2024-11-20T13:56:04,240 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732110957743) 2024-11-20T13:56:04,246 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=182 (was 167) Potentially hanging thread: pool-60-thread-1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:34278 [Waiting for operation #5] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) 
app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Monitor thread for TaskMonitor java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hbase.monitoring.TaskMonitor$MonitorRunnable.run(TaskMonitor.java:325) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data4 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) 
java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: HBase-Metrics2-1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182) java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: pool-60-thread-2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: org.apache.hadoop.hdfs.PeerCache@56865b03 java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hdfs.PeerCache.run(PeerCache.java:253) app//org.apache.hadoop.hdfs.PeerCache.access$000(PeerCache.java:46) app//org.apache.hadoop.hdfs.PeerCache$1.run(PeerCache.java:124) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data3 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) 
java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=415 (was 403) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=212 (was 204) - SystemLoadAverage LEAK? -, ProcessCount=11 (was 11), AvailableMemoryMB=8019 (was 8054) 2024-11-20T13:56:04,254 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALComparator Thread=182, OpenFileDescriptor=415, MaxFileDescriptor=1048576, SystemLoadAverage=212, ProcessCount=11, AvailableMemoryMB=8019 2024-11-20T13:56:04,269 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741831_1007 (size=7) 2024-11-20T13:56:04,269 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741831_1007 (size=7) 2024-11-20T13:56:04,270 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741831_1007 (size=7) 2024-11-20T13:56:04,271 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8 2024-11-20T13:56:04,272 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:04,275 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:04,284 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-11-20T13:56:04,284 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/01eae31f-e8e4-fc05-592f-c2d87bb50de6/testWALComparator, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/01eae31f-e8e4-fc05-592f-c2d87bb50de6/oldWALs, maxLogs=1760 2024-11-20T13:56:04,286 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964286 2024-11-20T13:56:04,295 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/01eae31f-e8e4-fc05-592f-c2d87bb50de6/testWALComparator/wal.1732110964286 2024-11-20T13:56:04,299 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789)] 2024-11-20T13:56:04,300 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(215): Log obtained is: FSHLog wal:(num 1732110964286) 2024-11-20T13:56:04,303 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
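The "WAL configuration: blocksize=2 MB, rollsize=1 MB, ... maxLogs=1760" line above reflects the standard WAL tuning keys, with rollsize derived as blocksize times the roll multiplier. A sketch of producing those values programmatically; the keys are the usual HBase property names, but whether this test sets them this way is an assumption:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class WalConfSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // blocksize=2 MB in the log line above
    conf.setLong("hbase.regionserver.hlog.blocksize", 2L * 1024 * 1024);
    // rollsize = blocksize * multiplier; 2 MB * 0.5 gives the logged rollsize=1 MB
    conf.setFloat("hbase.regionserver.logroll.multiplier", 0.5f);
    // maxLogs=1760 in the log line above
    conf.setInt("hbase.regionserver.maxlogs", 1760);
    System.out.println(conf.get("hbase.regionserver.maxlogs"));
  }
}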
2024-11-20T13:56:04,303 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=.meta, logDir=hdfs://localhost:39513/user/jenkins/test-data/01eae31f-e8e4-fc05-592f-c2d87bb50de6/testWALComparator, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/01eae31f-e8e4-fc05-592f-c2d87bb50de6/oldWALs, maxLogs=1760 2024-11-20T13:56:04,305 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964305.meta 2024-11-20T13:56:04,314 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/01eae31f-e8e4-fc05-592f-c2d87bb50de6/testWALComparator/wal.1732110964305.meta 2024-11-20T13:56:04,315 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515)] 2024-11-20T13:56:04,317 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,317 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,318 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,318 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,318 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,322 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741832_1008 (size=93) 2024-11-20T13:56:04,322 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741832_1008 (size=93) 2024-11-20T13:56:04,323 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741832_1008 (size=93) 2024-11-20T13:56:04,328 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/01eae31f-e8e4-fc05-592f-c2d87bb50de6/oldWALs 2024-11-20T13:56:04,328 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732110964286) 2024-11-20T13:56:04,329 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,329 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,329 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,329 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,330 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,333 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741833_1009 (size=93) 2024-11-20T13:56:04,333 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741833_1009 (size=93) 2024-11-20T13:56:04,334 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741833_1009 (size=93) 2024-11-20T13:56:04,338 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/01eae31f-e8e4-fc05-592f-c2d87bb50de6/oldWALs 2024-11-20T13:56:04,338 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:.meta(num 1732110964305) 2024-11-20T13:56:04,345 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALComparator Thread=192 (was 182) - Thread LEAK? 
-, OpenFileDescriptor=419 (was 415) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=212 (was 212), ProcessCount=11 (was 11), AvailableMemoryMB=8014 (was 8019) 2024-11-20T13:56:04,352 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=192, OpenFileDescriptor=419, MaxFileDescriptor=1048576, SystemLoadAverage=212, ProcessCount=11, AvailableMemoryMB=8014 2024-11-20T13:56:04,364 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741834_1010 (size=7) 2024-11-20T13:56:04,365 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741834_1010 (size=7) 2024-11-20T13:56:04,365 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741834_1010 (size=7) 2024-11-20T13:56:04,367 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8 2024-11-20T13:56:04,367 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:04,369 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:04,373 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(383): testFindMemStoresEligibleForFlush 2024-11-20T13:56:04,395 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-11-20T13:56:04,396 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs, maxLogs=1 2024-11-20T13:56:04,397 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964397 2024-11-20T13:56:04,405 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964397 2024-11-20T13:56:04,406 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515)] 2024-11-20T13:56:04,410 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964410 2024-11-20T13:56:04,420 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,421 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,421 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,421 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,421 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,422 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964397 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964410 2024-11-20T13:56:04,423 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605)] 2024-11-20T13:56:04,424 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964397 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,426 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741835_1011 (size=283) 2024-11-20T13:56:04,426 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741835_1011 (size=283) 2024-11-20T13:56:04,426 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964426 2024-11-20T13:56:04,427 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741835_1011 (size=283) 2024-11-20T13:56:04,436 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,436 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,436 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,437 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,437 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,437 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964410 
with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964426 2024-11-20T13:56:04,438 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515)] 2024-11-20T13:56:04,439 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964410 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,440 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 55125e480c86496bdf49b8c2f3d1585d[cf1] 2024-11-20T13:56:04,440 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741836_1012 (size=283) 2024-11-20T13:56:04,440 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741836_1012 (size=283) 2024-11-20T13:56:04,440 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741836_1012 (size=283) 2024-11-20T13:56:04,441 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-20T13:56:04,442 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 55125e480c86496bdf49b8c2f3d1585d[cf1] 2024-11-20T13:56:04,443 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 55125e480c86496bdf49b8c2f3d1585d[cf1] 2024-11-20T13:56:04,444 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964443 2024-11-20T13:56:04,453 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,453 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,453 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,454 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,454 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,454 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964426 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964443 2024-11-20T13:56:04,455 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515)] 2024-11-20T13:56:04,455 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964426 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,456 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964397 to hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs/wal.1732110964397 
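With maxLogs=1, every roll trips the "Too many WALs" check above: the WAL identifies the region(s) holding the oldest unflushed edits and forces them to flush so the old file can be archived to oldWALs. The test drives the roll directly on the FSHLog instance; operationally the same roll can be requested through the client Admin API. A hedged sketch, assuming an HBase 2.x Admin that exposes getRegionServers() and rollWALWriter():

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class RollWalSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      // Ask each region server to roll its WAL; old WALs whose edits are all
      // flushed are then moved to the oldWALs archive, as in the entries above.
      for (ServerName sn : admin.getRegionServers()) {
        admin.rollWALWriter(sn);
      }
    }
  }
}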
2024-11-20T13:56:04,456 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-20T13:56:04,457 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964457 2024-11-20T13:56:04,457 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741837_1013 (size=283) 2024-11-20T13:56:04,458 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741837_1013 (size=283) 2024-11-20T13:56:04,461 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964410 to hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs/wal.1732110964410 2024-11-20T13:56:04,461 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741837_1013 (size=283) 2024-11-20T13:56:04,464 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964426 to hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs/wal.1732110964426 2024-11-20T13:56:04,471 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,471 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,471 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,471 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,471 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,472 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964443 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964457 2024-11-20T13:56:04,473 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:40789:40789)] 2024-11-20T13:56:04,473 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964443 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,473 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-20T13:56:04,474 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741838_1014 (size=93) 2024-11-20T13:56:04,475 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741838_1014 (size=93) 2024-11-20T13:56:04,475 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741838_1014 (size=93) 2024-11-20T13:56:04,476 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964443 to 
hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs/wal.1732110964443 2024-11-20T13:56:04,582 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964581 2024-11-20T13:56:04,593 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,593 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,594 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,594 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,594 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,594 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964457 with entries=4, filesize=465 B; new WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964581 2024-11-20T13:56:04,595 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605)] 2024-11-20T13:56:04,595 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964457 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,596 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-20T13:56:04,597 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741839_1015 (size=473) 2024-11-20T13:56:04,598 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741839_1015 (size=473) 2024-11-20T13:56:04,598 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741839_1015 (size=473) 2024-11-20T13:56:04,599 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964599 2024-11-20T13:56:04,608 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,608 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,609 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,609 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,609 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,609 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964581 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964599 2024-11-20T13:56:04,610 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605)] 2024-11-20T13:56:04,610 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964581 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,610 INFO [Time-limited test {}] 
wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 55125e480c86496bdf49b8c2f3d1585d[cf1],7da700307749cb7954b29708ac6c85d6[cf1] 2024-11-20T13:56:04,610 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 55125e480c86496bdf49b8c2f3d1585d[cf1],7da700307749cb7954b29708ac6c85d6[cf1] 2024-11-20T13:56:04,611 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964611 2024-11-20T13:56:04,612 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741840_1016 (size=283) 2024-11-20T13:56:04,613 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741840_1016 (size=283) 2024-11-20T13:56:04,613 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741840_1016 (size=283) 2024-11-20T13:56:04,614 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964457 to hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs/wal.1732110964457 2024-11-20T13:56:04,615 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964581 to hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs/wal.1732110964581 2024-11-20T13:56:04,619 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,619 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,619 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,619 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,620 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,620 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964599 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964611 2024-11-20T13:56:04,621 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:37605:37605)] 2024-11-20T13:56:04,621 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964599 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,621 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-20T13:56:04,623 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741841_1017 (size=93) 2024-11-20T13:56:04,624 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741841_1017 (size=93) 2024-11-20T13:56:04,624 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is 
added to blk_1073741841_1017 (size=93) 2024-11-20T13:56:04,624 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964599 to hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs/wal.1732110964599 2024-11-20T13:56:04,726 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964725 2024-11-20T13:56:04,739 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,739 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,740 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,740 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,740 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,740 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964611 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964725 2024-11-20T13:56:04,741 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:40789:40789)] 2024-11-20T13:56:04,742 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964611 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,742 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-20T13:56:04,742 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964742 2024-11-20T13:56:04,744 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741842_1018 (size=283) 2024-11-20T13:56:04,744 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741842_1018 (size=283) 2024-11-20T13:56:04,745 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741842_1018 (size=283) 2024-11-20T13:56:04,746 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964611 to hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs/wal.1732110964611 2024-11-20T13:56:04,752 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,753 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,753 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,753 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,753 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,753 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964725 with entries=0, filesize=85 B; new WAL 
/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964742 2024-11-20T13:56:04,756 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515)] 2024-11-20T13:56:04,757 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741843_1019 (size=93) 2024-11-20T13:56:04,757 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964725 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,757 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741843_1019 (size=93) 2024-11-20T13:56:04,757 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741843_1019 (size=93) 2024-11-20T13:56:04,758 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964725 to hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs/wal.1732110964725 2024-11-20T13:56:04,762 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964761 2024-11-20T13:56:04,770 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,770 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,770 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,771 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,771 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,771 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964742 with entries=6, filesize=709 B; new WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964761 2024-11-20T13:56:04,772 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:40789:40789)] 2024-11-20T13:56:04,772 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964742 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,774 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741844_1020 (size=717) 2024-11-20T13:56:04,774 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741844_1020 (size=717) 2024-11-20T13:56:04,775 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964774 2024-11-20T13:56:04,775 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741844_1020 (size=717) 2024-11-20T13:56:04,782 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 
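The entries that follow show the eligibility check narrowing per column family: first cf1, cf3 and cf2 of region 4f9fd8d26f5e2c9c9e38535b12b37b92 are forced to flush, then only cf3 and cf2 once cf1's edits are no longer the oldest in the WAL. Recent client APIs expose the same per-family granularity; a sketch assuming an HBase version whose Admin interface has the per-family flush overload (the table and family names here are placeholders, not from the log):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.util.Bytes;

public class PerFamilyFlushSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      // Flush a single column family, leaving the other families' memstores
      // intact: the per-family granularity the partial-flush entries rely on.
      admin.flush(TableName.valueOf("myTable"), Bytes.toBytes("cf1"));
    }
  }
}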
2024-11-20T13:56:04,783 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,783 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,783 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,783 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,783 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964761 with entries=2, filesize=293 B; new WAL /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964774 2024-11-20T13:56:04,784 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605)] 2024-11-20T13:56:04,784 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:39513/user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/testFindMemStoresEligibleForFlush/wal.1732110964761 is not closed yet, will try archiving it next time 2024-11-20T13:56:04,784 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 4f9fd8d26f5e2c9c9e38535b12b37b92[cf1,cf3,cf2] 2024-11-20T13:56:04,785 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-20T13:56:04,785 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 4f9fd8d26f5e2c9c9e38535b12b37b92[cf3,cf2] 2024-11-20T13:56:04,786 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741845_1021 (size=301) 2024-11-20T13:56:04,786 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741845_1021 (size=301) 2024-11-20T13:56:04,786 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,786 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,786 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,787 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,787 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,788 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741845_1021 (size=301) 2024-11-20T13:56:04,791 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741846_1022 (size=93) 2024-11-20T13:56:04,791 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741846_1022 (size=93) 2024-11-20T13:56:04,792 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741846_1022 (size=93) 2024-11-20T13:56:04,799 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 3 WAL file(s) to /user/jenkins/test-data/bc1f7264-4f3b-0a7d-e7b6-00ed36cbc776/oldWALs 2024-11-20T13:56:04,799 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732110964774) 2024-11-20T13:56:04,806 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: 
regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=197 (was 192) - Thread LEAK? -, OpenFileDescriptor=419 (was 419), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=212 (was 212), ProcessCount=11 (was 11), AvailableMemoryMB=8003 (was 8014) 2024-11-20T13:56:04,812 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=197, OpenFileDescriptor=419, MaxFileDescriptor=1048576, SystemLoadAverage=212, ProcessCount=11, AvailableMemoryMB=8002 2024-11-20T13:56:04,824 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741847_1023 (size=7) 2024-11-20T13:56:04,824 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741847_1023 (size=7) 2024-11-20T13:56:04,825 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741847_1023 (size=7) 2024-11-20T13:56:04,827 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8 2024-11-20T13:56:04,827 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:04,829 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:04,835 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-11-20T13:56:04,835 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/67d99645-6f54-639a-4aed-48d79480f592/testRollWriterForClosedWAL, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/67d99645-6f54-639a-4aed-48d79480f592/testRollWriterForClosedWAL, maxLogs=1760 2024-11-20T13:56:04,836 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964836 2024-11-20T13:56:04,844 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/67d99645-6f54-639a-4aed-48d79480f592/testRollWriterForClosedWAL/wal.1732110964836 2024-11-20T13:56:04,847 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789)] 2024-11-20T13:56:04,849 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,849 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,849 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,849 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,849 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:04,852 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741848_1024 (size=93) 2024-11-20T13:56:04,853 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741848_1024 (size=93) 2024-11-20T13:56:04,853 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741848_1024 (size=93) 2024-11-20T13:56:04,856 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/67d99645-6f54-639a-4aed-48d79480f592/testRollWriterForClosedWAL 2024-11-20T13:56:04,856 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732110964836) 2024-11-20T13:56:04,863 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=202 (was 197) - Thread LEAK? 
-, OpenFileDescriptor=419 (was 419), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=212 (was 212), ProcessCount=11 (was 11), AvailableMemoryMB=8002 (was 8002) 2024-11-20T13:56:04,872 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=202, OpenFileDescriptor=419, MaxFileDescriptor=1048576, SystemLoadAverage=212, ProcessCount=11, AvailableMemoryMB=8002 2024-11-20T13:56:04,886 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741849_1025 (size=7) 2024-11-20T13:56:04,886 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741849_1025 (size=7) 2024-11-20T13:56:04,887 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741849_1025 (size=7) 2024-11-20T13:56:04,889 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8 2024-11-20T13:56:04,889 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:04,891 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:04,898 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
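The next creation entry builds a two-family table named 'table' with families 'a' and 'b', each with VERSIONS => '1'. A client-side equivalent using the public descriptor builders; the table and family names come from the log, everything else is illustrative:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class TableDescriptorSketch {
  public static void main(String[] args) {
    // Two column families 'a' and 'b'; setMaxVersions(1) matches the
    // VERSIONS => '1' attribute in the logged table descriptor.
    TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("table"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("a"))
            .setMaxVersions(1).build())
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("b"))
            .setMaxVersions(1).build())
        .build();
    System.out.println(td);
  }
}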
2024-11-20T13:56:04,898 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/testMaxFlushedSequenceIdGoBackwards, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/oldWALs, maxLogs=1760
2024-11-20T13:56:04,899 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110964899
2024-11-20T13:56:04,907 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/testMaxFlushedSequenceIdGoBackwards/wal.1732110964899
2024-11-20T13:56:04,909 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605)]
2024-11-20T13:56:04,911 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 541fd9a760623cd0ff6c5b534f039dd5, NAME => 'table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b
2024-11-20T13:56:04,922 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741851_1027 (size=40)
2024-11-20T13:56:04,922 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741851_1027 (size=40)
2024-11-20T13:56:04,923 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741851_1027 (size=40)
2024-11-20T13:56:04,924 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-11-20T13:56:04,926 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:04,928 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 541fd9a760623cd0ff6c5b534f039dd5 columnFamilyName a
2024-11-20T13:56:04,929 DEBUG [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-20T13:56:04,929 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] regionserver.HStore(327): Store=541fd9a760623cd0ff6c5b534f039dd5/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-20T13:56:04,930 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:04,932 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 541fd9a760623cd0ff6c5b534f039dd5 columnFamilyName b
2024-11-20T13:56:04,932 DEBUG [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-20T13:56:04,933 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] regionserver.HStore(327): Store=541fd9a760623cd0ff6c5b534f039dd5/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-20T13:56:04,933 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:04,935 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/table/541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:04,935 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/table/541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:04,936 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/d6f26f73-a935-1519-ec79-d2501d34d196/data/default/table/541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:04,937 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:04,937 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:04,938 DEBUG [Time-limited test {}] regionserver.FlushLargeStoresPolicy(65): No hbase.hregion.percolumnfamilyflush.size.lower.bound set in table table descriptor;using region.getMemStoreFlushHeapSize/# of families (64.0 M)) instead.
2024-11-20T13:56:04,940 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:04,944 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:39513/user/jenkins/test-data/d6f26f73-a935-1519-ec79-d2501d34d196/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-11-20T13:56:04,945 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 541fd9a760623cd0ff6c5b534f039dd5; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=65698335, jitterRate=-0.02101851999759674}}}, FlushLargeStoresPolicy{flushSizeLowerBound=67108864}
2024-11-20T13:56:04,948 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 541fd9a760623cd0ff6c5b534f039dd5: Writing region info on filesystem at 1732110964924Initializing all the Stores at 1732110964925 (+1 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732110964925Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732110964926 (+1 ms)Cleaning up temporary data from old regions at 1732110964937 (+11 ms)Region opened successfully at 1732110964948 (+11 ms)
2024-11-20T13:56:04,948 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 541fd9a760623cd0ff6c5b534f039dd5, disabling compactions & flushes
2024-11-20T13:56:04,949 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.
2024-11-20T13:56:04,949 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.
2024-11-20T13:56:04,949 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5. after waiting 0 ms
2024-11-20T13:56:04,949 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.
2024-11-20T13:56:04,949 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.
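The FlushLargeStoresPolicy line above documents a fallback worth spelling out: with no hbase.hregion.percolumnfamilyflush.size.lower.bound configured, the per-family flush lower bound is derived by dividing the region's memstore flush size by the number of column families. The arithmetic below reproduces the numbers in this log (a 128 MB flush size and the two families 'a' and 'b'); it is a worked example, not HBase's actual code.

    public final class FlushLowerBoundSketch {
        public static void main(String[] args) {
            long memstoreFlushSize = 128L * 1024 * 1024; // region flush size, 128 MB here
            int numFamilies = 2;                         // column families 'a' and 'b'
            long flushSizeLowerBound = memstoreFlushSize / numFamilies;
            // Prints 67108864, i.e. the "(64.0 M)" and the
            // FlushLargeStoresPolicy{flushSizeLowerBound=67108864} seen above.
            System.out.println(flushSizeLowerBound);
        }
    }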
2024-11-20T13:56:04,950 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 541fd9a760623cd0ff6c5b534f039dd5: Waiting for close lock at 1732110964948Disabling compacts and flushes for region at 1732110964948Disabling writes for close at 1732110964949 (+1 ms)Writing region close event to WAL at 1732110964949Closed at 1732110964949
2024-11-20T13:56:05,348 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 541fd9a760623cd0ff6c5b534f039dd5, NAME => 'table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.', STARTKEY => '', ENDKEY => ''}
2024-11-20T13:56:05,367 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,368 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-11-20T13:56:05,370 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,371 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,374 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,376 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 541fd9a760623cd0ff6c5b534f039dd5 columnFamilyName a
2024-11-20T13:56:05,376 DEBUG [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-20T13:56:05,377 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] regionserver.HStore(327): Store=541fd9a760623cd0ff6c5b534f039dd5/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-20T13:56:05,378 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,379 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 541fd9a760623cd0ff6c5b534f039dd5 columnFamilyName b
2024-11-20T13:56:05,379 DEBUG [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-20T13:56:05,380 INFO [StoreOpener-541fd9a760623cd0ff6c5b534f039dd5-1 {}] regionserver.HStore(327): Store=541fd9a760623cd0ff6c5b534f039dd5/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-20T13:56:05,381 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,382 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/table/541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,383 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/table/541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,386 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/d6f26f73-a935-1519-ec79-d2501d34d196/data/default/table/541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,387 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,388 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,391 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,393 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 541fd9a760623cd0ff6c5b534f039dd5; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=74138866, jitterRate=0.10475519299507141}}}, org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL$FlushSpecificStoresPolicy@7d07a8c1
2024-11-20T13:56:05,393 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for 541fd9a760623cd0ff6c5b534f039dd5
2024-11-20T13:56:05,397 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 541fd9a760623cd0ff6c5b534f039dd5: Running coprocessor pre-open hook at 1732110965372Writing region info on filesystem at 1732110965372Initializing all the Stores at 1732110965373 (+1 ms)Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732110965374 (+1 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732110965374Cleaning up temporary data from old regions at 1732110965388 (+14 ms)Running coprocessor post-open hooks at 1732110965393 (+5 ms)Region opened successfully at 1732110965397 (+4 ms)
2024-11-20T13:56:08,418 INFO [pool-78-thread-1 {}] regionserver.HRegion(2902): Flushing 541fd9a760623cd0ff6c5b534f039dd5 2/2 column families, dataSize=96 B heapSize=896 B
2024-11-20T13:56:10,884 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties
2024-11-20T13:56:11,448 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/a/b9ab80eea318464d82c43e477919f1ae is 28, key is a/a:a/1732110965405/Put/seqid=0
2024-11-20T13:56:11,457 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741852_1028 (size=4945)
2024-11-20T13:56:11,458 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741852_1028 (size=4945)
2024-11-20T13:56:11,458 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741852_1028 (size=4945)
2024-11-20T13:56:11,459 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/a/b9ab80eea318464d82c43e477919f1ae
2024-11-20T13:56:11,485 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/b/782624a7f9ef420fb925f391d366fe49 is 28, key is a/b:b/1732110965405/Put/seqid=0
2024-11-20T13:56:11,493 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741853_1029 (size=4945)
2024-11-20T13:56:11,493 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741853_1029 (size=4945)
2024-11-20T13:56:11,494 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741853_1029 (size=4945)
2024-11-20T13:56:11,494 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/b/782624a7f9ef420fb925f391d366fe49
2024-11-20T13:56:11,505 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/a/b9ab80eea318464d82c43e477919f1ae as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/a/b9ab80eea318464d82c43e477919f1ae
2024-11-20T13:56:11,514 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/a/b9ab80eea318464d82c43e477919f1ae, entries=1, sequenceid=6, filesize=4.8 K
2024-11-20T13:56:11,517 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/b/782624a7f9ef420fb925f391d366fe49 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/b/782624a7f9ef420fb925f391d366fe49
2024-11-20T13:56:11,527 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/b/782624a7f9ef420fb925f391d366fe49, entries=1, sequenceid=6, filesize=4.8 K
2024-11-20T13:56:11,529 INFO [pool-78-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~96 B/96, heapSize ~864 B/864, currentSize=0 B/0 for 541fd9a760623cd0ff6c5b534f039dd5 in 3111ms, sequenceid=6, compaction requested=false
2024-11-20T13:56:11,529 DEBUG [pool-78-thread-1 {}] regionserver.HRegion(2603): Flush status journal for 541fd9a760623cd0ff6c5b534f039dd5:
2024-11-20T13:56:11,529 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(676): Flush result:FLUSHED_NO_COMPACTION_NEEDED
2024-11-20T13:56:11,530 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(677): Flush succeeded:true
2024-11-20T13:56:11,535 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 541fd9a760623cd0ff6c5b534f039dd5 1/2 column families, dataSize=48 B heapSize=704 B; a={dataSize=24 B, heapSize=352 B, offHeapSize=0 B}
2024-11-20T13:56:11,542 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/a/9a3fbb058253405aa01a7431fef85f25 is 28, key is a/a:a/1732110965405/Put/seqid=0
2024-11-20T13:56:11,550 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741854_1030 (size=4945)
2024-11-20T13:56:11,550 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741854_1030 (size=4945)
2024-11-20T13:56:11,551 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741854_1030 (size=4945)
2024-11-20T13:56:11,551 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=10 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/a/9a3fbb058253405aa01a7431fef85f25
2024-11-20T13:56:11,561 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/a/9a3fbb058253405aa01a7431fef85f25 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/a/9a3fbb058253405aa01a7431fef85f25
2024-11-20T13:56:11,571 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/a/9a3fbb058253405aa01a7431fef85f25, entries=1, sequenceid=10, filesize=4.8 K
2024-11-20T13:56:11,573 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=24 B/24 for 541fd9a760623cd0ff6c5b534f039dd5 in 39ms, sequenceid=10, compaction requested=false
2024-11-20T13:56:11,573 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for 541fd9a760623cd0ff6c5b534f039dd5:
2024-11-20T13:56:11,575 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 541fd9a760623cd0ff6c5b534f039dd5, disabling compactions & flushes
2024-11-20T13:56:11,575 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.
2024-11-20T13:56:11,575 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.
2024-11-20T13:56:11,575 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5. after waiting 0 ms
2024-11-20T13:56:11,575 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.
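The flush sequence above follows a two-phase pattern: the flusher first writes the new HFile under the region's .tmp directory ("Flushed memstore ... to=.../.tmp/a/..."), then commits it by renaming it into the column-family directory ("Committing .../.tmp/a/<file> as .../a/<file>"), so readers never observe a half-written store file. A minimal local-filesystem sketch of that commit step, with illustrative names (HBase's real HRegionFileSystem does this against HDFS, where rename is atomic):

    import java.io.IOException;
    import java.nio.file.*;

    final class FlushCommitSketch {
        // Move a completed HFile from the region's .tmp area into the
        // column-family directory; mirrors the "Committing ... as ..." lines.
        static Path commit(Path tmpFile, Path familyDir) throws IOException {
            Files.createDirectories(familyDir);
            Path dst = familyDir.resolve(tmpFile.getFileName());
            return Files.move(tmpFile, dst, StandardCopyOption.ATOMIC_MOVE);
        }
    }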
2024-11-20T13:56:11,575 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 541fd9a760623cd0ff6c5b534f039dd5 2/2 column families, dataSize=24 B heapSize=608 B
2024-11-20T13:56:11,582 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/b/17668b9526bd4ef6bc0932d711a529e7 is 28, key is a/b:b/1732110965405/Put/seqid=0
2024-11-20T13:56:11,590 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741855_1031 (size=4945)
2024-11-20T13:56:11,590 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741855_1031 (size=4945)
2024-11-20T13:56:11,590 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741855_1031 (size=4945)
2024-11-20T13:56:11,591 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=13 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/b/17668b9526bd4ef6bc0932d711a529e7
2024-11-20T13:56:11,602 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/.tmp/b/17668b9526bd4ef6bc0932d711a529e7 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/b/17668b9526bd4ef6bc0932d711a529e7
2024-11-20T13:56:11,612 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/b/17668b9526bd4ef6bc0932d711a529e7, entries=1, sequenceid=13, filesize=4.8 K
2024-11-20T13:56:11,614 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 541fd9a760623cd0ff6c5b534f039dd5 in 39ms, sequenceid=13, compaction requested=false
2024-11-20T13:56:11,620 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:39513/user/jenkins/test-data/d6f26f73-a935-1519-ec79-d2501d34d196/data/default/table/541fd9a760623cd0ff6c5b534f039dd5/recovered.edits/16.seqid, newMaxSeqId=16, maxSeqId=1
2024-11-20T13:56:11,622 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5.
2024-11-20T13:56:11,622 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 541fd9a760623cd0ff6c5b534f039dd5: Waiting for close lock at 1732110971575Running coprocessor pre-close hooks at 1732110971575Disabling compacts and flushes for region at 1732110971575Disabling writes for close at 1732110971575Obtaining lock to block concurrent updates at 1732110971575Preparing flush snapshotting stores in 541fd9a760623cd0ff6c5b534f039dd5 at 1732110971575Finished memstore snapshotting table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5., syncing WAL and waiting on mvcc, flushsize=dataSize=24, getHeapSize=576, getOffHeapSize=0, getCellsCount=1 at 1732110971576 (+1 ms)Flushing stores of table,,1732110964910.541fd9a760623cd0ff6c5b534f039dd5. at 1732110971577 (+1 ms)Flushing 541fd9a760623cd0ff6c5b534f039dd5/b: creating writer at 1732110971578 (+1 ms)Flushing 541fd9a760623cd0ff6c5b534f039dd5/b: appending metadata at 1732110971581 (+3 ms)Flushing 541fd9a760623cd0ff6c5b534f039dd5/b: closing flushed file at 1732110971581Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@146704ac: reopening flushed file at 1732110971601 (+20 ms)Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 541fd9a760623cd0ff6c5b534f039dd5 in 39ms, sequenceid=13, compaction requested=false at 1732110971614 (+13 ms)Writing region close event to WAL at 1732110971615 (+1 ms)Running coprocessor post-close hooks at 1732110971621 (+6 ms)Closed at 1732110971621
2024-11-20T13:56:11,622 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:11,623 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:11,623 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:11,623 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:11,623 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:11,626 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741850_1026 (size=2357)
2024-11-20T13:56:11,627 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741850_1026 (size=2357)
2024-11-20T13:56:11,627 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741850_1026 (size=2357)
2024-11-20T13:56:11,630 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/oldWALs
2024-11-20T13:56:11,631 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732110964899)
2024-11-20T13:56:11,640 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=210 (was 202)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data6
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:38848 [Waiting for operation #9]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data5
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:34440 [Waiting for operation #9]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Timer for 'HBase' metrics system
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    java.base@17.0.11/java.util.TimerThread.mainLoop(Timer.java:563)
    java.base@17.0.11/java.util.TimerThread.run(Timer.java:516)
- Thread LEAK? -, OpenFileDescriptor=421 (was 419) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=203 (was 212), ProcessCount=11 (was 11), AvailableMemoryMB=7945 (was 8002)
2024-11-20T13:56:11,647 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=210, OpenFileDescriptor=421, MaxFileDescriptor=1048576, SystemLoadAverage=203, ProcessCount=11, AvailableMemoryMB=7945
2024-11-20T13:56:11,659 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741856_1032 (size=7)
2024-11-20T13:56:11,659 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741856_1032 (size=7)
2024-11-20T13:56:11,659 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741856_1032 (size=7)
2024-11-20T13:56:11,661 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8
2024-11-20T13:56:11,661 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:56:11,663 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:56:11,672 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(244): No decryptEncryptedDataEncryptionKey method in DFSClient, should be hadoop version with HDFS-12396
java.lang.NoSuchMethodException: org.apache.hadoop.hdfs.DFSClient.decryptEncryptedDataEncryptionKey(org.apache.hadoop.fs.FileEncryptionInfo)
    at java.lang.Class.getDeclaredMethod(Class.java:2675) ~[?:?]
    at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelperWithoutHDFS12396(FanOutOneBlockAsyncDFSOutputSaslHelper.java:183) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT]
    at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelper(FanOutOneBlockAsyncDFSOutputSaslHelper.java:242) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT]
    at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.<clinit>(FanOutOneBlockAsyncDFSOutputSaslHelper.java:253) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT]
    at java.lang.Class.forName0(Native Method) ~[?:?]
    at java.lang.Class.forName(Class.java:375) ~[?:?]
    at org.apache.hadoop.hbase.wal.AsyncFSWALProvider.load(AsyncFSWALProvider.java:150) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.WALFactory.getProviderClass(WALFactory.java:174) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:262) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:214) ~[classes/:?]
    at org.apache.hadoop.hbase.HBaseTestingUtil.createWal(HBaseTestingUtil.java:2160) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT]
    at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2205) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT]
    at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2169) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFlushSequenceIdIsGreaterThanAllEditsInHFile(AbstractTestFSWAL.java:425) ~[test-classes/:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
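The NoSuchMethodException above is expected, not a failure: as the accompanying message says, the helper probes reflectively for DFSClient.decryptEncryptedDataEncryptionKey to decide whether it is running on a Hadoop with HDFS-12396, and falls back to the older code path when the method is absent. A generic sketch of that capability-probe pattern follows; the probe method itself is illustrative, only the class and method names it checks come from the log.

    public final class CapabilityProbeSketch {
        // Returns true when the optional method exists; callers take the newer
        // code path on true and the pre-HDFS-12396 fallback on false.
        static boolean hasMethod(String className, String methodName,
                                 Class<?>... parameterTypes) {
            try {
                Class.forName(className).getDeclaredMethod(methodName, parameterTypes);
                return true;
            } catch (ClassNotFoundException | NoSuchMethodException e) {
                return false;
            }
        }
    }

Probing once at class initialization (the <clinit> frame above) means the cost of reflection is paid a single time and every later WAL creation reuses the selected helper.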
2024-11-20T13:56:11,676 INFO [Time-limited test {}] wal.WALFactory(196): Instantiating WALProvider of type class org.apache.hadoop.hbase.wal.AsyncFSWALProvider
2024-11-20T13:56:11,679 DEBUG [Time-limited test {}] channel.MultithreadEventLoopGroup(44): -Dio.netty.eventLoopThreads: 16
2024-11-20T13:56:11,690 DEBUG [Time-limited test {}] nio.NioEventLoop(110): -Dio.netty.noKeySetOptimization: false
2024-11-20T13:56:11,690 DEBUG [Time-limited test {}] nio.NioEventLoop(111): -Dio.netty.selectorAutoRebuildThreshold: 512
2024-11-20T13:56:11,706 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor defaultMonitorName
2024-11-20T13:56:11,710 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-20T13:56:11,710 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=hregion-63385543, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/WALs/hregion-63385543, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/oldWALs, maxLogs=1760
2024-11-20T13:56:11,730 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputHelper(524): When create output stream for /user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/WALs/hregion-63385543/hregion-63385543.1732110971711, exclude list is [], retry=0
2024-11-20T13:56:11,745 DEBUG [Time-limited test {}] channel.DefaultChannelId(84): -Dio.netty.processId: 67350 (auto-detected)
2024-11-20T13:56:11,749 DEBUG [Time-limited test {}] channel.DefaultChannelId(106): -Dio.netty.machineId: 02:42:ac:ff:fe:11:00:02 (auto-detected)
2024-11-20T13:56:11,772 DEBUG [AsyncFSWAL-1-3 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:37813,DS-6259affe-44bd-460c-8356-5cf6cd2fe296,DISK]
2024-11-20T13:56:11,772 DEBUG [AsyncFSWAL-1-2 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:42649,DS-462795d0-a958-41d2-8374-ee6bc979008e,DISK]
2024-11-20T13:56:11,772 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:40355,DS-e39fe82d-74f1-44bb-a0fe-1fce0214d083,DISK]
2024-11-20T13:56:11,776 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.ProtobufDecoder(117): Hadoop 3.3 and above shades protobuf.
2024-11-20T13:56:11,807 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/WALs/hregion-63385543/hregion-63385543.1732110971711
2024-11-20T13:56:11,808 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new AsyncFSWAL writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515)]
2024-11-20T13:56:11,808 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 5248ecc479fb1783386b79fa5b2aa0f8, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testFlushSequenceIdIsGreaterThanAllEditsInHFile', {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de
2024-11-20T13:56:11,821 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741858_1034 (size=82)
2024-11-20T13:56:11,822 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741858_1034 (size=82)
2024-11-20T13:56:11,822 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741858_1034 (size=82)
2024-11-20T13:56:11,823 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-11-20T13:56:11,825 INFO [StoreOpener-5248ecc479fb1783386b79fa5b2aa0f8-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,828 INFO [StoreOpener-5248ecc479fb1783386b79fa5b2aa0f8-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 5248ecc479fb1783386b79fa5b2aa0f8 columnFamilyName f
2024-11-20T13:56:11,828 DEBUG [StoreOpener-5248ecc479fb1783386b79fa5b2aa0f8-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-20T13:56:11,829 INFO [StoreOpener-5248ecc479fb1783386b79fa5b2aa0f8-1 {}] regionserver.HStore(327): Store=5248ecc479fb1783386b79fa5b2aa0f8/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-20T13:56:11,829 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,830 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,831 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,832 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,832 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,832 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,836 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,840 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-11-20T13:56:11,841 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 5248ecc479fb1783386b79fa5b2aa0f8; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=68963743, jitterRate=0.027639850974082947}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-11-20T13:56:11,846 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 5248ecc479fb1783386b79fa5b2aa0f8: Writing region info on filesystem at 1732110971823Initializing all the Stores at 1732110971824 (+1 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732110971825 (+1 ms)Cleaning up temporary data from old regions at 1732110971832 (+7 ms)Region opened successfully at 1732110971846 (+14 ms)
2024-11-20T13:56:11,846 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 5248ecc479fb1783386b79fa5b2aa0f8, disabling compactions & flushes
2024-11-20T13:56:11,846 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.
2024-11-20T13:56:11,846 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.
2024-11-20T13:56:11,847 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8. after waiting 0 ms
2024-11-20T13:56:11,847 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.
2024-11-20T13:56:11,847 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.
2024-11-20T13:56:11,847 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 5248ecc479fb1783386b79fa5b2aa0f8: Waiting for close lock at 1732110971846Disabling compacts and flushes for region at 1732110971846Disabling writes for close at 1732110971847 (+1 ms)Writing region close event to WAL at 1732110971847Closed at 1732110971847
2024-11-20T13:56:11,856 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741857_1033 (size=93)
2024-11-20T13:56:11,856 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741857_1033 (size=93)
2024-11-20T13:56:11,857 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741857_1033 (size=93)
2024-11-20T13:56:11,860 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/oldWALs
2024-11-20T13:56:11,860 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: AsyncFSWAL hregion-63385543:(num 1732110971711)
2024-11-20T13:56:11,864 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
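The "Moved 1 WAL file(s) to .../oldWALs" followed by "Closed WAL" above shows the archive step: when a WAL is closed (or rolled past its limit), its finished segment files are moved out of the active log directory into the archive directory, and the log reports the count. A local-filesystem sketch of that step, under the assumption that archiving is a directory-to-directory rename (names illustrative; HBase does this on HDFS with additional bookkeeping):

    import java.io.IOException;
    import java.nio.file.*;
    import java.util.stream.Stream;

    final class WalArchiveSketch {
        // Move every finished WAL segment from logDir into archiveDir and
        // return how many were moved, as in "Moved 1 WAL file(s) to ...".
        static int archiveAll(Path logDir, Path archiveDir) throws IOException {
            Files.createDirectories(archiveDir);
            int moved = 0;
            try (Stream<Path> segments = Files.list(logDir)) {
                for (Path seg : (Iterable<Path>) segments::iterator) {
                    Files.move(seg, archiveDir.resolve(seg.getFileName()));
                    moved++;
                }
            }
            return moved;
        }
    }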
2024-11-20T13:56:11,864 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/testFlushSequenceIdIsGreaterThanAllEditsInHFile, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/testFlushSequenceIdIsGreaterThanAllEditsInHFile, maxLogs=1760
2024-11-20T13:56:11,865 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110971865
2024-11-20T13:56:11,878 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/testFlushSequenceIdIsGreaterThanAllEditsInHFile/wal.1732110971865
2024-11-20T13:56:11,883 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:40789:40789)]
2024-11-20T13:56:11,885 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:56:11,887 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 5248ecc479fb1783386b79fa5b2aa0f8, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.', STARTKEY => '', ENDKEY => ''}
2024-11-20T13:56:11,887 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-11-20T13:56:11,888 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,888 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,892 INFO [StoreOpener-5248ecc479fb1783386b79fa5b2aa0f8-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,894 INFO [StoreOpener-5248ecc479fb1783386b79fa5b2aa0f8-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 5248ecc479fb1783386b79fa5b2aa0f8 columnFamilyName f
2024-11-20T13:56:11,894 DEBUG [StoreOpener-5248ecc479fb1783386b79fa5b2aa0f8-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-20T13:56:11,895 INFO [StoreOpener-5248ecc479fb1783386b79fa5b2aa0f8-1 {}] regionserver.HStore(327): Store=5248ecc479fb1783386b79fa5b2aa0f8/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-20T13:56:11,895 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,896 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,897 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,899 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,899 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,899 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,902 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 5248ecc479fb1783386b79fa5b2aa0f8
2024-11-20T13:56:11,904 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 5248ecc479fb1783386b79fa5b2aa0f8; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=68277743, jitterRate=0.017417654395103455}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-11-20T13:56:11,909 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 5248ecc479fb1783386b79fa5b2aa0f8: Writing region info on filesystem at 1732110971888Initializing all the Stores at 1732110971890 (+2 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732110971890Cleaning up temporary data from old regions at 1732110971899 (+9 ms)Region opened successfully at 1732110971909 (+10 ms)
2024-11-20T13:56:11,930 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.log.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir so I do NOT create it in target/test-data/bb2540c8-5e39-c32f-7a8b-6971cd029326
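The "WAL configuration" entry earlier in this test (blocksize=2 MB, rollsize=1 MB, maxLogs=1760) reflects the usual sizing rule: the WAL rolls at blocksize times a roll multiplier (2 MB x 0.5 = 1 MB here), well before the underlying HDFS block fills, and maxLogs bounds how many un-archived WAL files may accumulate. A hedged sketch of the corresponding settings follows; the property names are as found in recent HBase releases, so verify them against the version in use:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class WalRollConfig {
    public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // 2 MB WAL block size, as in the "blocksize=2 MB" line above.
        conf.setLong("hbase.regionserver.hlog.blocksize", 2L * 1024 * 1024);
        // Roll at half a block: 2 MB * 0.5 = 1 MB, matching "rollsize=1 MB".
        conf.setFloat("hbase.regionserver.logroll.multiplier", 0.5f);
        // Upper bound on un-archived WAL files ("maxLogs=1760").
        conf.setInt("hbase.regionserver.maxlogs", 1760);
        long rollsize = (long) (conf.getLong("hbase.regionserver.hlog.blocksize", 0)
            * conf.getFloat("hbase.regionserver.logroll.multiplier", 0.5f));
        System.out.println("rollsize = " + rollsize + " bytes");
    }
}

Rolling well below the block size keeps each WAL file inside a single HDFS block, which is what makes the later "Moved 1 WAL file(s)" archival step a cheap rename.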
2024-11-20T13:56:11,930 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.log.dir property value differs in configuration and system: Configuration=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/../logs while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir Erasing configuration value by system value.
2024-11-20T13:56:11,930 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.tmp.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.tmp.dir so I do NOT create it in target/test-data/bb2540c8-5e39-c32f-7a8b-6971cd029326
2024-11-20T13:56:11,930 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.tmp.dir property value differs in configuration and system: Configuration=/tmp/hadoop-jenkins while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.tmp.dir Erasing configuration value by system value.
2024-11-20T13:56:11,931 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/bb2540c8-5e39-c32f-7a8b-6971cd029326
2024-11-20T13:56:11,961 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 5248ecc479fb1783386b79fa5b2aa0f8 1/1 column families, dataSize=1.14 KB heapSize=2.13 KB
2024-11-20T13:56:12,061 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:12,162 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:12,263 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:12,363 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:12,464 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:12,565 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:12,666 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:12,767 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:12,867 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:12,968 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:13,068 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:13,091 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8/.tmp/f/b4925ac137cb49bb9c8769559a6b2e2c is 121, key is testFlushSequenceIdIsGreaterThanAllEditsInHFile/f:x0/1732110971931/Put/seqid=0
2024-11-20T13:56:13,099 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741860_1036 (size=6333)
2024-11-20T13:56:13,099 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741860_1036 (size=6333)
2024-11-20T13:56:13,099 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741860_1036 (size=6333)
2024-11-20T13:56:13,100 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=1.14 KB at sequenceid=23 (bloomFilter=true), to=hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8/.tmp/f/b4925ac137cb49bb9c8769559a6b2e2c
2024-11-20T13:56:13,111 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8/.tmp/f/b4925ac137cb49bb9c8769559a6b2e2c as hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8/f/b4925ac137cb49bb9c8769559a6b2e2c
2024-11-20T13:56:13,120 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/5248ecc479fb1783386b79fa5b2aa0f8/f/b4925ac137cb49bb9c8769559a6b2e2c, entries=10, sequenceid=23, filesize=6.2 K
2024-11-20T13:56:13,221 DEBUG [FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms
2024-11-20T13:56:13,226 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~1.14 KB/1170, heapSize ~2.11 KB/2160, currentSize=0 B/0 for 5248ecc479fb1783386b79fa5b2aa0f8 in 1264ms, sequenceid=23, compaction requested=false
2024-11-20T13:56:13,226 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for 5248ecc479fb1783386b79fa5b2aa0f8:
2024-11-20T13:56:13,226 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 5248ecc479fb1783386b79fa5b2aa0f8, disabling compactions & flushes
2024-11-20T13:56:13,226 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.
2024-11-20T13:56:13,227 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.
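The flush sequence above is a two-phase commit: DefaultStoreFlusher writes the memstore snapshot to an HFile under the store's .tmp directory, and only after the write completes does HRegionFileSystem move it into the column-family directory (the "Committing ... as ..." line), so readers never observe a partially written file. A minimal sketch of that commit-by-rename idiom against the Hadoop FileSystem API, with illustrative paths:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;

public class CommitByRename {
    /** Writes bytes to a .tmp sibling first, then renames into place. */
    static void writeCommitted(FileSystem fs, Path finalPath, byte[] data) throws IOException {
        Path tmp = new Path(finalPath.getParent(), ".tmp/" + finalPath.getName());
        try (FSDataOutputStream out = fs.create(tmp, true)) {
            out.write(data); // the full write happens out of readers' sight
        }
        // rename is atomic within a single HDFS namespace, which is what makes this a commit
        if (!fs.rename(tmp, finalPath)) {
            throw new IOException("Failed to commit " + tmp + " as " + finalPath);
        }
    }

    public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        writeCommitted(fs, new Path("/tmp/commit-demo/f/part-0"), "hello".getBytes());
    }
}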
2024-11-20T13:56:13,227 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8. after waiting 0 ms
2024-11-20T13:56:13,227 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.
2024-11-20T13:56:13,230 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732110971666.5248ecc479fb1783386b79fa5b2aa0f8.
2024-11-20T13:56:13,230 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 5248ecc479fb1783386b79fa5b2aa0f8: Waiting for close lock at 1732110973226Disabling compacts and flushes for region at 1732110973226Disabling writes for close at 1732110973227 (+1 ms)Writing region close event to WAL at 1732110973230 (+3 ms)Closed at 1732110973230
2024-11-20T13:56:13,231 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:13,231 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:13,232 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:13,232 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:13,232 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:13,236 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741859_1035 (size=16537)
2024-11-20T13:56:13,236 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741859_1035 (size=16537)
2024-11-20T13:56:13,237 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741859_1035 (size=16537)
2024-11-20T13:56:13,241 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/301bd08f-2011-b6a3-c597-5647e3d32b62/testFlushSequenceIdIsGreaterThanAllEditsInHFile
2024-11-20T13:56:13,241 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: wal:(num 1732110971865)
2024-11-20T13:56:13,249 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=219 (was 210) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1741847029_22 at /127.0.0.1:38848 [Waiting for operation #11] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging
thread: AsyncFSWAL-1-2 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146) app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: AsyncFSWAL-1-1 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146) app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:36408 [Waiting for operation #2] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: AsyncFSWAL-1-3 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) 
java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146) app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1741847029_22 at /127.0.0.1:34440 [Waiting for operation #10] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=455 (was 421) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=203 (was 203), ProcessCount=11 (was 11), AvailableMemoryMB=7917 (was 7945)
2024-11-20T13:56:13,256 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=219, OpenFileDescriptor=455, MaxFileDescriptor=1048576, SystemLoadAverage=203, ProcessCount=11, AvailableMemoryMB=7916
2024-11-20T13:56:13,268 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741861_1037 (size=7)
2024-11-20T13:56:13,269 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741861_1037 (size=7)
2024-11-20T13:56:13,269 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741861_1037 (size=7)
2024-11-20T13:56:13,270 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8
2024-11-20T13:56:13,270 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:56:13,272 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-20T13:56:13,278 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-20T13:56:13,278 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/oldWALs, maxLogs=1760
2024-11-20T13:56:13,279 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110973278
2024-11-20T13:56:13,285 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973278
2024-11-20T13:56:13,287 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789)]
2024-11-20T13:56:13,288 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110973288
2024-11-20T13:56:13,296 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110973289
2024-11-20T13:56:13,301 WARN [Time-limited test {}] wal.AbstractProtobufLogWriter(199): Init output failed, path=hdfs://localhost:39513/user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973289 java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at
org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?] at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?] at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?] at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?] at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?] at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?] 
at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy42.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] 
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy43.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 41 more
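This is the expected-failure path of testFailedToCreateWALIfParentRenamed: the WAL writer creates its file with createNonRecursive, so once the WAL directory has been renamed away the NameNode rejects the create with "Parent directory doesn't exist" rather than silently recreating the path, and the test asserts exactly that. A small sketch of the same contract follows, run against the raw local FileSystem with illustrative paths; it assumes, as in recent Hadoop releases, that the raw local implementation supports createNonRecursive:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;

public class NonRecursiveCreateDemo {
    public static void main(String[] args) throws IOException {
        // Raw local FS, since the checksummed wrapper has not always supported createNonRecursive.
        FileSystem fs = FileSystem.getLocal(new Configuration()).getRawFileSystem();
        fs.delete(new Path("/tmp/wal-demo"), true); // clean slate for re-runs
        Path dir = new Path("/tmp/wal-demo/testFailedToCreateWALIfParentRenamed");
        fs.mkdirs(dir);
        fs.rename(dir, new Path("/tmp/wal-demo/renamed")); // the "parent renamed" step

        try {
            // Unlike plain create(), createNonRecursive() refuses to invent missing parents;
            // on HDFS this surfaces as the FileNotFoundException logged above.
            fs.createNonRecursive(new Path(dir, "wal.1"), true, 4096, (short) 1, 64L << 20, null)
              .close();
            System.out.println("unexpected: create succeeded");
        } catch (IOException expected) {
            System.out.println("create refused as expected: " + expected);
        }
    }
}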
2024-11-20T13:56:13,303 DEBUG [Time-limited test {}] wal.FSHLogProvider(93): Error instantiating log writer. java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?] at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?] at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?] at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?] at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?] at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at
org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy42.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] 
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy43.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
[... the preceding five-frame proxy cycle (GeneratedMethodAccessor5.invoke -> DelegatingMethodAccessorImpl.invoke -> Method.invoke -> HFileSystem$1.invoke -> $Proxy46.create) recurs roughly twenty more times through the nested HFileSystem proxies; the verbatim duplicate frames are elided here ...]
at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 41 more 2024-11-20T13:56:13,315 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=246 (was 219) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:48300 [Receiving block BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method)
java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:40355, 127.0.0.1:42649] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:39482 [Receiving block BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) 
app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: ResponseProcessor for block BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: PacketResponder: BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:42649] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:36436 [Receiving block BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) 
app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: ResponseProcessor for block BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: PacketResponder: BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:37813, 127.0.0.1:42649] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) 
app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973278 block BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:39486 [Receiving block BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) 
app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: FSHLog-0-hdfs://localhost:39513/user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c-prefix:default java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:42649] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973288 block BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:36426 [Receiving block BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) 
app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:48288 [Receiving block BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) 
app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=483 (was 455) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=203 (was 203), ProcessCount=11 (was 11), AvailableMemoryMB=7912 (was 7916) 2024-11-20T13:56:13,323 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=246, OpenFileDescriptor=483, MaxFileDescriptor=1048576, SystemLoadAverage=203, ProcessCount=11, AvailableMemoryMB=7911 2024-11-20T13:56:13,336 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741864_1040 (size=7) 2024-11-20T13:56:13,336 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741864_1040 (size=7) 2024-11-20T13:56:13,337 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741864_1040 (size=7) 2024-11-20T13:56:13,338 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8 2024-11-20T13:56:13,338 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:13,340 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:13,346 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-11-20T13:56:13,347 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/48bbe878-d09c-cfbd-b591-0f78467c2a92/testWALCoprocessorLoaded, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/48bbe878-d09c-cfbd-b591-0f78467c2a92/oldWALs, maxLogs=1760 2024-11-20T13:56:13,348 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110973347 2024-11-20T13:56:13,380 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/48bbe878-d09c-cfbd-b591-0f78467c2a92/testWALCoprocessorLoaded/wal.1732110973347 2024-11-20T13:56:13,382 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:40789:40789),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605)] 2024-11-20T13:56:13,384 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,384 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,384 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,384 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,384 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,388 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741865_1041 (size=93) 2024-11-20T13:56:13,389 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741865_1041 (size=93) 2024-11-20T13:56:13,389 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741865_1041 (size=93) 2024-11-20T13:56:13,393 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/48bbe878-d09c-cfbd-b591-0f78467c2a92/oldWALs 2024-11-20T13:56:13,393 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732110973347) 2024-11-20T13:56:13,402 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=251 (was 246) - Thread LEAK? -, OpenFileDescriptor=495 (was 483) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=203 (was 203), ProcessCount=11 (was 11), AvailableMemoryMB=7905 (was 7911) 2024-11-20T13:56:13,411 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=251, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=203, ProcessCount=11, AvailableMemoryMB=7904 2024-11-20T13:56:13,423 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741866_1042 (size=7) 2024-11-20T13:56:13,423 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741866_1042 (size=7) 2024-11-20T13:56:13,423 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741866_1042 (size=7) 2024-11-20T13:56:13,424 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8 2024-11-20T13:56:13,425 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:13,427 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:13,433 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-11-20T13:56:13,433 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/339368cf-b250-7777-cdbd-f5550bf7eace/testSyncNoAppend, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/339368cf-b250-7777-cdbd-f5550bf7eace/testSyncNoAppend, maxLogs=1760 2024-11-20T13:56:13,434 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110973433 2024-11-20T13:56:13,442 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/339368cf-b250-7777-cdbd-f5550bf7eace/testSyncNoAppend/wal.1732110973433 2024-11-20T13:56:13,443 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789)] 2024-11-20T13:56:13,449 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,449 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,449 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,449 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,449 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,454 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741867_1043 (size=93) 2024-11-20T13:56:13,455 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741867_1043 (size=93) 2024-11-20T13:56:13,455 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to 
blk_1073741867_1043 (size=93) 2024-11-20T13:56:13,457 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/339368cf-b250-7777-cdbd-f5550bf7eace/testSyncNoAppend 2024-11-20T13:56:13,457 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732110973433) 2024-11-20T13:56:13,467 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=256 (was 251) - Thread LEAK? -, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=203 (was 203), ProcessCount=11 (was 11), AvailableMemoryMB=7894 (was 7904) 2024-11-20T13:56:13,475 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=256, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=203, ProcessCount=11, AvailableMemoryMB=7894 2024-11-20T13:56:13,487 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741868_1044 (size=7) 2024-11-20T13:56:13,487 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741868_1044 (size=7) 2024-11-20T13:56:13,487 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741868_1044 (size=7) 2024-11-20T13:56:13,489 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8 2024-11-20T13:56:13,489 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:13,490 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:13,495 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-11-20T13:56:13,495 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/c2136e9a-b3a3-0636-e7b9-737853f6b3e7/testWriteEntryCanBeNull, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/c2136e9a-b3a3-0636-e7b9-737853f6b3e7/testWriteEntryCanBeNull, maxLogs=1760 2024-11-20T13:56:13,496 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110973496 2024-11-20T13:56:13,504 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/c2136e9a-b3a3-0636-e7b9-737853f6b3e7/testWriteEntryCanBeNull/wal.1732110973496 2024-11-20T13:56:13,507 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:40789:40789)] 2024-11-20T13:56:13,512 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,512 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,512 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,512 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,512 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-20T13:56:13,515 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741869_1045 (size=93) 2024-11-20T13:56:13,515 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741869_1045 (size=93) 2024-11-20T13:56:13,515 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741869_1045 (size=93) 2024-11-20T13:56:13,518 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/c2136e9a-b3a3-0636-e7b9-737853f6b3e7/testWriteEntryCanBeNull 2024-11-20T13:56:13,518 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732110973496) 2024-11-20T13:56:13,529 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=261 (was 256) - Thread LEAK? 
-, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=203 (was 203), ProcessCount=11 (was 11), AvailableMemoryMB=7888 (was 7894) 2024-11-20T13:56:13,537 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=261, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=203, ProcessCount=11, AvailableMemoryMB=7887 2024-11-20T13:56:13,547 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741870_1046 (size=7) 2024-11-20T13:56:13,548 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741870_1046 (size=7) 2024-11-20T13:56:13,548 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741870_1046 (size=7) 2024-11-20T13:56:13,549 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de with version=8 2024-11-20T13:56:13,550 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:13,552 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-20T13:56:13,562 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-11-20T13:56:13,562 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/testUnflushedSeqIdTrackingWithAsyncWal, archiveDir=hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/oldWALs, maxLogs=1760 2024-11-20T13:56:13,563 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732110973563 2024-11-20T13:56:13,576 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/testUnflushedSeqIdTrackingWithAsyncWal/wal.1732110973563 2024-11-20T13:56:13,577 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:36515:36515),(127.0.0.1/127.0.0.1:37605:37605),(127.0.0.1/127.0.0.1:40789:40789)] 2024-11-20T13:56:13,578 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => bd6b96926d392fe98d25c72b46c20901, NAME => 'table,,1732110973578.bd6b96926d392fe98d25c72b46c20901.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b 2024-11-20T13:56:13,587 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741872_1048 (size=40) 2024-11-20T13:56:13,588 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741872_1048 (size=40) 2024-11-20T13:56:13,588 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741872_1048 (size=40) 2024-11-20T13:56:13,590 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1732110973578.bd6b96926d392fe98d25c72b46c20901.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-20T13:56:13,592 INFO [StoreOpener-bd6b96926d392fe98d25c72b46c20901-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,594 INFO [StoreOpener-bd6b96926d392fe98d25c72b46c20901-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 
bd6b96926d392fe98d25c72b46c20901 columnFamilyName b 2024-11-20T13:56:13,594 DEBUG [StoreOpener-bd6b96926d392fe98d25c72b46c20901-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-20T13:56:13,594 INFO [StoreOpener-bd6b96926d392fe98d25c72b46c20901-1 {}] regionserver.HStore(327): Store=bd6b96926d392fe98d25c72b46c20901/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-20T13:56:13,595 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,596 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/table/bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,596 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/table/bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,597 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/c54a98c8-d8e6-f94f-5cfd-781b98adf4ed/data/default/table/bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,597 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,597 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,599 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,602 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:39513/user/jenkins/test-data/c54a98c8-d8e6-f94f-5cfd-781b98adf4ed/data/default/table/bd6b96926d392fe98d25c72b46c20901/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-11-20T13:56:13,603 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened bd6b96926d392fe98d25c72b46c20901; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=71345949, jitterRate=0.063137486577034}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-11-20T13:56:13,607 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for bd6b96926d392fe98d25c72b46c20901: Writing region info on filesystem at 1732110973590Initializing all the Stores at 1732110973591 (+1 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732110973591Cleaning up temporary data from old regions at 1732110973597 (+6 ms)Region opened successfully at 1732110973607 (+10 ms) 2024-11-20T13:56:13,607 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing bd6b96926d392fe98d25c72b46c20901, disabling compactions & flushes 2024-11-20T13:56:13,607 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region 
table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. 2024-11-20T13:56:13,608 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. 2024-11-20T13:56:13,608 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. after waiting 0 ms 2024-11-20T13:56:13,608 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. 2024-11-20T13:56:13,608 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. 2024-11-20T13:56:13,609 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for bd6b96926d392fe98d25c72b46c20901: Waiting for close lock at 1732110973607Disabling compacts and flushes for region at 1732110973607Disabling writes for close at 1732110973608 (+1 ms)Writing region close event to WAL at 1732110973608Closed at 1732110973608 2024-11-20T13:56:13,610 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => bd6b96926d392fe98d25c72b46c20901, NAME => 'table,,1732110973578.bd6b96926d392fe98d25c72b46c20901.', STARTKEY => '', ENDKEY => ''} 2024-11-20T13:56:13,611 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,611 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1732110973578.bd6b96926d392fe98d25c72b46c20901.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-20T13:56:13,611 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,611 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,613 INFO [StoreOpener-bd6b96926d392fe98d25c72b46c20901-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,614 INFO [StoreOpener-bd6b96926d392fe98d25c72b46c20901-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region bd6b96926d392fe98d25c72b46c20901 columnFamilyName b 2024-11-20T13:56:13,615 DEBUG [StoreOpener-bd6b96926d392fe98d25c72b46c20901-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-20T13:56:13,615 INFO [StoreOpener-bd6b96926d392fe98d25c72b46c20901-1 {}] regionserver.HStore(327): 
Store=bd6b96926d392fe98d25c72b46c20901/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-20T13:56:13,615 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,616 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/table/bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,617 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/data/default/table/bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,619 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:39513/user/jenkins/test-data/c54a98c8-d8e6-f94f-5cfd-781b98adf4ed/data/default/table/bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,619 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,619 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,622 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,624 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened bd6b96926d392fe98d25c72b46c20901; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=70061276, jitterRate=0.043994367122650146}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-11-20T13:56:13,624 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for bd6b96926d392fe98d25c72b46c20901 2024-11-20T13:56:13,625 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for bd6b96926d392fe98d25c72b46c20901: Running coprocessor pre-open hook at 1732110973611Writing region info on filesystem at 1732110973611Initializing all the Stores at 1732110973612 (+1 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732110973613 (+1 ms)Cleaning up temporary data from old regions at 1732110973619 (+6 ms)Running coprocessor post-open hooks at 1732110973624 (+5 ms)Region opened successfully at 1732110973625 (+1 ms) 2024-11-20T13:56:14,096 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor 2024-11-20T13:56:14,097 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor Metrics about HBase WALObservers 2024-11-20T13:56:14,097 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=TableRequests_Namespace_default_table_table 2024-11-20T13:56:14,097 
INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=TableRequests_Namespace_default_table_table Metrics about Tables on a single HBase RegionServer 2024-11-20T13:56:16,631 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1722): Closing bd6b96926d392fe98d25c72b46c20901, disabling compactions & flushes 2024-11-20T13:56:16,631 INFO [pool-106-thread-1 {}] regionserver.HRegion(1755): Closing region table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. 2024-11-20T13:56:16,631 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. 2024-11-20T13:56:16,631 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1843): Acquired close lock on table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. after waiting 0 ms 2024-11-20T13:56:16,631 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1853): Updates disabled for region table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. 2024-11-20T13:56:16,632 INFO [pool-106-thread-1 {}] regionserver.HRegion(2902): Flushing bd6b96926d392fe98d25c72b46c20901 1/1 column families, dataSize=48 B heapSize=448 B 2024-11-20T13:56:17,126 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties 2024-11-20T13:56:19,651 DEBUG [pool-106-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/bd6b96926d392fe98d25c72b46c20901/.tmp/b/5c2caa7072a24fe4a241864ac118b825 is 28, key is b/b:b/1732110973627/Put/seqid=0 2024-11-20T13:56:19,657 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741873_1049 (size=4945) 2024-11-20T13:56:19,658 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741873_1049 (size=4945) 2024-11-20T13:56:19,658 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741873_1049 (size=4945) 2024-11-20T13:56:19,658 INFO [pool-106-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/bd6b96926d392fe98d25c72b46c20901/.tmp/b/5c2caa7072a24fe4a241864ac118b825 2024-11-20T13:56:19,668 DEBUG [pool-106-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/bd6b96926d392fe98d25c72b46c20901/.tmp/b/5c2caa7072a24fe4a241864ac118b825 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/bd6b96926d392fe98d25c72b46c20901/b/5c2caa7072a24fe4a241864ac118b825 2024-11-20T13:56:19,677 INFO [pool-106-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/bd6b96926d392fe98d25c72b46c20901/b/5c2caa7072a24fe4a241864ac118b825, entries=1, sequenceid=6, filesize=4.8 K 
2024-11-20T13:56:19,679 INFO [pool-106-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for bd6b96926d392fe98d25c72b46c20901 in 3047ms, sequenceid=6, compaction requested=false 2024-11-20T13:56:19,685 DEBUG [pool-106-thread-1 {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:39513/user/jenkins/test-data/c54a98c8-d8e6-f94f-5cfd-781b98adf4ed/data/default/table/bd6b96926d392fe98d25c72b46c20901/recovered.edits/9.seqid, newMaxSeqId=9, maxSeqId=1 2024-11-20T13:56:19,686 INFO [pool-106-thread-1 {}] regionserver.HRegion(1973): Closed table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. 2024-11-20T13:56:19,686 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1676): Region close journal for bd6b96926d392fe98d25c72b46c20901: Waiting for close lock at 1732110976631Running coprocessor pre-close hooks at 1732110976631Disabling compacts and flushes for region at 1732110976631Disabling writes for close at 1732110976631Obtaining lock to block concurrent updates at 1732110976632 (+1 ms)Preparing flush snapshotting stores in bd6b96926d392fe98d25c72b46c20901 at 1732110976632Finished memstore snapshotting table,,1732110973578.bd6b96926d392fe98d25c72b46c20901., syncing WAL and waiting on mvcc, flushsize=dataSize=48, getHeapSize=432, getOffHeapSize=0, getCellsCount=2 at 1732110976632Flushing stores of table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. at 1732110979634 (+3002 ms)Flushing bd6b96926d392fe98d25c72b46c20901/b: creating writer at 1732110979634Flushing bd6b96926d392fe98d25c72b46c20901/b: appending metadata at 1732110979650 (+16 ms)Flushing bd6b96926d392fe98d25c72b46c20901/b: closing flushed file at 1732110979650Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@1819ea84: reopening flushed file at 1732110979667 (+17 ms)Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for bd6b96926d392fe98d25c72b46c20901 in 3047ms, sequenceid=6, compaction requested=false at 1732110979679 (+12 ms)Writing region close event to WAL at 1732110979680 (+1 ms)Running coprocessor post-close hooks at 1732110979686 (+6 ms)Closed at 1732110979686 2024-11-20T13:56:19,686 INFO [pool-106-thread-1 {}] wal.AbstractTestFSWAL(620): Close result:{[B@3a042c9=[/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/data/default/table/bd6b96926d392fe98d25c72b46c20901/b/5c2caa7072a24fe4a241864ac118b825]} 2024-11-20T13:56:19,686 WARN [Time-limited test {}] regionserver.HRegion(1707): Region table,,1732110973578.bd6b96926d392fe98d25c72b46c20901. 
2024-11-20T13:56:19,686 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for bd6b96926d392fe98d25c72b46c20901: Waiting for close lock at 1732110979686
2024-11-20T13:56:19,687 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:19,687 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:19,687 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:19,687 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:19,687 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-20T13:56:19,690 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42649 is added to blk_1073741871_1047 (size=1206)
2024-11-20T13:56:19,690 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40355 is added to blk_1073741871_1047 (size=1206)
2024-11-20T13:56:19,690 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:37813 is added to blk_1073741871_1047 (size=1206)
2024-11-20T13:56:19,693 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/4e72a9b2-8389-f4a9-55c8-abb615e5b4de/oldWALs
2024-11-20T13:56:19,694 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732110973563)
2024-11-20T13:56:19,703 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=264 (was 261)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_853534929_22 at /127.0.0.1:48396 [Waiting for operation #5]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=501 (was 495) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=211 (was 203) - SystemLoadAverage LEAK? -, ProcessCount=11 (was 11), AvailableMemoryMB=7868 (was 7887)
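The ResourceChecker report above is a before/after snapshot diff around the test (Thread=264 (was 261), OpenFileDescriptor=501 (was 495), and so on) used to flag leaked threads and file descriptors. A minimal JDK-only sketch of the same snapshot-and-diff idea, assuming a HotSpot-style JVM on Unix for the file-descriptor count:

    import java.lang.management.ManagementFactory;
    import com.sun.management.UnixOperatingSystemMXBean;

    // Snapshot thread and open-file-descriptor counts before and after a block
    // of work, in the spirit of ResourceChecker's "X (was Y)" report.
    public final class ResourceSnapshot {
        public static void main(String[] args) {
            int threadsBefore = Thread.activeCount();
            long fdsBefore = openFds();

            // ... run the code under test here ...

            System.out.printf("Thread=%d (was %d)%n", Thread.activeCount(), threadsBefore);
            System.out.printf("OpenFileDescriptor=%d (was %d)%n", openFds(), fdsBefore);
        }

        private static long openFds() {
            var os = ManagementFactory.getOperatingSystemMXBean();
            // The Unix-specific bean exposes the FD count; -1 on other platforms.
            return (os instanceof UnixOperatingSystemMXBean unix)
                ? unix.getOpenFileDescriptorCount() : -1;
        }
    }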
2024-11-20T13:56:19,704 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1019): Shutting down minicluster
2024-11-20T13:56:19,706 WARN [PacketResponder: BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:42649] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-806677084-172.17.0.2-1732110952974:1073741863
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-11-20T13:56:19,706 WARN [ResponseProcessor for block BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039
java.io.IOException: Bad response ERROR for BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039 from datanode DatanodeInfoWithStorage[127.0.0.1:37813,DS-6259affe-44bd-460c-8356-5cf6cd2fe296,DISK]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-20T13:56:19,707 WARN [DataStreamer for file /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973288 block BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039 {}] hdfs.DataStreamer(1731): Error Recovery for BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039 in pipeline [DatanodeInfoWithStorage[127.0.0.1:40355,DS-e39fe82d-74f1-44bb-a0fe-1fce0214d083,DISK], DatanodeInfoWithStorage[127.0.0.1:37813,DS-6259affe-44bd-460c-8356-5cf6cd2fe296,DISK], DatanodeInfoWithStorage[127.0.0.1:42649,DS-462795d0-a958-41d2-8374-ee6bc979008e,DISK]]: datanode 1(DatanodeInfoWithStorage[127.0.0.1:37813,DS-6259affe-44bd-460c-8356-5cf6cd2fe296,DISK]) is bad.
2024-11-20T13:56:19,707 WARN [PacketResponder: BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:37813, 127.0.0.1:42649] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-11-20T13:56:19,712 WARN [DataStreamer for file /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973288 block BP-806677084-172.17.0.2-1732110952974:blk_1073741863_1039 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973288 (inode 16549) Holder DFSClient_NONMAPREDUCE_853534929_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    ... [this reflection/proxy frame group (Method.invoke -> HFileSystem$1.invoke -> $Proxy46.getAdditionalDatanode) repeats many more times in the original log, later via NativeMethodAccessorImpl rather than GeneratedMethodAccessor79] ...
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-20T13:56:19,713 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973288 with renewLeaseKey: DEFAULT_16549
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973288 (inode 16549) Holder DFSClient_NONMAPREDUCE_853534929_22 does not have any open files.
    ... [stack trace elided: it repeats the same server-side checkLease/getAdditionalDatanode frames and the getAdditionalDatanode proxy/reflection chain shown in the DataStreamer Exception above, ending in the same DataStreamer pipeline-recovery frames] ...
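The long getAdditionalDatanode chains above come from the HDFS client's pipeline recovery: once a datanode in the write pipeline is marked bad, DataStreamer asks the namenode for a replacement, which fails here because the file's lease is already gone (the parent directory was renamed out from under the open writer, which is what this test exercises). On small test clusters this replacement behavior is commonly tuned with the stock dfs.client.block.write.replace-datanode-on-failure.* client settings; a hedged sketch follows, with illustrative values (defaults and applicability vary by Hadoop version):

    import org.apache.hadoop.conf.Configuration;

    // Tame DataStreamer datanode-replacement behavior on small (test) clusters.
    // The keys are the standard HDFS client settings; the values are illustrative.
    public final class PipelineRecoveryConf {
        public static Configuration testConf() {
            Configuration conf = new Configuration();
            // Whether to attempt replacing a failed datanode in the write pipeline.
            conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.enable", true);
            // NEVER/DEFAULT/ALWAYS; NEVER skips getAdditionalDatanode on tiny clusters.
            conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
            // Keep writing with the remaining datanodes if replacement fails.
            conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.best-effort", true);
            return conf;
        }
    }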
2024-11-20T13:56:19,715 WARN [PacketResponder: BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:42649] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run(): org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-806677084-172.17.0.2-1732110952974:1073741862 at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-11-20T13:56:19,715 WARN [ResponseProcessor for block BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038 java.io.IOException: Bad response ERROR for BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038 from datanode DatanodeInfoWithStorage[127.0.0.1:40355,DS-e39fe82d-74f1-44bb-a0fe-1fce0214d083,DISK] at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?] 2024-11-20T13:56:19,716 WARN [DataStreamer for file /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973278 block BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038 {}] hdfs.DataStreamer(1731): Error Recovery for BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038 in pipeline [DatanodeInfoWithStorage[127.0.0.1:37813,DS-6259affe-44bd-460c-8356-5cf6cd2fe296,DISK], DatanodeInfoWithStorage[127.0.0.1:40355,DS-e39fe82d-74f1-44bb-a0fe-1fce0214d083,DISK], DatanodeInfoWithStorage[127.0.0.1:42649,DS-462795d0-a958-41d2-8374-ee6bc979008e,DISK]]: datanode 1(DatanodeInfoWithStorage[127.0.0.1:40355,DS-e39fe82d-74f1-44bb-a0fe-1fce0214d083,DISK]) is bad. 2024-11-20T13:56:19,716 WARN [PacketResponder: BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:40355, 127.0.0.1:42649] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run(): java.io.IOException: Connection reset by peer at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?] at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?] at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?] at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?] at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?] at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?] at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?] 
at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?] at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?] at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-11-20T13:56:19,717 WARN [DataStreamer for file /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973278 block BP-806677084-172.17.0.2-1732110952974:blk_1073741862_1038 {}] hdfs.DataStreamer(859): DataStreamer Exception org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973278 (inode 16548) Holder DFSClient_NONMAPREDUCE_853534929_22 does not have any open files. at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] 
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] 
at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-20T13:56:19,717 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973278 with renewLeaseKey: DEFAULT_16548
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/036504e4-d894-193e-fdd2-8239be8b719c/testFailedToCreateWALIfParentRenamed/wal.1732110973278 (inode 16548) Holder DFSClient_NONMAPREDUCE_853534929_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy42.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy43.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    ... (the 5 reflection/proxy frames above repeat verbatim many more times; duplicates elided) ...
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-20T13:56:19,774 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@54549f71{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-20T13:56:19,778 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@6184a766{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-20T13:56:19,779 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-20T13:56:19,779 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@7585798d{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-20T13:56:19,779 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@7fb8704f{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir/,STOPPED}
2024-11-20T13:56:19,782 WARN [BP-806677084-172.17.0.2-1732110952974 heartbeating to localhost/127.0.0.1:39513 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-11-20T13:56:19,782 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-11-20T13:56:19,782 WARN [BP-806677084-172.17.0.2-1732110952974 heartbeating to localhost/127.0.0.1:39513 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-806677084-172.17.0.2-1732110952974 (Datanode Uuid 2dbe0db3-9cba-4e0c-b720-30fac9f070cb) service to localhost/127.0.0.1:39513
2024-11-20T13:56:19,782 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-11-20T13:56:19,783 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data5/current/BP-806677084-172.17.0.2-1732110952974 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-20T13:56:19,784 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data6/current/BP-806677084-172.17.0.2-1732110952974 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-20T13:56:19,784 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-11-20T13:56:19,786 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@62fe08ec{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-20T13:56:19,787 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@328d1637{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-20T13:56:19,787 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-20T13:56:19,787 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@29f30d89{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-20T13:56:19,787 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@30b1edcb{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir/,STOPPED}
2024-11-20T13:56:19,788 WARN [BP-806677084-172.17.0.2-1732110952974 heartbeating to localhost/127.0.0.1:39513 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-11-20T13:56:19,788 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-11-20T13:56:19,788 WARN [BP-806677084-172.17.0.2-1732110952974 heartbeating to localhost/127.0.0.1:39513 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-806677084-172.17.0.2-1732110952974 (Datanode Uuid b8af1ed9-b83f-4048-b76f-83a997761cb7) service to localhost/127.0.0.1:39513
2024-11-20T13:56:19,788 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-11-20T13:56:19,789 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data3/current/BP-806677084-172.17.0.2-1732110952974 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-20T13:56:19,789 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data4/current/BP-806677084-172.17.0.2-1732110952974 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-20T13:56:19,789 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-11-20T13:56:19,791 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@6cb5542{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-20T13:56:19,792 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@5ab539e8{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-20T13:56:19,792 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-20T13:56:19,792 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@6071838{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-20T13:56:19,792 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@4b6a27ec{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir/,STOPPED}
2024-11-20T13:56:19,795 WARN [BP-806677084-172.17.0.2-1732110952974 heartbeating to localhost/127.0.0.1:39513 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-11-20T13:56:19,795 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-11-20T13:56:19,795 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-11-20T13:56:19,795 WARN [BP-806677084-172.17.0.2-1732110952974 heartbeating to localhost/127.0.0.1:39513 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-806677084-172.17.0.2-1732110952974 (Datanode Uuid 6829e71d-af75-4b26-9068-535bad204d0c) service to localhost/127.0.0.1:39513
2024-11-20T13:56:19,796 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data1/current/BP-806677084-172.17.0.2-1732110952974 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-20T13:56:19,796 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/cluster_0eeb59b7-ea0d-32f2-2ca0-73cdfa74e5c6/data/data2/current/BP-806677084-172.17.0.2-1732110952974 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-20T13:56:19,796 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-11-20T13:56:19,806 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@637d0fb3{hdfs,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-11-20T13:56:19,807 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@29b30f4f{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-20T13:56:19,807 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-20T13:56:19,807 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@79c59de9{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-20T13:56:19,807 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@3cb77234{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/e6469ea7-de36-dbda-f2d8-21db421ee54b/hadoop.log.dir/,STOPPED}
2024-11-20T13:56:19,839 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1026): Minicluster is down