2024-11-26 20:51:18,894 main DEBUG Apache Log4j Core 2.17.2 initializing configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-11-26 20:51:18,905 main DEBUG Took 0.009289 seconds to load 1 plugins from package org.apache.hadoop.hbase.logging
2024-11-26 20:51:18,905 main DEBUG PluginManager 'Core' found 129 plugins
2024-11-26 20:51:18,906 main DEBUG PluginManager 'Level' found 0 plugins
2024-11-26 20:51:18,906 main DEBUG PluginManager 'Lookup' found 16 plugins
2024-11-26 20:51:18,908 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,915 main DEBUG PluginManager 'TypeConverter' found 26 plugins
2024-11-26 20:51:18,926 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.util.MBeans", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,927 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,928 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.logging.TestJul2Slf4j", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,928 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,928 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.zookeeper", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,929 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,930 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,930 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,930 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,931 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,931 main DEBUG LoggerConfig$Builder(additivity="false", level="WARN", levelAndRefs="null", name="org.apache.directory", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,932 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,932 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.ipc.FailedServers", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,932 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,933 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsConfig", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,933 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,933 main DEBUG LoggerConfig$Builder(additivity="null", level="INFO", levelAndRefs="null", name="org.apache.hadoop.hbase.ScheduledChore", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,934 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,934 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.regionserver.RSRpcServices", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,934 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,935 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,935 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,935 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,936 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-26 20:51:18,936 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hbase.thirdparty.io.netty.channel", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,936 main DEBUG Building Plugin[name=root, class=org.apache.logging.log4j.core.config.LoggerConfig$RootLogger].
2024-11-26 20:51:18,938 main DEBUG LoggerConfig$RootLogger$Builder(additivity="null", level="null", levelAndRefs="INFO,Console", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-26 20:51:18,939 main DEBUG Building Plugin[name=loggers, class=org.apache.logging.log4j.core.config.LoggersPlugin].
2024-11-26 20:51:18,940 main DEBUG createLoggers(={org.apache.hadoop.metrics2.util.MBeans, org.apache.hadoop.hbase.logging.TestJul2Slf4j, org.apache.zookeeper, org.apache.hadoop.metrics2.impl.MetricsSinkAdapter, org.apache.hadoop.metrics2.impl.MetricsSystemImpl, org.apache.directory, org.apache.hadoop.hbase.ipc.FailedServers, org.apache.hadoop.metrics2.impl.MetricsConfig, org.apache.hadoop.hbase.ScheduledChore, org.apache.hadoop.hbase.regionserver.RSRpcServices, org.apache.hadoop, org.apache.hadoop.hbase, org.apache.hbase.thirdparty.io.netty.channel, root})
2024-11-26 20:51:18,941 main DEBUG Building Plugin[name=layout, class=org.apache.logging.log4j.core.layout.PatternLayout].
2024-11-26 20:51:18,942 main DEBUG PatternLayout$Builder(pattern="%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n", PatternSelector=null, Configuration(PropertiesConfig), Replace=null, charset="null", alwaysWriteExceptions="null", disableAnsi="null", noConsoleNoAnsi="null", header="null", footer="null")
2024-11-26 20:51:18,942 main DEBUG PluginManager 'Converter' found 47 plugins
2024-11-26 20:51:18,950 main DEBUG Building Plugin[name=appender, class=org.apache.hadoop.hbase.logging.HBaseTestAppender].
2024-11-26 20:51:18,952 main DEBUG HBaseTestAppender$Builder(target="SYSTEM_ERR", maxSize="1G", bufferedIo="null", bufferSize="null", immediateFlush="null", ignoreExceptions="null", PatternLayout(%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n), name="Console", Configuration(PropertiesConfig), Filter=null, ={})
2024-11-26 20:51:18,953 main DEBUG Starting HBaseTestOutputStreamManager SYSTEM_ERR
2024-11-26 20:51:18,953 main DEBUG Building Plugin[name=appenders, class=org.apache.logging.log4j.core.config.AppendersPlugin].
2024-11-26 20:51:18,954 main DEBUG createAppenders(={Console})
2024-11-26 20:51:18,954 main DEBUG Configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f initialized
2024-11-26 20:51:18,955 main DEBUG Starting configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f
2024-11-26 20:51:18,955 main DEBUG Started configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@210ab13f OK.
2024-11-26 20:51:18,955 main DEBUG Shutting down OutputStreamManager SYSTEM_OUT.false.false-1
2024-11-26 20:51:18,956 main DEBUG OutputStream closed
2024-11-26 20:51:18,956 main DEBUG Shut down OutputStreamManager SYSTEM_OUT.false.false-1, all resources released: true
2024-11-26 20:51:18,956 main DEBUG Appender DefaultConsole-1 stopped with status true
2024-11-26 20:51:18,956 main DEBUG Stopped org.apache.logging.log4j.core.config.DefaultConfiguration@61001b64 OK
2024-11-26 20:51:19,021 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6
2024-11-26 20:51:19,023 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=StatusLogger
2024-11-26 20:51:19,024 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=ContextSelector
2024-11-26 20:51:19,025 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=
2024-11-26 20:51:19,025 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.directory
2024-11-26 20:51:19,026 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
2024-11-26 20:51:19,026 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.zookeeper
2024-11-26 20:51:19,026 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.logging.TestJul2Slf4j
2024-11-26 20:51:19,026 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSystemImpl
2024-11-26 20:51:19,027 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.util.MBeans
2024-11-26 20:51:19,027 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase
2024-11-26 20:51:19,027 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop
2024-11-26 20:51:19,028 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ipc.FailedServers
2024-11-26 20:51:19,028 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.regionserver.RSRpcServices
2024-11-26 20:51:19,028 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsConfig
2024-11-26 20:51:19,029 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hbase.thirdparty.io.netty.channel
2024-11-26 20:51:19,029 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ScheduledChore
2024-11-26 20:51:19,030 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Appenders,name=Console
2024-11-26 20:51:19,032 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-11-26 20:51:19,033 main DEBUG Reconfiguration complete for context[name=1dbd16a6] at URI jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-logging/target/hbase-logging-4.0.0-alpha-1-SNAPSHOT-tests.jar!/log4j2.properties (org.apache.logging.log4j.core.LoggerContext@40db2a24) with optional ClassLoader: null
2024-11-26 20:51:19,033 main DEBUG Shutdown hook enabled. Registering a new one.
2024-11-26 20:51:19,033 main DEBUG LoggerContext[name=1dbd16a6, org.apache.logging.log4j.core.LoggerContext@40db2a24] started OK.
2024-11-26T20:51:19,237 DEBUG [main {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73
2024-11-26 20:51:19,240 main DEBUG AsyncLogger.ThreadNameStrategy=UNCACHED (user specified null, default is UNCACHED)
2024-11-26 20:51:19,240 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-11-26T20:51:19,250 INFO [main {}] hbase.HBaseClassTestRule(94): Test class org.apache.hadoop.hbase.regionserver.wal.TestFSHLog timeout: 13 mins
2024-11-26T20:51:19,272 INFO [Time-limited test {}] hbase.HBaseZKTestingUtil(84): Created new mini-cluster data directory: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2, deleteOnExit=true
2024-11-26T20:51:19,273 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting test.cache.data to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/test.cache.data in system properties and HBase conf
2024-11-26T20:51:19,274 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.tmp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.tmp.dir in system properties and HBase conf
2024-11-26T20:51:19,274 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting hadoop.log.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir in system properties and HBase conf
2024-11-26T20:51:19,275 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.local.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/mapreduce.cluster.local.dir in system properties and HBase conf
2024-11-26T20:51:19,276 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting mapreduce.cluster.temp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/mapreduce.cluster.temp.dir in system properties and HBase conf
2024-11-26T20:51:19,276 INFO [Time-limited test {}] hbase.HBaseTestingUtil(738): read short circuit is OFF
2024-11-26T20:51:19,364 WARN [Time-limited test {}] util.NativeCodeLoader(60): Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2024-11-26T20:51:19,454 DEBUG [Time-limited test {}] fs.HFileSystem(310): The file system is not a DistributedFileSystem. Skipping on block location reordering
2024-11-26T20:51:19,457 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-labels.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/yarn.node-labels.fs-store.root-dir in system properties and HBase conf
2024-11-26T20:51:19,458 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.node-attribute.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/yarn.node-attribute.fs-store.root-dir in system properties and HBase conf
2024-11-26T20:51:19,458 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.log-dirs to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/yarn.nodemanager.log-dirs in system properties and HBase conf
2024-11-26T20:51:19,459 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-11-26T20:51:19,459 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.active-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/yarn.timeline-service.entity-group-fs-store.active-dir in system properties and HBase conf
2024-11-26T20:51:19,460 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.timeline-service.entity-group-fs-store.done-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/yarn.timeline-service.entity-group-fs-store.done-dir in system properties and HBase conf
2024-11-26T20:51:19,460 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-11-26T20:51:19,460 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/dfs.journalnode.edits.dir in system properties and HBase conf
2024-11-26T20:51:19,461 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.datanode.shared.file.descriptor.paths to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/dfs.datanode.shared.file.descriptor.paths in system properties and HBase conf
2024-11-26T20:51:19,461 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting nfs.dump.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/nfs.dump.dir in system properties and HBase conf
2024-11-26T20:51:19,462 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting java.io.tmpdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/java.io.tmpdir in system properties and HBase conf
2024-11-26T20:51:19,462 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/dfs.journalnode.edits.dir in system properties and HBase conf
2024-11-26T20:51:19,462 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting dfs.provided.aliasmap.inmemory.leveldb.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/dfs.provided.aliasmap.inmemory.leveldb.dir in system properties and HBase conf
2024-11-26T20:51:19,463 INFO [Time-limited test {}] hbase.HBaseTestingUtil(751): Setting fs.s3a.committer.staging.tmp.path to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/fs.s3a.committer.staging.tmp.path in system properties and HBase conf
2024-11-26T20:51:19,938 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-11-26T20:51:20,294 WARN [Time-limited test {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
2024-11-26T20:51:20,376 INFO [Time-limited test {}] log.Log(170): Logging initialized @2152ms to org.eclipse.jetty.util.log.Slf4jLog
2024-11-26T20:51:20,460 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-26T20:51:20,523 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-11-26T20:51:20,547 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-11-26T20:51:20,548 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-11-26T20:51:20,549 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-11-26T20:51:20,565 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-26T20:51:20,568 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@746f7db{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir/,AVAILABLE}
2024-11-26T20:51:20,569 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@61b73bb3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-11-26T20:51:20,778 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@5599def{hdfs,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/java.io.tmpdir/jetty-localhost-41387-hadoop-hdfs-3_4_1-tests_jar-_-any-10676976988889125905/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-11-26T20:51:20,785 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@493ba8a1{HTTP/1.1, (http/1.1)}{localhost:41387}
2024-11-26T20:51:20,785 INFO [Time-limited test {}] server.Server(415): Started @2563ms
2024-11-26T20:51:20,809 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000.
2024-11-26T20:51:21,179 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-26T20:51:21,186 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-11-26T20:51:21,187 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-11-26T20:51:21,187 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-11-26T20:51:21,188 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-11-26T20:51:21,188 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@208945{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir/,AVAILABLE}
2024-11-26T20:51:21,189 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@43a917ce{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-11-26T20:51:21,308 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@411b19f7{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/java.io.tmpdir/jetty-localhost-39835-hadoop-hdfs-3_4_1-tests_jar-_-any-5432320712383633798/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-26T20:51:21,309 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@64bb503e{HTTP/1.1, (http/1.1)}{localhost:39835}
2024-11-26T20:51:21,309 INFO [Time-limited test {}] server.Server(415): Started @3087ms
2024-11-26T20:51:21,363 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-11-26T20:51:21,481 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-26T20:51:21,487 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-11-26T20:51:21,489 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-11-26T20:51:21,489 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-11-26T20:51:21,489 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-11-26T20:51:21,493 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@443ad5c2{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir/,AVAILABLE}
2024-11-26T20:51:21,493 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@243038a3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-11-26T20:51:21,623 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@28637041{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/java.io.tmpdir/jetty-localhost-41237-hadoop-hdfs-3_4_1-tests_jar-_-any-6924628432683759999/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-26T20:51:21,624 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@5e18b7fd{HTTP/1.1, (http/1.1)}{localhost:41237}
2024-11-26T20:51:21,624 INFO [Time-limited test {}] server.Server(415): Started @3402ms
2024-11-26T20:51:21,627 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-11-26T20:51:21,673 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-26T20:51:21,678 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-11-26T20:51:21,681 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-11-26T20:51:21,681 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-11-26T20:51:21,681 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-11-26T20:51:21,682 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@4c49fcd3{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir/,AVAILABLE}
2024-11-26T20:51:21,682 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@47ddd06a{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-11-26T20:51:21,796 WARN [Thread-105 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data3/current/BP-2040341567-172.17.0.2-1732654280033/current, will proceed with Du for space computation calculation,
2024-11-26T20:51:21,796 WARN [Thread-107 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data1/current/BP-2040341567-172.17.0.2-1732654280033/current, will proceed with Du for space computation calculation,
2024-11-26T20:51:21,796 WARN [Thread-106 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data4/current/BP-2040341567-172.17.0.2-1732654280033/current, will proceed with Du for space computation calculation,
2024-11-26T20:51:21,796 WARN [Thread-108 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data2/current/BP-2040341567-172.17.0.2-1732654280033/current, will proceed with Du for space computation calculation,
2024-11-26T20:51:21,838 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@4b2b884e{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/java.io.tmpdir/jetty-localhost-40341-hadoop-hdfs-3_4_1-tests_jar-_-any-8925805586466154941/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-26T20:51:21,839 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@fa4aa4c{HTTP/1.1, (http/1.1)}{localhost:40341}
2024-11-26T20:51:21,839 INFO [Time-limited test {}] server.Server(415): Started @3617ms
2024-11-26T20:51:21,841 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-11-26T20:51:21,842 WARN [Thread-81 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-11-26T20:51:21,843 WARN [Thread-58 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-11-26T20:51:21,923 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x770d97bead839618 with lease ID 0x6298083fdd6ced83: Processing first storage report for DS-2a500c98-adc1-4d80-a12e-b74467ed64f4 from datanode DatanodeRegistration(127.0.0.1:42739, datanodeUuid=cc3eb308-f8d7-48da-b191-5e8a836cb6c7, infoPort=35583, infoSecurePort=0, ipcPort=37101, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033)
2024-11-26T20:51:21,924 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x770d97bead839618 with lease ID 0x6298083fdd6ced83: from storage DS-2a500c98-adc1-4d80-a12e-b74467ed64f4 node DatanodeRegistration(127.0.0.1:42739, datanodeUuid=cc3eb308-f8d7-48da-b191-5e8a836cb6c7, infoPort=35583, infoSecurePort=0, ipcPort=37101, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
2024-11-26T20:51:21,925 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x9c73498cfffa2a6 with lease ID 0x6298083fdd6ced82: Processing first storage report for DS-cf310024-d2b0-45d4-a2bc-d0ccbd465339 from datanode DatanodeRegistration(127.0.0.1:46741, datanodeUuid=ded40a5e-296c-4218-99d4-3df239036265, infoPort=37399, infoSecurePort=0, ipcPort=35413, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033)
2024-11-26T20:51:21,925 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x9c73498cfffa2a6 with lease ID 0x6298083fdd6ced82: from storage DS-cf310024-d2b0-45d4-a2bc-d0ccbd465339 node DatanodeRegistration(127.0.0.1:46741, datanodeUuid=ded40a5e-296c-4218-99d4-3df239036265, infoPort=37399, infoSecurePort=0, ipcPort=35413, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
2024-11-26T20:51:21,925 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x770d97bead839618 with lease ID 0x6298083fdd6ced83: Processing first storage report for DS-0af7b0a2-a3dd-44a3-bb9d-846a177ba961 from datanode DatanodeRegistration(127.0.0.1:42739, datanodeUuid=cc3eb308-f8d7-48da-b191-5e8a836cb6c7, infoPort=35583, infoSecurePort=0, ipcPort=37101, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033)
2024-11-26T20:51:21,925 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x770d97bead839618 with lease ID 0x6298083fdd6ced83: from storage DS-0af7b0a2-a3dd-44a3-bb9d-846a177ba961 node DatanodeRegistration(127.0.0.1:42739, datanodeUuid=cc3eb308-f8d7-48da-b191-5e8a836cb6c7, infoPort=35583, infoSecurePort=0, ipcPort=37101, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-11-26T20:51:21,926 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x9c73498cfffa2a6 with lease ID 0x6298083fdd6ced82: Processing first storage report for DS-03d94878-1c68-4928-8e61-60a72d9cef97 from datanode DatanodeRegistration(127.0.0.1:46741, datanodeUuid=ded40a5e-296c-4218-99d4-3df239036265, infoPort=37399, infoSecurePort=0, ipcPort=35413, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033)
2024-11-26T20:51:21,926 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x9c73498cfffa2a6 with lease ID 0x6298083fdd6ced82: from storage DS-03d94878-1c68-4928-8e61-60a72d9cef97 node DatanodeRegistration(127.0.0.1:46741, datanodeUuid=ded40a5e-296c-4218-99d4-3df239036265, infoPort=37399, infoSecurePort=0, ipcPort=35413, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
2024-11-26T20:51:21,968 WARN [Thread-139 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data5/current/BP-2040341567-172.17.0.2-1732654280033/current, will proceed with Du for space computation calculation,
2024-11-26T20:51:21,968 WARN [Thread-140 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data6/current/BP-2040341567-172.17.0.2-1732654280033/current, will proceed with Du for space computation calculation,
2024-11-26T20:51:22,001 WARN [Thread-129 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-11-26T20:51:22,006 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xdab9befead18d992 with lease ID 0x6298083fdd6ced84: Processing first storage report for DS-33209b92-4879-4099-9df4-a8673cfc3827 from datanode DatanodeRegistration(127.0.0.1:35013, datanodeUuid=30466a83-cf79-46c2-a64a-fea7fe737379, infoPort=43577, infoSecurePort=0, ipcPort=33427, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033)
2024-11-26T20:51:22,006 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xdab9befead18d992 with lease ID 0x6298083fdd6ced84: from storage DS-33209b92-4879-4099-9df4-a8673cfc3827 node DatanodeRegistration(127.0.0.1:35013, datanodeUuid=30466a83-cf79-46c2-a64a-fea7fe737379, infoPort=43577, infoSecurePort=0, ipcPort=33427, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
2024-11-26T20:51:22,007 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xdab9befead18d992 with lease ID 0x6298083fdd6ced84: Processing first storage report for DS-3e715cf8-e999-4c03-b30b-8bfb0930be69 from datanode DatanodeRegistration(127.0.0.1:35013, datanodeUuid=30466a83-cf79-46c2-a64a-fea7fe737379, infoPort=43577, infoSecurePort=0, ipcPort=33427, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033)
2024-11-26T20:51:22,007 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xdab9befead18d992 with lease ID 0x6298083fdd6ced84: from storage DS-3e715cf8-e999-4c03-b30b-8bfb0930be69 node DatanodeRegistration(127.0.0.1:35013, datanodeUuid=30466a83-cf79-46c2-a64a-fea7fe737379, infoPort=43577, infoSecurePort=0, ipcPort=33427, storageInfo=lv=-57;cid=testClusterID;nsid=1693661475;c=1732654280033), blocks: 0, hasStaleStorage: false, processing time: 1 msecs, invalidatedBlocks: 0
2024-11-26T20:51:22,193 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(631): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73
2024-11-26T20:51:22,208 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=160, OpenFileDescriptor=392, MaxFileDescriptor=1048576, SystemLoadAverage=176, ProcessCount=11, AvailableMemoryMB=6551
2024-11-26T20:51:22,231 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:22,235 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:22,471 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741825_1001 (size=7)
2024-11-26T20:51:22,472 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741825_1001 (size=7)
2024-11-26T20:51:22,473 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741825_1001 (size=7)
2024-11-26T20:51:22,879 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:22,879 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:22,882 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:22,894 DEBUG [Time-limited test {}] util.ClassSize(228): Using Unsafe to estimate memory layout
2024-11-26T20:51:22,917 INFO [Time-limited test {}] metrics.MetricRegistriesLoader(60): Loaded MetricRegistries class org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl
2024-11-26T20:51:22,918 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-26T20:51:22,927 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/testSyncRunnerIndexOverflow, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/oldWALs, maxLogs=1760
2024-11-26T20:51:22,973 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654282964
2024-11-26T20:51:23,036 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/testSyncRunnerIndexOverflow/wal.1732654282964
2024-11-26T20:51:23,084 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:43577:43577)]
2024-11-26T20:51:23,141 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:23,141 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:23,142 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:23,142 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:23,142 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:23,147 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741826_1002 (size=1293)
2024-11-26T20:51:23,147 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741826_1002 (size=1293)
2024-11-26T20:51:23,148 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741826_1002 (size=1293)
2024-11-26T20:51:23,157 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/oldWALs
2024-11-26T20:51:23,160 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732654282964)
2024-11-26T20:51:23,169 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=167 (was 160)
Potentially hanging thread: sync.4
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: sync.3
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: sync.2
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: sync.1
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
Potentially hanging thread: weak-ref-cleaner-strictcontextstorage
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:155)
    java.base@17.0.11/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:176)
    app//io.opentelemetry.context.StrictContextStorage$PendingScopes.run(StrictContextStorage.java:269)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: LeaseRenewer:jenkins@localhost:34261
    java.base@17.0.11/java.lang.Thread.sleep(Native Method)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:441)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$800(LeaseRenewer.java:77)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:336)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: sync.0
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.takeSyncRequest(FSHLog.java:426)
    app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:441)
 - Thread LEAK? -, OpenFileDescriptor=403 (was 392) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=176 (was 176), ProcessCount=11 (was 11), AvailableMemoryMB=6498 (was 6551)
2024-11-26T20:51:23,176 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=167, OpenFileDescriptor=403, MaxFileDescriptor=1048576, SystemLoadAverage=176, ProcessCount=11, AvailableMemoryMB=6497
2024-11-26T20:51:23,192 WARN [IPC Server handler 4 on default port 34261 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology
2024-11-26T20:51:23,192 WARN [IPC Server handler 4 on default port 34261 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]})
2024-11-26T20:51:23,193 WARN [IPC Server handler 4 on default port 34261 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}
2024-11-26T20:51:23,199 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741827_1003 (size=7)
2024-11-26T20:51:23,200 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741827_1003 (size=7)
2024-11-26T20:51:23,202 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:23,202 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:23,205 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:23,214 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-26T20:51:23,215 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/testUnflushedSeqIdTracking, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/oldWALs, maxLogs=1760
2024-11-26T20:51:23,218 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654283217
2024-11-26T20:51:23,222 WARN [IPC Server handler 3 on default port 34261 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology
2024-11-26T20:51:23,222 WARN [IPC Server handler 3 on default port 34261 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]})
2024-11-26T20:51:23,222 WARN [IPC Server handler 3 on default port 34261 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}
2024-11-26T20:51:23,228 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/testUnflushedSeqIdTracking/wal.1732654283217
2024-11-26T20:51:23,233 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:37399:37399)]
2024-11-26T20:51:23,235 INFO [Time-limited test {}] regionserver.ChunkCreator(472): data poolSizePercentage is less than 0. So not using pool
2024-11-26T20:51:23,236 INFO [Time-limited test {}] regionserver.ChunkCreator(472): index poolSizePercentage is less than 0. So not using pool
2024-11-26T20:51:23,259 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 02f2021c62e761faf3fea07254caedf1, NAME => 'testUnflushedSeqIdTracking,,1732654283234.02f2021c62e761faf3fea07254caedf1.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testUnflushedSeqIdTracking', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73
2024-11-26T20:51:23,272 WARN [IPC Server handler 3 on default port 34261 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology
2024-11-26T20:51:23,272 WARN [IPC Server handler 3 on default port 34261 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]})
2024-11-26T20:51:23,272 WARN [IPC Server handler 3 on default port 34261 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}
2024-11-26T20:51:23,279 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741829_1005 (size=61)
2024-11-26T20:51:23,280 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741829_1005 (size=61)
2024-11-26T20:51:23,284 INFO [Time-limited test {}] throttle.StoreHotnessProtector(112): StoreHotnessProtector is disabled. Set hbase.region.store.parallel.put.limit > 0 to enable, which may help mitigate load under heavy write pressure.
2024-11-26T20:51:23,289 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testUnflushedSeqIdTracking,,1732654283234.02f2021c62e761faf3fea07254caedf1.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-11-26T20:51:23,345 INFO [StoreOpener-02f2021c62e761faf3fea07254caedf1-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 02f2021c62e761faf3fea07254caedf1
2024-11-26T20:51:23,376 INFO [StoreOpener-02f2021c62e761faf3fea07254caedf1-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 02f2021c62e761faf3fea07254caedf1 columnFamilyName b
2024-11-26T20:51:23,382 DEBUG [StoreOpener-02f2021c62e761faf3fea07254caedf1-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-26T20:51:23,386 INFO [StoreOpener-02f2021c62e761faf3fea07254caedf1-1 {}] regionserver.HStore(327): Store=02f2021c62e761faf3fea07254caedf1/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-26T20:51:23,389 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 02f2021c62e761faf3fea07254caedf1
2024-11-26T20:51:23,394 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testUnflushedSeqIdTracking/02f2021c62e761faf3fea07254caedf1
2024-11-26T20:51:23,395 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testUnflushedSeqIdTracking/02f2021c62e761faf3fea07254caedf1
2024-11-26T20:51:23,396 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/8fa88c3b-0d61-6658-6d95-938072dc1e51/data/default/testUnflushedSeqIdTracking/02f2021c62e761faf3fea07254caedf1
2024-11-26T20:51:23,399 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 02f2021c62e761faf3fea07254caedf1
2024-11-26T20:51:23,399 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 02f2021c62e761faf3fea07254caedf1
2024-11-26T20:51:23,406 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 02f2021c62e761faf3fea07254caedf1
2024-11-26T20:51:23,411 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:34261/user/jenkins/test-data/8fa88c3b-0d61-6658-6d95-938072dc1e51/data/default/testUnflushedSeqIdTracking/02f2021c62e761faf3fea07254caedf1/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-11-26T20:51:23,412 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 02f2021c62e761faf3fea07254caedf1; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=74833845, jitterRate=0.11511118710041046}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-11-26T20:51:23,425 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 02f2021c62e761faf3fea07254caedf1: Writing region info on filesystem at 1732654283319Initializing all the Stores at 1732654283321 (+2 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732654283322 (+1 ms)Cleaning up temporary data from old regions at 1732654283399 (+77 ms)Region opened successfully at 1732654283423 (+24 ms)
2024-11-26T20:51:24,899 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741829_1005 (size=61)
2024-11-26T20:51:24,900 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741827_1003 (size=7)
2024-11-26T20:51:26,448 INFO [pool-60-thread-2 {}] regionserver.HRegion(2902): Flushing 02f2021c62e761faf3fea07254caedf1 1/1 column families, dataSize=24 B heapSize=352 B
2024-11-26T20:51:29,551 DEBUG [pool-60-thread-2 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/testUnflushedSeqIdTracking/02f2021c62e761faf3fea07254caedf1/.tmp/b/27d1a9fbaf734d2993c4a41038241f79 is 28, key is b/b:b/1732654283443/Put/seqid=0
2024-11-26T20:51:29,567 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741830_1006 (size=4945)
2024-11-26T20:51:29,568 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741830_1006 (size=4945)
2024-11-26T20:51:29,569 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741830_1006 (size=4945)
2024-11-26T20:51:29,569 INFO [pool-60-thread-2 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=4 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/testUnflushedSeqIdTracking/02f2021c62e761faf3fea07254caedf1/.tmp/b/27d1a9fbaf734d2993c4a41038241f79
2024-11-26T20:51:29,641 DEBUG [pool-60-thread-2 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/testUnflushedSeqIdTracking/02f2021c62e761faf3fea07254caedf1/.tmp/b/27d1a9fbaf734d2993c4a41038241f79 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/testUnflushedSeqIdTracking/02f2021c62e761faf3fea07254caedf1/b/27d1a9fbaf734d2993c4a41038241f79
2024-11-26T20:51:29,653 INFO [pool-60-thread-2 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/testUnflushedSeqIdTracking/02f2021c62e761faf3fea07254caedf1/b/27d1a9fbaf734d2993c4a41038241f79, entries=1, sequenceid=4, filesize=4.8 K
2024-11-26T20:51:29,663 INFO [pool-60-thread-2 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 02f2021c62e761faf3fea07254caedf1 in 3213ms, sequenceid=4, compaction requested=false
2024-11-26T20:51:29,664 DEBUG [pool-60-thread-2 {}] regionserver.HRegion(2603): Flush status journal for 02f2021c62e761faf3fea07254caedf1:
2024-11-26T20:51:29,664 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(193): Flush result:FLUSHED_NO_COMPACTION_NEEDED
2024-11-26T20:51:29,664 INFO [pool-60-thread-2 {}] wal.TestFSHLog$4(194): Flush succeeded:true
2024-11-26T20:51:29,665 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 02f2021c62e761faf3fea07254caedf1, disabling compactions & flushes
2024-11-26T20:51:29,665 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testUnflushedSeqIdTracking,,1732654283234.02f2021c62e761faf3fea07254caedf1.
2024-11-26T20:51:29,666 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testUnflushedSeqIdTracking,,1732654283234.02f2021c62e761faf3fea07254caedf1.
2024-11-26T20:51:29,666 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testUnflushedSeqIdTracking,,1732654283234.02f2021c62e761faf3fea07254caedf1. after waiting 0 ms
2024-11-26T20:51:29,666 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testUnflushedSeqIdTracking,,1732654283234.02f2021c62e761faf3fea07254caedf1.
2024-11-26T20:51:29,668 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testUnflushedSeqIdTracking,,1732654283234.02f2021c62e761faf3fea07254caedf1.
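The BlockPlacementPolicyDefault warnings earlier in this run name two loggers to raise to DEBUG for more placement detail. One hedged way to do that programmatically with the Log4j2 API already on this test's classpath (the class name here is hypothetical; a log4j2 properties file would work equally well):

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.core.config.Configurator;

    public class EnablePlacementDebug {
        public static void main(String[] args) {
            // Raise exactly the two loggers the WARN message points at, so
            // replica-placement decisions are logged in detail.
            Configurator.setLevel(
                "org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy",
                Level.DEBUG);
            Configurator.setLevel("org.apache.hadoop.net.NetworkTopology", Level.DEBUG);
        }
    }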
2024-11-26T20:51:29,668 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 02f2021c62e761faf3fea07254caedf1: Waiting for close lock at 1732654289665Disabling compacts and flushes for region at 1732654289665Disabling writes for close at 1732654289666 (+1 ms)Writing region close event to WAL at 1732654289668 (+2 ms)Closed at 1732654289668
2024-11-26T20:51:29,669 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,669 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,669 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,670 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,670 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,673 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741828_1004 (size=875)
2024-11-26T20:51:29,673 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741828_1004 (size=875)
2024-11-26T20:51:29,677 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/oldWALs
2024-11-26T20:51:29,677 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732654283217)
2024-11-26T20:51:29,689 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=184 (was 167)

Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data3
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:58954 [Waiting for operation #5]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: pool-60-thread-2
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: org.apache.hadoop.hdfs.PeerCache@6ce8e82c
    java.base@17.0.11/java.lang.Thread.sleep(Native Method)
    app//org.apache.hadoop.hdfs.PeerCache.run(PeerCache.java:253)
    app//org.apache.hadoop.hdfs.PeerCache.access$000(PeerCache.java:46)
    app//org.apache.hadoop.hdfs.PeerCache$1.run(PeerCache.java:124)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: HBase-Metrics2-1
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
    java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: Monitor thread for TaskMonitor
    java.base@17.0.11/java.lang.Thread.sleep(Native Method)
    app//org.apache.hadoop.hbase.monitoring.TaskMonitor$MonitorRunnable.run(TaskMonitor.java:325)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data2
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: pool-60-thread-1
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data4
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: java.util.concurrent.ThreadPoolExecutor$Worker@5fee1e1a[State = -1, empty queue]
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:401)
    java.base@17.0.11/java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:903)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: java.util.concurrent.ThreadPoolExecutor$Worker@5e183340[State = -1, empty queue]
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:401)
    java.base@17.0.11/java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:903)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data1
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=411 (was 403) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=162 (was 176), ProcessCount=11 (was 11), AvailableMemoryMB=6471 (was 6497)
2024-11-26T20:51:29,696 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALComparator Thread=184, OpenFileDescriptor=411, MaxFileDescriptor=1048576, SystemLoadAverage=162, ProcessCount=11, AvailableMemoryMB=6470
2024-11-26T20:51:29,711 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741831_1007 (size=7)
2024-11-26T20:51:29,711 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741831_1007 (size=7)
2024-11-26T20:51:29,712 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741831_1007 (size=7)
2024-11-26T20:51:29,714 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:29,714 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:29,716 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:29,723 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
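The ResourceChecker listing above pairs each potentially hanging thread with its stack trace. A self-contained illustration of how such a snapshot can be taken with the plain JDK API (this is not HBase's ResourceChecker, just a sketch in the same spirit):

    import java.util.Map;

    public class ThreadSnapshot {
        public static void main(String[] args) {
            // Capture every live thread with its current stack, similar in
            // shape to the listing logged above.
            Map<Thread, StackTraceElement[]> stacks = Thread.getAllStackTraces();
            System.out.println("Thread=" + stacks.size());
            for (Map.Entry<Thread, StackTraceElement[]> e : stacks.entrySet()) {
                System.out.println("Potentially hanging thread: " + e.getKey().getName());
                for (StackTraceElement frame : e.getValue()) {
                    System.out.println("    " + frame);
                }
            }
        }
    }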
2024-11-26T20:51:29,723 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/394c7e9d-d076-2713-b993-d08853058c27/testWALComparator, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/394c7e9d-d076-2713-b993-d08853058c27/oldWALs, maxLogs=1760
2024-11-26T20:51:29,725 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654289725
2024-11-26T20:51:29,735 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/394c7e9d-d076-2713-b993-d08853058c27/testWALComparator/wal.1732654289725
2024-11-26T20:51:29,737 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583)]
2024-11-26T20:51:29,739 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(215): Log obtained is: FSHLog wal:(num 1732654289725)
2024-11-26T20:51:29,743 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-26T20:51:29,743 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=.meta, logDir=hdfs://localhost:34261/user/jenkins/test-data/394c7e9d-d076-2713-b993-d08853058c27/testWALComparator, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/394c7e9d-d076-2713-b993-d08853058c27/oldWALs, maxLogs=1760
2024-11-26T20:51:29,745 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654289745.meta
2024-11-26T20:51:29,758 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/394c7e9d-d076-2713-b993-d08853058c27/testWALComparator/wal.1732654289745.meta
2024-11-26T20:51:29,760 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:43577:43577)]
2024-11-26T20:51:29,766 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,766 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,766 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,766 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,766 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,771 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741832_1008 (size=93)
2024-11-26T20:51:29,772 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741832_1008 (size=93)
2024-11-26T20:51:29,772 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741832_1008 (size=93)
2024-11-26T20:51:29,777 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/394c7e9d-d076-2713-b993-d08853058c27/oldWALs
2024-11-26T20:51:29,777 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732654289725)
2024-11-26T20:51:29,778 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,778 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,778 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,778 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,778 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,781 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741833_1009 (size=93)
2024-11-26T20:51:29,782 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741833_1009 (size=93)
2024-11-26T20:51:29,782 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741833_1009 (size=93)
2024-11-26T20:51:29,786 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/394c7e9d-d076-2713-b993-d08853058c27/oldWALs
2024-11-26T20:51:29,786 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:.meta(num 1732654289745)
2024-11-26T20:51:29,794 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALComparator Thread=194 (was 184) - Thread LEAK? -, OpenFileDescriptor=419 (was 411) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=162 (was 162), ProcessCount=11 (was 11), AvailableMemoryMB=6457 (was 6470)
2024-11-26T20:51:29,801 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=194, OpenFileDescriptor=419, MaxFileDescriptor=1048576, SystemLoadAverage=162, ProcessCount=11, AvailableMemoryMB=6457
2024-11-26T20:51:29,814 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741834_1010 (size=7)
2024-11-26T20:51:29,814 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741834_1010 (size=7)
2024-11-26T20:51:29,815 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741834_1010 (size=7)
2024-11-26T20:51:29,818 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:29,818 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:29,820 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:29,823 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(383): testFindMemStoresEligibleForFlush
2024-11-26T20:51:29,845 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
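testWALComparator exercises the ordering of WAL files whose names end in a millisecond timestamp, optionally with a .meta suffix (wal.1732654289725 and wal.1732654289745.meta above). A toy comparator over such names, assuming only that naming scheme and not reflecting HBase's actual implementation:

    import java.util.Arrays;
    import java.util.Comparator;

    public class WalNameComparator {
        // Extract the trailing millisecond timestamp from names like
        // "wal.1732654289725" or "wal.1732654289745.meta" (assumed scheme).
        static long timestamp(String name) {
            String s = name.endsWith(".meta")
                ? name.substring(0, name.length() - ".meta".length())
                : name;
            return Long.parseLong(s.substring(s.lastIndexOf('.') + 1));
        }

        public static void main(String[] args) {
            String[] wals = {"wal.1732654289745.meta", "wal.1732654289725"};
            Arrays.sort(wals, Comparator.comparingLong(WalNameComparator::timestamp));
            System.out.println(Arrays.toString(wals)); // oldest first
        }
    }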
2024-11-26T20:51:29,845 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs, maxLogs=1
2024-11-26T20:51:29,846 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654289846
2024-11-26T20:51:29,856 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289846
2024-11-26T20:51:29,857 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:37399:37399)]
2024-11-26T20:51:29,860 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654289860
2024-11-26T20:51:29,869 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,869 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,870 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,870 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,870 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,871 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289846 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289860
2024-11-26T20:51:29,872 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:43577:43577)]
2024-11-26T20:51:29,873 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289846 is not closed yet, will try archiving it next time
2024-11-26T20:51:29,874 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741835_1011 (size=283)
2024-11-26T20:51:29,874 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741835_1011 (size=283)
2024-11-26T20:51:29,875 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741835_1011 (size=283)
2024-11-26T20:51:29,876 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654289876
2024-11-26T20:51:29,886 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,886 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,886 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,887 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,887 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,887 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289860 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289876
2024-11-26T20:51:29,888 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583)]
2024-11-26T20:51:29,888 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289860 is not closed yet, will try archiving it next time
2024-11-26T20:51:29,889 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 385dba3af7f0e643425be97aad9dc19a[cf1]
2024-11-26T20:51:29,890 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741836_1012 (size=283)
2024-11-26T20:51:29,890 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741836_1012 (size=283)
2024-11-26T20:51:29,891 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741836_1012 (size=283)
2024-11-26T20:51:29,891 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-11-26T20:51:29,891 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 385dba3af7f0e643425be97aad9dc19a[cf1]
2024-11-26T20:51:29,893 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 385dba3af7f0e643425be97aad9dc19a[cf1]
2024-11-26T20:51:29,894 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654289893
2024-11-26T20:51:29,902 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,902 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,902 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,902 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,903 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,903 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289876 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289893
2024-11-26T20:51:29,904 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:37399:37399)]
2024-11-26T20:51:29,904 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289876 is not closed yet, will try archiving it next time
2024-11-26T20:51:29,905 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289846 to hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs/wal.1732654289846
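The "Too many WALs; count=2, max=1" records above are driven by the maxLogs=1 value in this test's WAL configuration: once the live WAL count exceeds the bound, the regions holding the oldest unflushed edits are force-flushed so older WALs can be archived, as logged here. In a deployment the corresponding knob is hbase.regionserver.maxlogs; a hedged sketch of setting it (the class name and the value 32 are illustrative, not from this run):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class WalMaxLogs {
        public static void main(String[] args) {
            Configuration conf = HBaseConfiguration.create();
            // Bound on live WAL files; exceeding it triggers the forced
            // (partial) flushes seen in the log so old WALs become archivable.
            conf.setInt("hbase.regionserver.maxlogs", 32);
            System.out.println(conf.getInt("hbase.regionserver.maxlogs", -1));
        }
    }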
2024-11-26T20:51:29,905 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-11-26T20:51:29,905 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741837_1013 (size=283)
2024-11-26T20:51:29,905 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654289905
2024-11-26T20:51:29,906 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741837_1013 (size=283)
2024-11-26T20:51:29,906 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741837_1013 (size=283)
2024-11-26T20:51:29,908 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289860 to hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs/wal.1732654289860
2024-11-26T20:51:29,910 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289876 to hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs/wal.1732654289876
2024-11-26T20:51:29,917 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,917 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,917 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,918 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,918 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,918 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289893 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289905
2024-11-26T20:51:29,921 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741838_1014 (size=93)
2024-11-26T20:51:29,921 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741838_1014 (size=93)
2024-11-26T20:51:29,921 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741838_1014 (size=93)
2024-11-26T20:51:29,922 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289893 to hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs/wal.1732654289893
2024-11-26T20:51:29,923 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:37399:37399)]
2024-11-26T20:51:29,923 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-11-26T20:51:29,926 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654289926
2024-11-26T20:51:29,935 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,935 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,935 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,936 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,936 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,936 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289905 with entries=4, filesize=465 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289926
2024-11-26T20:51:29,937 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:35583:35583)]
2024-11-26T20:51:29,937 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289905 is not closed yet, will try archiving it next time
2024-11-26T20:51:29,937 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-11-26T20:51:29,938 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741839_1015 (size=473)
2024-11-26T20:51:29,939 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741839_1015 (size=473)
2024-11-26T20:51:29,939 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741839_1015 (size=473)
2024-11-26T20:51:29,940 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654289940
2024-11-26T20:51:29,948 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,948 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,948 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,948 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,949 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,949 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289926 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289940
2024-11-26T20:51:29,950 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:37399:37399)]
2024-11-26T20:51:29,950 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289926 is not closed yet, will try archiving it next time
2024-11-26T20:51:29,950 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 385dba3af7f0e643425be97aad9dc19a[cf1],de316422ecc1a82925e70bcf86eed466[cf1]
2024-11-26T20:51:29,950 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 385dba3af7f0e643425be97aad9dc19a[cf1],de316422ecc1a82925e70bcf86eed466[cf1]
2024-11-26T20:51:29,950 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654289950
2024-11-26T20:51:29,951 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741840_1016 (size=283)
2024-11-26T20:51:29,952 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741840_1016 (size=283)
2024-11-26T20:51:29,953 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289905 to hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs/wal.1732654289905
2024-11-26T20:51:29,953 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741840_1016 (size=283)
2024-11-26T20:51:29,955 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289926 to hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs/wal.1732654289926
2024-11-26T20:51:29,960 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,960 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,960 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,960 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,961 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:29,961 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289940 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289950
2024-11-26T20:51:29,962 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:43577:43577)]
2024-11-26T20:51:29,962 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289940 is not closed yet, will try archiving it next time
2024-11-26T20:51:29,962 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-11-26T20:51:29,963 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741841_1017 (size=93)
2024-11-26T20:51:29,964 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741841_1017 (size=93)
2024-11-26T20:51:29,964 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741841_1017 (size=93)
2024-11-26T20:51:29,964 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289940 to hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs/wal.1732654289940
2024-11-26T20:51:30,065 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654290065
2024-11-26T20:51:30,076 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,076 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,076 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,076 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,077 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,077 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289950 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290065
2024-11-26T20:51:30,078 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:43577:43577)]
2024-11-26T20:51:30,078 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289950 is not closed yet, will try archiving it next time
2024-11-26T20:51:30,078 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-11-26T20:51:30,078 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654290078
2024-11-26T20:51:30,080 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741842_1018 (size=283)
2024-11-26T20:51:30,080 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741842_1018 (size=283)
2024-11-26T20:51:30,081 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741842_1018 (size=283)
2024-11-26T20:51:30,082 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654289950 to hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs/wal.1732654289950
2024-11-26T20:51:30,088 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,088 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,088 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,089 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,089 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,089 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290065 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290078
2024-11-26T20:51:30,090 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583)]
2024-11-26T20:51:30,090 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290065 is not closed yet, will try archiving it next time
2024-11-26T20:51:30,091 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741843_1019 (size=93)
2024-11-26T20:51:30,092 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741843_1019 (size=93)
2024-11-26T20:51:30,092 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741843_1019 (size=93)
2024-11-26T20:51:30,093 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(968): Archiving hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290065 to hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs/wal.1732654290065
2024-11-26T20:51:30,095 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654290095
2024-11-26T20:51:30,106 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,106 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,106 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,107 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,107 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,107 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290078 with entries=6, filesize=709 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290095
2024-11-26T20:51:30,108 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583)]
2024-11-26T20:51:30,108 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290078 is not closed yet, will try archiving it next time
2024-11-26T20:51:30,110 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741844_1020 (size=717)
2024-11-26T20:51:30,110 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741844_1020 (size=717)
2024-11-26T20:51:30,111 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741844_1020 (size=717)
2024-11-26T20:51:30,111 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654290110
2024-11-26T20:51:30,120 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,120 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,120 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,121 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,121 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,121 INFO [Time-limited test {}] wal.AbstractFSWAL(987): Rolled WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290095 with entries=2, filesize=293 B; new WAL /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290110
2024-11-26T20:51:30,122 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:43577:43577)]
2024-11-26T20:51:30,122 DEBUG [Time-limited test {}] wal.AbstractFSWAL(879): hdfs://localhost:34261/user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/testFindMemStoresEligibleForFlush/wal.1732654290095 is not closed yet, will try archiving it next time
2024-11-26T20:51:30,122 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 12eee706141df11b1dcf27b504e9f049[cf1,cf3,cf2]
2024-11-26T20:51:30,122 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1])
2024-11-26T20:51:30,123 INFO [Time-limited test {}] wal.AbstractFSWAL(843): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 12eee706141df11b1dcf27b504e9f049[cf3,cf2]
2024-11-26T20:51:30,123 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,123 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,123 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741845_1021 (size=301)
2024-11-26T20:51:30,124 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,124 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,124 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741845_1021 (size=301)
2024-11-26T20:51:30,124 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,125 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741845_1021 (size=301)
2024-11-26T20:51:30,127 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741846_1022 (size=93)
2024-11-26T20:51:30,127 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741846_1022 (size=93)
2024-11-26T20:51:30,128 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741846_1022 (size=93)
2024-11-26T20:51:30,134 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 3 WAL file(s) to /user/jenkins/test-data/7114ee35-5fc4-d42e-1b68-f38333ff955e/oldWALs
2024-11-26T20:51:30,134 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732654290110)
2024-11-26T20:51:30,142 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=199 (was 194) - Thread LEAK? -, OpenFileDescriptor=419 (was 419), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=162 (was 162), ProcessCount=11 (was 11), AvailableMemoryMB=6447 (was 6457)
2024-11-26T20:51:30,150 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=199, OpenFileDescriptor=419, MaxFileDescriptor=1048576, SystemLoadAverage=162, ProcessCount=11, AvailableMemoryMB=6446
2024-11-26T20:51:30,165 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741847_1023 (size=7)
2024-11-26T20:51:30,166 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741847_1023 (size=7)
2024-11-26T20:51:30,166 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741847_1023 (size=7)
2024-11-26T20:51:30,167 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:30,168 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:30,170 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:30,176 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-26T20:51:30,176 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/bd8629b2-6669-a2cc-b3b8-ebdca6b71282/testRollWriterForClosedWAL, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/bd8629b2-6669-a2cc-b3b8-ebdca6b71282/testRollWriterForClosedWAL, maxLogs=1760
2024-11-26T20:51:30,177 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654290177
2024-11-26T20:51:30,186 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/bd8629b2-6669-a2cc-b3b8-ebdca6b71282/testRollWriterForClosedWAL/wal.1732654290177
2024-11-26T20:51:30,190 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:35583:35583)]
2024-11-26T20:51:30,191 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,191 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,191 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,191 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,191 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:30,195 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741848_1024 (size=93)
2024-11-26T20:51:30,195 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741848_1024 (size=93)
2024-11-26T20:51:30,195 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741848_1024 (size=93)
2024-11-26T20:51:30,198 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/bd8629b2-6669-a2cc-b3b8-ebdca6b71282/testRollWriterForClosedWAL
2024-11-26T20:51:30,198 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732654290177)
2024-11-26T20:51:30,207 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testRollWriterForClosedWAL Thread=204 (was 199) - Thread LEAK? -, OpenFileDescriptor=419 (was 419), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=162 (was 162), ProcessCount=11 (was 11), AvailableMemoryMB=6444 (was 6446)
2024-11-26T20:51:30,215 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=204, OpenFileDescriptor=419, MaxFileDescriptor=1048576, SystemLoadAverage=162, ProcessCount=11, AvailableMemoryMB=6444
2024-11-26T20:51:30,228 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741849_1025 (size=7)
2024-11-26T20:51:30,228 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741849_1025 (size=7)
2024-11-26T20:51:30,228 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741849_1025 (size=7)
2024-11-26T20:51:30,230 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:30,230 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:30,232 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:30,239 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-26T20:51:30,240 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/testMaxFlushedSequenceIdGoBackwards, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/oldWALs, maxLogs=1760
2024-11-26T20:51:30,241 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654290241
2024-11-26T20:51:30,249 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/testMaxFlushedSequenceIdGoBackwards/wal.1732654290241
2024-11-26T20:51:30,249 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583)]
2024-11-26T20:51:30,251 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 1d887387860742fdc66adc2db6b67941, NAME => 'table,,1732654290250.1d887387860742fdc66adc2db6b67941.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73
2024-11-26T20:51:30,261 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741851_1027 (size=40)
2024-11-26T20:51:30,261 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741851_1027 (size=40)
2024-11-26T20:51:30,262 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741851_1027 (size=40)
2024-11-26T20:51:30,262 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1732654290250.1d887387860742fdc66adc2db6b67941.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-11-26T20:51:30,265 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,267 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 1d887387860742fdc66adc2db6b67941 columnFamilyName a
2024-11-26T20:51:30,267 DEBUG [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-26T20:51:30,267 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] regionserver.HStore(327): Store=1d887387860742fdc66adc2db6b67941/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-26T20:51:30,268 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,270 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 1d887387860742fdc66adc2db6b67941 columnFamilyName b
2024-11-26T20:51:30,270 DEBUG [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-26T20:51:30,271 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] regionserver.HStore(327): Store=1d887387860742fdc66adc2db6b67941/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-26T20:51:30,271 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,273 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/table/1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,273 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/table/1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,274 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/b55e6673-d199-fe37-4167-07f47718a3d2/data/default/table/1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,275 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,275 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,277 DEBUG [Time-limited test {}] regionserver.FlushLargeStoresPolicy(65): No hbase.hregion.percolumnfamilyflush.size.lower.bound set in table table descriptor;using region.getMemStoreFlushHeapSize/# of families (64.0 M)) instead.
2024-11-26T20:51:30,279 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,282 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:34261/user/jenkins/test-data/b55e6673-d199-fe37-4167-07f47718a3d2/data/default/table/1d887387860742fdc66adc2db6b67941/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-11-26T20:51:30,283 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 1d887387860742fdc66adc2db6b67941; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=64187449, jitterRate=-0.04353247582912445}}}, FlushLargeStoresPolicy{flushSizeLowerBound=67108864}
2024-11-26T20:51:30,286 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 1d887387860742fdc66adc2db6b67941:
    Writing region info on filesystem at 1732654290263
    Initializing all the Stores at 1732654290264 (+1 ms)
    Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732654290264
    Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732654290264
    Cleaning up temporary data from old regions at 1732654290275 (+11 ms)
    Region opened successfully at 1732654290286 (+11 ms)
2024-11-26T20:51:30,286 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 1d887387860742fdc66adc2db6b67941, disabling compactions & flushes
2024-11-26T20:51:30,286 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1732654290250.1d887387860742fdc66adc2db6b67941.
2024-11-26T20:51:30,286 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1732654290250.1d887387860742fdc66adc2db6b67941.
2024-11-26T20:51:30,286 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1732654290250.1d887387860742fdc66adc2db6b67941. after waiting 0 ms
2024-11-26T20:51:30,286 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1732654290250.1d887387860742fdc66adc2db6b67941.
2024-11-26T20:51:30,287 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1732654290250.1d887387860742fdc66adc2db6b67941.
2024-11-26T20:51:30,287 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 1d887387860742fdc66adc2db6b67941:
    Waiting for close lock at 1732654290286
    Disabling compacts and flushes for region at 1732654290286
    Disabling writes for close at 1732654290286
    Writing region close event to WAL at 1732654290287 (+1 ms)
    Closed at 1732654290287
2024-11-26T20:51:30,723 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 1d887387860742fdc66adc2db6b67941, NAME => 'table,,1732654290250.1d887387860742fdc66adc2db6b67941.', STARTKEY => '', ENDKEY => ''}
2024-11-26T20:51:30,742 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,742 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1732654290250.1d887387860742fdc66adc2db6b67941.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-11-26T20:51:30,744 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,745 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,749 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,750 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 1d887387860742fdc66adc2db6b67941 columnFamilyName a
2024-11-26T20:51:30,750 DEBUG [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-26T20:51:30,751 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] regionserver.HStore(327): Store=1d887387860742fdc66adc2db6b67941/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-26T20:51:30,752 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,753 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 1d887387860742fdc66adc2db6b67941 columnFamilyName b
2024-11-26T20:51:30,753 DEBUG [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-26T20:51:30,754 INFO [StoreOpener-1d887387860742fdc66adc2db6b67941-1 {}] regionserver.HStore(327): Store=1d887387860742fdc66adc2db6b67941/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-26T20:51:30,754 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,755 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/table/1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,756 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/table/1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,758 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/b55e6673-d199-fe37-4167-07f47718a3d2/data/default/table/1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,759 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,759 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,762 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,763 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 1d887387860742fdc66adc2db6b67941; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=69505762, jitterRate=0.03571656346321106}}}, org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL$FlushSpecificStoresPolicy@4ab59e36
2024-11-26T20:51:30,763 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for 1d887387860742fdc66adc2db6b67941
2024-11-26T20:51:30,766 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 1d887387860742fdc66adc2db6b67941:
    Running coprocessor pre-open hook at 1732654290746
    Writing region info on filesystem at 1732654290746
    Initializing all the Stores at 1732654290748 (+2 ms)
    Instantiating store for column family {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732654290748
    Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732654290748
    Cleaning up temporary data from old regions at 1732654290759 (+11 ms)
    Running coprocessor post-open hooks at 1732654290763 (+4 ms)
    Region opened successfully at 1732654290766 (+3 ms)
2024-11-26T20:51:33,784 INFO [pool-78-thread-1 {}] regionserver.HRegion(2902): Flushing 1d887387860742fdc66adc2db6b67941 2/2 column families, dataSize=96 B heapSize=896 B
2024-11-26T20:51:36,253 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties
2024-11-26T20:51:36,809 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/a/078586ba422b461a8b26d9cd041b9e34 is 28, key is a/a:a/1732654290774/Put/seqid=0
2024-11-26T20:51:36,819 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741852_1028 (size=4945)
2024-11-26T20:51:36,819 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741852_1028 (size=4945)
2024-11-26T20:51:36,820 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741852_1028 (size=4945)
2024-11-26T20:51:36,820 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/a/078586ba422b461a8b26d9cd041b9e34
2024-11-26T20:51:36,850 DEBUG [pool-78-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/b/5c27ae0b35af48638e03f3b11b41a42f is 28, key is a/b:b/1732654290774/Put/seqid=0
2024-11-26T20:51:36,858 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741853_1029 (size=4945)
2024-11-26T20:51:36,859 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741853_1029 (size=4945)
2024-11-26T20:51:36,859 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741853_1029 (size=4945)
2024-11-26T20:51:36,860 INFO [pool-78-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/b/5c27ae0b35af48638e03f3b11b41a42f
2024-11-26T20:51:36,871 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/a/078586ba422b461a8b26d9cd041b9e34 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/a/078586ba422b461a8b26d9cd041b9e34
2024-11-26T20:51:36,879 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/a/078586ba422b461a8b26d9cd041b9e34, entries=1, sequenceid=6, filesize=4.8 K
2024-11-26T20:51:36,881 DEBUG [pool-78-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/b/5c27ae0b35af48638e03f3b11b41a42f as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/b/5c27ae0b35af48638e03f3b11b41a42f
2024-11-26T20:51:36,890 INFO [pool-78-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/b/5c27ae0b35af48638e03f3b11b41a42f, entries=1, sequenceid=6, filesize=4.8 K
2024-11-26T20:51:36,892 INFO [pool-78-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~96 B/96, heapSize ~864 B/864, currentSize=0 B/0 for 1d887387860742fdc66adc2db6b67941 in 3108ms, sequenceid=6, compaction requested=false
2024-11-26T20:51:36,892 DEBUG [pool-78-thread-1 {}] regionserver.HRegion(2603): Flush status journal for 1d887387860742fdc66adc2db6b67941:
2024-11-26T20:51:36,893 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(676): Flush result:FLUSHED_NO_COMPACTION_NEEDED
2024-11-26T20:51:36,893 INFO [pool-78-thread-1 {}] wal.AbstractTestFSWAL(677): Flush succeeded:true
2024-11-26T20:51:36,898 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 1d887387860742fdc66adc2db6b67941 1/2 column families, dataSize=48 B heapSize=704 B; a={dataSize=24 B, heapSize=352 B, offHeapSize=0 B}
2024-11-26T20:51:36,904 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/a/44317646cb7f434d996a852182dbac12 is 28, key is a/a:a/1732654290774/Put/seqid=0
2024-11-26T20:51:36,912 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741854_1030 (size=4945)
2024-11-26T20:51:36,913 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741854_1030 (size=4945)
2024-11-26T20:51:36,913 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741854_1030 (size=4945)
2024-11-26T20:51:36,914 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=10 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/a/44317646cb7f434d996a852182dbac12
2024-11-26T20:51:36,924 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/a/44317646cb7f434d996a852182dbac12 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/a/44317646cb7f434d996a852182dbac12
2024-11-26T20:51:36,932 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/a/44317646cb7f434d996a852182dbac12, entries=1, sequenceid=10, filesize=4.8 K
2024-11-26T20:51:36,934 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=24 B/24 for 1d887387860742fdc66adc2db6b67941 in 36ms, sequenceid=10, compaction requested=false
2024-11-26T20:51:36,935 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for 1d887387860742fdc66adc2db6b67941:
2024-11-26T20:51:36,936 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 1d887387860742fdc66adc2db6b67941, disabling compactions & flushes
2024-11-26T20:51:36,936 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region table,,1732654290250.1d887387860742fdc66adc2db6b67941.
2024-11-26T20:51:36,936 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1732654290250.1d887387860742fdc66adc2db6b67941.
2024-11-26T20:51:36,936 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1732654290250.1d887387860742fdc66adc2db6b67941. after waiting 0 ms
2024-11-26T20:51:36,936 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1732654290250.1d887387860742fdc66adc2db6b67941.
2024-11-26T20:51:36,936 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 1d887387860742fdc66adc2db6b67941 2/2 column families, dataSize=24 B heapSize=608 B
2024-11-26T20:51:36,943 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/b/a37a30b2825e49918b2b1ed0dc5876a8 is 28, key is a/b:b/1732654290774/Put/seqid=0
2024-11-26T20:51:36,951 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741855_1031 (size=4945)
2024-11-26T20:51:36,952 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741855_1031 (size=4945)
2024-11-26T20:51:36,952 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741855_1031 (size=4945)
2024-11-26T20:51:36,953 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=13 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/b/a37a30b2825e49918b2b1ed0dc5876a8
2024-11-26T20:51:36,965 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/.tmp/b/a37a30b2825e49918b2b1ed0dc5876a8 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/b/a37a30b2825e49918b2b1ed0dc5876a8
2024-11-26T20:51:36,975 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/1d887387860742fdc66adc2db6b67941/b/a37a30b2825e49918b2b1ed0dc5876a8, entries=1, sequenceid=13, filesize=4.8 K
2024-11-26T20:51:36,977 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 1d887387860742fdc66adc2db6b67941 in 41ms, sequenceid=13, compaction requested=false
2024-11-26T20:51:36,986 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:34261/user/jenkins/test-data/b55e6673-d199-fe37-4167-07f47718a3d2/data/default/table/1d887387860742fdc66adc2db6b67941/recovered.edits/16.seqid, newMaxSeqId=16, maxSeqId=1
2024-11-26T20:51:36,987 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1732654290250.1d887387860742fdc66adc2db6b67941.
2024-11-26T20:51:36,987 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 1d887387860742fdc66adc2db6b67941:
    Waiting for close lock at 1732654296936
    Running coprocessor pre-close hooks at 1732654296936
    Disabling compacts and flushes for region at 1732654296936
    Disabling writes for close at 1732654296936
    Obtaining lock to block concurrent updates at 1732654296936
    Preparing flush snapshotting stores in 1d887387860742fdc66adc2db6b67941 at 1732654296936
    Finished memstore snapshotting table,,1732654290250.1d887387860742fdc66adc2db6b67941., syncing WAL and waiting on mvcc, flushsize=dataSize=24, getHeapSize=576, getOffHeapSize=0, getCellsCount=1 at 1732654296937 (+1 ms)
    Flushing stores of table,,1732654290250.1d887387860742fdc66adc2db6b67941. at 1732654296938 (+1 ms)
    Flushing 1d887387860742fdc66adc2db6b67941/b: creating writer at 1732654296938
    Flushing 1d887387860742fdc66adc2db6b67941/b: appending metadata at 1732654296942 (+4 ms)
    Flushing 1d887387860742fdc66adc2db6b67941/b: closing flushed file at 1732654296942
    Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@21deb1a6: reopening flushed file at 1732654296964 (+22 ms)
    Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for 1d887387860742fdc66adc2db6b67941 in 41ms, sequenceid=13, compaction requested=false at 1732654296977 (+13 ms)
    Writing region close event to WAL at 1732654296979 (+2 ms)
    Running coprocessor post-close hooks at 1732654296987 (+8 ms)
    Closed at 1732654296987
2024-11-26T20:51:36,988 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:36,988 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:36,988 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:36,988 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:36,988 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:36,991 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741850_1026 (size=2357)
2024-11-26T20:51:36,991 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741850_1026 (size=2357)
2024-11-26T20:51:36,992 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741850_1026 (size=2357)
2024-11-26T20:51:36,995 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/oldWALs
2024-11-26T20:51:36,995 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732654290241)
2024-11-26T20:51:37,003 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=211 (was 204)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:36248 [Waiting for operation #5]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:46674 [Waiting for operation #9]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Timer for 'HBase' metrics system
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    java.base@17.0.11/java.util.TimerThread.mainLoop(Timer.java:563)
    java.base@17.0.11/java.util.TimerThread.run(Timer.java:516)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:42456 [Waiting for operation #5]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=425 (was 419) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=149 (was 162), ProcessCount=11 (was 11), AvailableMemoryMB=6397 (was 6444)
2024-11-26T20:51:37,012 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=211, OpenFileDescriptor=425, MaxFileDescriptor=1048576, SystemLoadAverage=149, ProcessCount=11, AvailableMemoryMB=6397
2024-11-26T20:51:37,025 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741856_1032 (size=7)
2024-11-26T20:51:37,026 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741856_1032 (size=7)
2024-11-26T20:51:37,026 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741856_1032 (size=7)
2024-11-26T20:51:37,027 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:37,028 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:37,030 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:37,040 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(244): No decryptEncryptedDataEncryptionKey method in DFSClient, should be hadoop version with HDFS-12396
java.lang.NoSuchMethodException: org.apache.hadoop.hdfs.DFSClient.decryptEncryptedDataEncryptionKey(org.apache.hadoop.fs.FileEncryptionInfo)
    at java.lang.Class.getDeclaredMethod(Class.java:2675) ~[?:?]
    at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelperWithoutHDFS12396(FanOutOneBlockAsyncDFSOutputSaslHelper.java:183) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT]
    at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelper(FanOutOneBlockAsyncDFSOutputSaslHelper.java:242) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT]
    at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.<clinit>(FanOutOneBlockAsyncDFSOutputSaslHelper.java:253) ~[hbase-asyncfs-4.0.0-alpha-1-SNAPSHOT.jar:4.0.0-alpha-1-SNAPSHOT]
    at java.lang.Class.forName0(Native Method) ~[?:?]
    at java.lang.Class.forName(Class.java:375) ~[?:?]
    at org.apache.hadoop.hbase.wal.AsyncFSWALProvider.load(AsyncFSWALProvider.java:150) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.WALFactory.getProviderClass(WALFactory.java:174) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:262) ~[classes/:?]
    at org.apache.hadoop.hbase.wal.WALFactory.<init>(WALFactory.java:214) ~[classes/:?]
    at org.apache.hadoop.hbase.HBaseTestingUtil.createWal(HBaseTestingUtil.java:2160) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT]
    at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2205) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT]
    at org.apache.hadoop.hbase.HBaseTestingUtil.createRegionAndWAL(HBaseTestingUtil.java:2169) ~[test-classes/:4.0.0-alpha-1-SNAPSHOT]
    at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFlushSequenceIdIsGreaterThanAllEditsInHFile(AbstractTestFSWAL.java:425) ~[test-classes/:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2]
    at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-11-26T20:51:37,044 INFO [Time-limited test {}] wal.WALFactory(196): Instantiating WALProvider of type class org.apache.hadoop.hbase.wal.AsyncFSWALProvider
2024-11-26T20:51:37,048 DEBUG [Time-limited test {}] channel.MultithreadEventLoopGroup(44): -Dio.netty.eventLoopThreads: 16
2024-11-26T20:51:37,060 DEBUG [Time-limited test {}] nio.NioEventLoop(110): -Dio.netty.noKeySetOptimization: false
2024-11-26T20:51:37,060 DEBUG [Time-limited test {}] nio.NioEventLoop(111): -Dio.netty.selectorAutoRebuildThreshold: 512
2024-11-26T20:51:37,075 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor defaultMonitorName
2024-11-26T20:51:37,079 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-26T20:51:37,079 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=hregion-31578796, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/WALs/hregion-31578796, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/oldWALs, maxLogs=1760
2024-11-26T20:51:37,096 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputHelper(524): When create output stream for /user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/WALs/hregion-31578796/hregion-31578796.1732654297080, exclude list is [], retry=0
2024-11-26T20:51:37,109 DEBUG [Time-limited test {}] channel.DefaultChannelId(84): -Dio.netty.processId: 7428 (auto-detected)
2024-11-26T20:51:37,112 DEBUG [Time-limited test {}] channel.DefaultChannelId(106): -Dio.netty.machineId: 02:42:ac:ff:fe:11:00:02 (auto-detected)
2024-11-26T20:51:37,134 DEBUG [AsyncFSWAL-1-3 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:46741,DS-cf310024-d2b0-45d4-a2bc-d0ccbd465339,DISK]
2024-11-26T20:51:37,134 DEBUG [AsyncFSWAL-1-2 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:42739,DS-2a500c98-adc1-4d80-a12e-b74467ed64f4,DISK]
2024-11-26T20:51:37,134 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:35013,DS-33209b92-4879-4099-9df4-a8673cfc3827,DISK]
2024-11-26T20:51:37,137 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.ProtobufDecoder(117): Hadoop 3.3 and above shades protobuf.
2024-11-26T20:51:37,167 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/WALs/hregion-31578796/hregion-31578796.1732654297080
2024-11-26T20:51:37,167 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new AsyncFSWAL writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:37399:37399)]
2024-11-26T20:51:37,167 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => 415bc1d550e7aba2222e2a5efb3f392b, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testFlushSequenceIdIsGreaterThanAllEditsInHFile', {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632
2024-11-26T20:51:37,178 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741858_1034 (size=82)
2024-11-26T20:51:37,178 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741858_1034 (size=82)
2024-11-26T20:51:37,178 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741858_1034 (size=82)
2024-11-26T20:51:37,179 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-11-26T20:51:37,181 INFO [StoreOpener-415bc1d550e7aba2222e2a5efb3f392b-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region 415bc1d550e7aba2222e2a5efb3f392b
2024-11-26T20:51:37,183 INFO [StoreOpener-415bc1d550e7aba2222e2a5efb3f392b-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 415bc1d550e7aba2222e2a5efb3f392b columnFamilyName f
2024-11-26T20:51:37,183 DEBUG [StoreOpener-415bc1d550e7aba2222e2a5efb3f392b-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-26T20:51:37,184 INFO [StoreOpener-415bc1d550e7aba2222e2a5efb3f392b-1 {}] regionserver.HStore(327): Store=415bc1d550e7aba2222e2a5efb3f392b/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-26T20:51:37,184 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 415bc1d550e7aba2222e2a5efb3f392b
2024-11-26T20:51:37,185 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b
2024-11-26T20:51:37,185 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b
2024-11-26T20:51:37,186 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b
2024-11-26T20:51:37,186 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 415bc1d550e7aba2222e2a5efb3f392b
2024-11-26T20:51:37,187 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 415bc1d550e7aba2222e2a5efb3f392b
2024-11-26T20:51:37,189 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 415bc1d550e7aba2222e2a5efb3f392b
2024-11-26T20:51:37,192 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-11-26T20:51:37,193 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 415bc1d550e7aba2222e2a5efb3f392b; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=65446901, jitterRate=-0.024765178561210632}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-11-26T20:51:37,196 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 415bc1d550e7aba2222e2a5efb3f392b:
    Writing region info on filesystem at 1732654297179
    Initializing all the Stores at 1732654297180 (+1 ms)
    Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732654297180
    Cleaning up temporary data from old regions at 1732654297187 (+7 ms)
    Region opened successfully at 1732654297196 (+9 ms)
2024-11-26T20:51:37,196 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 415bc1d550e7aba2222e2a5efb3f392b, disabling compactions & flushes
2024-11-26T20:51:37,196 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b.
2024-11-26T20:51:37,196 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b.
2024-11-26T20:51:37,196 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b. after waiting 0 ms 2024-11-26T20:51:37,196 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b. 2024-11-26T20:51:37,197 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b. 2024-11-26T20:51:37,197 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 415bc1d550e7aba2222e2a5efb3f392b: Waiting for close lock at 1732654297196Disabling compacts and flushes for region at 1732654297196Disabling writes for close at 1732654297196Writing region close event to WAL at 1732654297197 (+1 ms)Closed at 1732654297197 2024-11-26T20:51:37,202 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741857_1033 (size=93) 2024-11-26T20:51:37,203 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741857_1033 (size=93) 2024-11-26T20:51:37,203 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741857_1033 (size=93) 2024-11-26T20:51:37,207 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/oldWALs 2024-11-26T20:51:37,208 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: AsyncFSWAL hregion-31578796:(num 1732654297080) 2024-11-26T20:51:37,210 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
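The cycle above (create region 415bc1d550e7aba2222e2a5efb3f392b, open it, close it, archive the hregion-31578796 WAL) is setup for TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile, named in the ResourceChecker entries further below. A minimal sketch of the invariant that test name describes, assuming current HBaseTestingUtil/HRegion APIs and using a plain mini-cluster rather than the hand-built FSHLog with injected append delays that the logged run uses ("Sleeping before appending 100ms" below); the row and f:x0..x9 qualifiers mirror the put keys visible in the flush entries, while the class name and overall structure are illustrative only:

import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.util.Bytes;

public class FlushSeqIdSketch {
    public static void main(String[] args) throws Exception {
        HBaseTestingUtil util = new HBaseTestingUtil();
        util.startMiniCluster();
        try {
            TableName tn = TableName.valueOf("testFlushSequenceIdIsGreaterThanAllEditsInHFile");
            byte[] f = Bytes.toBytes("f");
            try (Table table = util.createTable(tn, f)) {
                byte[] row = Bytes.toBytes(tn.getNameAsString());
                for (int i = 0; i < 10; i++) { // ten edits, matching entries=10 in the flush below
                    table.put(new Put(row).addColumn(f, Bytes.toBytes("x" + i), Bytes.toBytes(i)));
                }
            }
            HRegion region = util.getMiniHBaseCluster().getRegions(tn).get(0);
            region.flush(true); // persists the memstore into an HFile under .tmp/f, then commits it
            long maxEditSeqId = region.getStore(f).getMaxSequenceId().orElse(-1); // highest seqid in HFiles
            if (region.getMaxFlushedSeqId() < maxEditSeqId) {
                throw new AssertionError("flush sequence id must cover every edit in the HFile");
            }
        } finally {
            util.shutdownMiniCluster();
        }
    }
}

In the logged run the flush completes at sequenceid=23 while appends are still being delayed, which is exactly the race this invariant guards against: the flush sequence id must be at least as large as the sequence id of every edit persisted into 14eb4d986901494fa05d722fcfcc10d9.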
2024-11-26T20:51:37,211 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/testFlushSequenceIdIsGreaterThanAllEditsInHFile, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/testFlushSequenceIdIsGreaterThanAllEditsInHFile, maxLogs=1760 2024-11-26T20:51:37,212 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654297211 2024-11-26T20:51:37,219 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/testFlushSequenceIdIsGreaterThanAllEditsInHFile/wal.1732654297211 2024-11-26T20:51:37,220 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:37399:37399)] 2024-11-26T20:51:37,221 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-26T20:51:37,223 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => 415bc1d550e7aba2222e2a5efb3f392b, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b.', STARTKEY => '', ENDKEY => ''} 2024-11-26T20:51:37,223 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-26T20:51:37,223 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for 415bc1d550e7aba2222e2a5efb3f392b 2024-11-26T20:51:37,223 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for 415bc1d550e7aba2222e2a5efb3f392b 2024-11-26T20:51:37,225 INFO [StoreOpener-415bc1d550e7aba2222e2a5efb3f392b-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region 415bc1d550e7aba2222e2a5efb3f392b 2024-11-26T20:51:37,227 INFO [StoreOpener-415bc1d550e7aba2222e2a5efb3f392b-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 415bc1d550e7aba2222e2a5efb3f392b columnFamilyName f 2024-11-26T20:51:37,227 DEBUG [StoreOpener-415bc1d550e7aba2222e2a5efb3f392b-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-26T20:51:37,228 INFO 
[StoreOpener-415bc1d550e7aba2222e2a5efb3f392b-1 {}] regionserver.HStore(327): Store=415bc1d550e7aba2222e2a5efb3f392b/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-26T20:51:37,228 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for 415bc1d550e7aba2222e2a5efb3f392b 2024-11-26T20:51:37,229 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b 2024-11-26T20:51:37,229 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b 2024-11-26T20:51:37,231 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b 2024-11-26T20:51:37,232 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for 415bc1d550e7aba2222e2a5efb3f392b 2024-11-26T20:51:37,232 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for 415bc1d550e7aba2222e2a5efb3f392b 2024-11-26T20:51:37,234 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for 415bc1d550e7aba2222e2a5efb3f392b 2024-11-26T20:51:37,236 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened 415bc1d550e7aba2222e2a5efb3f392b; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=74926118, jitterRate=0.11648616194725037}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-11-26T20:51:37,238 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for 415bc1d550e7aba2222e2a5efb3f392b: Writing region info on filesystem at 1732654297223Initializing all the Stores at 1732654297225 (+2 ms)Instantiating store for column family {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732654297225Cleaning up temporary data from old regions at 1732654297232 (+7 ms)Region opened successfully at 1732654297238 (+6 ms) 2024-11-26T20:51:37,254 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.log.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir so I do NOT create it in target/test-data/81870a8f-aad6-ed5b-2197-5aa1bdbef18f 2024-11-26T20:51:37,255 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.log.dir property value differs in configuration and system: Configuration=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/../logs while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir 
Erasing configuration value by system value. 2024-11-26T20:51:37,255 INFO [Time-limited test {}] hbase.HBaseTestingUtil(401): System.getProperty("hadoop.tmp.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.tmp.dir so I do NOT create it in target/test-data/81870a8f-aad6-ed5b-2197-5aa1bdbef18f 2024-11-26T20:51:37,255 WARN [Time-limited test {}] hbase.HBaseTestingUtil(405): hadoop.tmp.dir property value differs in configuration and system: Configuration=/tmp/hadoop-jenkins while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.tmp.dir Erasing configuration value by system value. 2024-11-26T20:51:37,255 DEBUG [Time-limited test {}] hbase.HBaseTestingUtil(323): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/81870a8f-aad6-ed5b-2197-5aa1bdbef18f 2024-11-26T20:51:37,281 INFO [Time-limited test {}] regionserver.HRegion(2902): Flushing 415bc1d550e7aba2222e2a5efb3f392b 1/1 column families, dataSize=1.14 KB heapSize=2.13 KB 2024-11-26T20:51:37,381 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:37,481 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:37,582 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:37,682 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:37,783 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:37,883 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:37,983 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:38,084 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:38,184 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:38,285 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:38,385 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:38,406 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in 
hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b/.tmp/f/14eb4d986901494fa05d722fcfcc10d9 is 121, key is testFlushSequenceIdIsGreaterThanAllEditsInHFile/f:x0/1732654297255/Put/seqid=0 2024-11-26T20:51:38,414 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741860_1036 (size=6333) 2024-11-26T20:51:38,414 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741860_1036 (size=6333) 2024-11-26T20:51:38,415 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741860_1036 (size=6333) 2024-11-26T20:51:38,416 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=1.14 KB at sequenceid=23 (bloomFilter=true), to=hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b/.tmp/f/14eb4d986901494fa05d722fcfcc10d9 2024-11-26T20:51:38,427 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b/.tmp/f/14eb4d986901494fa05d722fcfcc10d9 as hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b/f/14eb4d986901494fa05d722fcfcc10d9 2024-11-26T20:51:38,436 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1990): Added hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/415bc1d550e7aba2222e2a5efb3f392b/f/14eb4d986901494fa05d722fcfcc10d9, entries=10, sequenceid=23, filesize=6.2 K 2024-11-26T20:51:38,537 DEBUG [FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df-prefix:default {}] wal.AbstractTestFSWAL$1(442): Sleeping before appending 100ms 2024-11-26T20:51:38,539 INFO [Time-limited test {}] regionserver.HRegion(3140): Finished flush of dataSize ~1.14 KB/1170, heapSize ~2.11 KB/2160, currentSize=0 B/0 for 415bc1d550e7aba2222e2a5efb3f392b in 1257ms, sequenceid=23, compaction requested=false 2024-11-26T20:51:38,539 DEBUG [Time-limited test {}] regionserver.HRegion(2603): Flush status journal for 415bc1d550e7aba2222e2a5efb3f392b: 2024-11-26T20:51:38,539 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing 415bc1d550e7aba2222e2a5efb3f392b, disabling compactions & flushes 2024-11-26T20:51:38,539 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b. 2024-11-26T20:51:38,539 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b. 2024-11-26T20:51:38,539 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b. 
after waiting 0 ms 2024-11-26T20:51:38,539 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b. 2024-11-26T20:51:38,540 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1732654297033.415bc1d550e7aba2222e2a5efb3f392b. 2024-11-26T20:51:38,540 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for 415bc1d550e7aba2222e2a5efb3f392b: Waiting for close lock at 1732654298539Disabling compacts and flushes for region at 1732654298539Disabling writes for close at 1732654298539Writing region close event to WAL at 1732654298540 (+1 ms)Closed at 1732654298540 2024-11-26T20:51:38,541 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-26T20:51:38,541 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-26T20:51:38,541 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-26T20:51:38,541 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-26T20:51:38,541 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted 2024-11-26T20:51:38,544 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741859_1035 (size=16537) 2024-11-26T20:51:38,544 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741859_1035 (size=16537) 2024-11-26T20:51:38,544 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741859_1035 (size=16537) 2024-11-26T20:51:38,547 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/23baedec-0074-668a-e81b-446fecd876df/testFlushSequenceIdIsGreaterThanAllEditsInHFile 2024-11-26T20:51:38,547 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: wal:(num 1732654297211) 2024-11-26T20:51:38,557 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=221 (was 211) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data5 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:36248 [Waiting for operation #8] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) 
java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: AsyncFSWAL-1-2 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146) app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_1193318952_22 at /127.0.0.1:42456 [Waiting for operation #6] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: AsyncFSWAL-1-3 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) 
java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146) app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: AsyncFSWAL-1-1 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146) app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data6 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=457 (was 425) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=137 (was 149), ProcessCount=11 (was 11), AvailableMemoryMB=6376 (was 6397) 2024-11-26T20:51:38,565 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=221, OpenFileDescriptor=457, MaxFileDescriptor=1048576, SystemLoadAverage=137, ProcessCount=11, AvailableMemoryMB=6376 2024-11-26T20:51:38,578 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741861_1037 (size=7) 2024-11-26T20:51:38,578 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741861_1037 (size=7) 2024-11-26T20:51:38,578 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741861_1037 (size=7) 2024-11-26T20:51:38,579 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8 2024-11-26T20:51:38,580 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-26T20:51:38,582 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-26T20:51:38,586 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-11-26T20:51:38,587 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/oldWALs, maxLogs=1760 2024-11-26T20:51:38,587 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654298587 2024-11-26T20:51:38,594 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298587 2024-11-26T20:51:38,596 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:43577:43577)] 2024-11-26T20:51:38,597 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654298597 2024-11-26T20:51:38,604 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654298598 2024-11-26T20:51:38,609 WARN [Time-limited test {}] wal.AbstractProtobufLogWriter(199): Init output failed, path=hdfs://localhost:34261/user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298598 java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at 
org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?] at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?] at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?] at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?] at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?] at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?] 
at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy44.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] 
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy45.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 41 more 2024-11-26T20:51:38,611 DEBUG [Time-limited test {}] wal.FSHLogProvider(93): Error instantiating log writer. java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?] at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?] at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?] at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?] at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?] 
at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:115) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?] at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:82) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:259) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:51) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:406) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at 
org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy44.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] 
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy45.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?]
    ... 41 more
2024-11-26T20:51:38,623 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=248 (was 221)

Potentially hanging thread: PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:35013]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:42739, 127.0.0.1:35013]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: ResponseProcessor for block BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180)

Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298597 block BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717)

Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:36318 [Receiving block BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: ResponseProcessor for block BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180)

Potentially hanging thread: PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:35013]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:36312 [Receiving block BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:42739, 127.0.0.1:35013]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82)
    app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:42548 [Receiving block BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:46758 [Receiving block BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038, type=LAST_IN_PIPELINE
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    java.base@17.0.11/java.lang.Object.wait(Object.java:338)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039, type=LAST_IN_PIPELINE
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    java.base@17.0.11/java.lang.Object.wait(Object.java:338)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: FSHLog-0-hdfs://localhost:34261/user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0-prefix:default
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:46752 [Receiving block BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:42532 [Receiving block BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343)
    java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151)
    app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553)
    app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)

Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298587 block BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038
    java.base@17.0.11/java.lang.Object.wait(Native Method)
    app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717)

 - Thread LEAK? -, OpenFileDescriptor=483 (was 457) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=137 (was 137), ProcessCount=11 (was 11), AvailableMemoryMB=6371 (was 6376)
2024-11-26T20:51:38,632 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=248, OpenFileDescriptor=483, MaxFileDescriptor=1048576, SystemLoadAverage=137, ProcessCount=11, AvailableMemoryMB=6370
2024-11-26T20:51:38,643 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741864_1040 (size=7)
2024-11-26T20:51:38,643 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741864_1040 (size=7)
2024-11-26T20:51:38,643 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741864_1040 (size=7)
2024-11-26T20:51:38,645 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:38,645 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:38,647 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:38,653 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-26T20:51:38,653 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/12a55768-2015-2a1a-36aa-8f0f87275d0d/testWALCoprocessorLoaded, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/12a55768-2015-2a1a-36aa-8f0f87275d0d/oldWALs, maxLogs=1760
2024-11-26T20:51:38,654 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654298654
2024-11-26T20:51:38,692 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/12a55768-2015-2a1a-36aa-8f0f87275d0d/testWALCoprocessorLoaded/wal.1732654298654
2024-11-26T20:51:38,694 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:37399:37399)]
2024-11-26T20:51:38,695 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,695 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,695 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,695 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,695 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,698 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741865_1041 (size=93)
2024-11-26T20:51:38,698 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741865_1041 (size=93)
2024-11-26T20:51:38,698 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741865_1041 (size=93)
2024-11-26T20:51:38,701 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/12a55768-2015-2a1a-36aa-8f0f87275d0d/oldWALs
2024-11-26T20:51:38,701 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732654298654)
2024-11-26T20:51:38,711 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=253 (was 248) - Thread LEAK? -, OpenFileDescriptor=495 (was 483) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=137 (was 137), ProcessCount=11 (was 11), AvailableMemoryMB=6367 (was 6370)
2024-11-26T20:51:38,719 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=253, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=137, ProcessCount=11, AvailableMemoryMB=6367
2024-11-26T20:51:38,731 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741866_1042 (size=7)
2024-11-26T20:51:38,731 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741866_1042 (size=7)
2024-11-26T20:51:38,731 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741866_1042 (size=7)
2024-11-26T20:51:38,732 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:38,733 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:38,734 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:38,739 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-26T20:51:38,739 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/6e9aee87-22f8-c4a7-9467-32ca58019c68/testSyncNoAppend, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/6e9aee87-22f8-c4a7-9467-32ca58019c68/testSyncNoAppend, maxLogs=1760
2024-11-26T20:51:38,740 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654298740
2024-11-26T20:51:38,747 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/6e9aee87-22f8-c4a7-9467-32ca58019c68/testSyncNoAppend/wal.1732654298740
2024-11-26T20:51:38,748 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:43577:43577),(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:35583:35583)]
2024-11-26T20:51:38,749 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,749 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,749 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,749 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,749 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,752 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741867_1043 (size=93)
2024-11-26T20:51:38,752 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741867_1043 (size=93)
2024-11-26T20:51:38,752 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741867_1043 (size=93)
2024-11-26T20:51:38,755 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/6e9aee87-22f8-c4a7-9467-32ca58019c68/testSyncNoAppend
2024-11-26T20:51:38,755 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732654298740)
2024-11-26T20:51:38,764 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=258 (was 253) - Thread LEAK? -, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=137 (was 137), ProcessCount=11 (was 11), AvailableMemoryMB=6365 (was 6367)
2024-11-26T20:51:38,773 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=258, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=137, ProcessCount=11, AvailableMemoryMB=6364
2024-11-26T20:51:38,783 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741868_1044 (size=7)
2024-11-26T20:51:38,783 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741868_1044 (size=7)
2024-11-26T20:51:38,784 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741868_1044 (size=7)
2024-11-26T20:51:38,785 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:38,785 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:38,787 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:38,791 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-26T20:51:38,791 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/5902751f-f9bb-f5f7-5d6e-57a7e1a8f09e/testWriteEntryCanBeNull, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/5902751f-f9bb-f5f7-5d6e-57a7e1a8f09e/testWriteEntryCanBeNull, maxLogs=1760
2024-11-26T20:51:38,792 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654298791
2024-11-26T20:51:38,799 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/5902751f-f9bb-f5f7-5d6e-57a7e1a8f09e/testWriteEntryCanBeNull/wal.1732654298791
2024-11-26T20:51:38,803 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:43577:43577)]
2024-11-26T20:51:38,804 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,805 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,805 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,805 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,805 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:38,807 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741869_1045 (size=93)
2024-11-26T20:51:38,807 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741869_1045 (size=93)
2024-11-26T20:51:38,808 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741869_1045 (size=93)
2024-11-26T20:51:38,810 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/5902751f-f9bb-f5f7-5d6e-57a7e1a8f09e/testWriteEntryCanBeNull
2024-11-26T20:51:38,811 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732654298791)
2024-11-26T20:51:38,824 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=263 (was 258) - Thread LEAK? -, OpenFileDescriptor=495 (was 495), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=137 (was 137), ProcessCount=11 (was 11), AvailableMemoryMB=6362 (was 6364)
2024-11-26T20:51:38,834 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=263, OpenFileDescriptor=495, MaxFileDescriptor=1048576, SystemLoadAverage=137, ProcessCount=11, AvailableMemoryMB=6360
2024-11-26T20:51:38,844 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741870_1046 (size=7)
2024-11-26T20:51:38,845 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741870_1046 (size=7)
2024-11-26T20:51:38,845 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741870_1046 (size=7)
2024-11-26T20:51:38,846 INFO [Time-limited test {}] util.FSUtils(489): Created version file at hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632 with version=8
2024-11-26T20:51:38,847 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:38,848 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-26T20:51:38,853 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-26T20:51:38,854 INFO [Time-limited test {}] wal.AbstractFSWAL(613): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/testUnflushedSeqIdTrackingWithAsyncWal, archiveDir=hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/oldWALs, maxLogs=1760 2024-11-26T20:51:38,854 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1732654298854 2024-11-26T20:51:38,861 INFO [Time-limited test {}] wal.AbstractFSWAL(991): New WAL /user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/testUnflushedSeqIdTrackingWithAsyncWal/wal.1732654298854 2024-11-26T20:51:38,862 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1109): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:35583:35583),(127.0.0.1/127.0.0.1:37399:37399),(127.0.0.1/127.0.0.1:43577:43577)] 2024-11-26T20:51:38,863 INFO [Time-limited test {}] regionserver.HRegion(7572): creating {ENCODED => b503af128fefc5972a500b104be2d9fa, NAME => 'table,,1732654298862.b503af128fefc5972a500b104be2d9fa.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73 2024-11-26T20:51:38,873 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741872_1048 (size=40) 2024-11-26T20:51:38,874 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741872_1048 (size=40) 2024-11-26T20:51:38,874 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741872_1048 (size=40) 2024-11-26T20:51:38,875 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1732654298862.b503af128fefc5972a500b104be2d9fa.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-26T20:51:38,877 INFO [StoreOpener-b503af128fefc5972a500b104be2d9fa-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,879 INFO [StoreOpener-b503af128fefc5972a500b104be2d9fa-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 
b503af128fefc5972a500b104be2d9fa columnFamilyName b 2024-11-26T20:51:38,879 DEBUG [StoreOpener-b503af128fefc5972a500b104be2d9fa-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-26T20:51:38,879 INFO [StoreOpener-b503af128fefc5972a500b104be2d9fa-1 {}] regionserver.HStore(327): Store=b503af128fefc5972a500b104be2d9fa/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-26T20:51:38,880 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,880 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/table/b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,881 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/table/b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,881 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3675c0c7-5e27-0602-93eb-e0c978fb31e3/data/default/table/b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,882 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,882 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,884 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,887 DEBUG [Time-limited test {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:34261/user/jenkins/test-data/3675c0c7-5e27-0602-93eb-e0c978fb31e3/data/default/table/b503af128fefc5972a500b104be2d9fa/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-11-26T20:51:38,887 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened b503af128fefc5972a500b104be2d9fa; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=64580772, jitterRate=-0.03767150640487671}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-11-26T20:51:38,891 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for b503af128fefc5972a500b104be2d9fa: Writing region info on filesystem at 1732654298875Initializing all the Stores at 1732654298876 (+1 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732654298876Cleaning up temporary data from old regions at 1732654298882 (+6 ms)Region opened successfully at 1732654298891 (+9 ms) 2024-11-26T20:51:38,891 DEBUG [Time-limited test {}] regionserver.HRegion(1722): Closing b503af128fefc5972a500b104be2d9fa, disabling compactions & flushes 2024-11-26T20:51:38,891 INFO [Time-limited test {}] regionserver.HRegion(1755): Closing region 
table,,1732654298862.b503af128fefc5972a500b104be2d9fa. 2024-11-26T20:51:38,891 DEBUG [Time-limited test {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1732654298862.b503af128fefc5972a500b104be2d9fa. 2024-11-26T20:51:38,892 DEBUG [Time-limited test {}] regionserver.HRegion(1843): Acquired close lock on table,,1732654298862.b503af128fefc5972a500b104be2d9fa. after waiting 0 ms 2024-11-26T20:51:38,892 DEBUG [Time-limited test {}] regionserver.HRegion(1853): Updates disabled for region table,,1732654298862.b503af128fefc5972a500b104be2d9fa. 2024-11-26T20:51:38,892 INFO [Time-limited test {}] regionserver.HRegion(1973): Closed table,,1732654298862.b503af128fefc5972a500b104be2d9fa. 2024-11-26T20:51:38,893 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for b503af128fefc5972a500b104be2d9fa: Waiting for close lock at 1732654298891Disabling compacts and flushes for region at 1732654298891Disabling writes for close at 1732654298892 (+1 ms)Writing region close event to WAL at 1732654298892Closed at 1732654298892 2024-11-26T20:51:38,894 DEBUG [Time-limited test {}] regionserver.HRegion(7752): Opening region: {ENCODED => b503af128fefc5972a500b104be2d9fa, NAME => 'table,,1732654298862.b503af128fefc5972a500b104be2d9fa.', STARTKEY => '', ENDKEY => ''} 2024-11-26T20:51:38,895 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,895 DEBUG [Time-limited test {}] regionserver.HRegion(898): Instantiated table,,1732654298862.b503af128fefc5972a500b104be2d9fa.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-26T20:51:38,895 DEBUG [Time-limited test {}] regionserver.HRegion(7794): checking encryption for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,895 DEBUG [Time-limited test {}] regionserver.HRegion(7797): checking classloading for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,897 INFO [StoreOpener-b503af128fefc5972a500b104be2d9fa-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,899 INFO [StoreOpener-b503af128fefc5972a500b104be2d9fa-1 {}] compactions.CompactionConfiguration(183): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region b503af128fefc5972a500b104be2d9fa columnFamilyName b 2024-11-26T20:51:38,899 DEBUG [StoreOpener-b503af128fefc5972a500b104be2d9fa-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-26T20:51:38,900 INFO [StoreOpener-b503af128fefc5972a500b104be2d9fa-1 {}] regionserver.HStore(327): 
Store=b503af128fefc5972a500b104be2d9fa/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-26T20:51:38,900 DEBUG [Time-limited test {}] regionserver.HRegion(1038): replaying wal for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,901 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/table/b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,901 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/data/default/table/b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,903 DEBUG [Time-limited test {}] regionserver.HRegion(5546): Found 0 recovered edits file(s) under hdfs://localhost:34261/user/jenkins/test-data/3675c0c7-5e27-0602-93eb-e0c978fb31e3/data/default/table/b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,903 DEBUG [Time-limited test {}] regionserver.HRegion(1048): stopping wal replay for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,903 DEBUG [Time-limited test {}] regionserver.HRegion(1060): Cleaning up temporary data for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,906 DEBUG [Time-limited test {}] regionserver.HRegion(1093): writing seq id for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,907 INFO [Time-limited test {}] regionserver.HRegion(1114): Opened b503af128fefc5972a500b104be2d9fa; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=74723567, jitterRate=0.11346791684627533}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-11-26T20:51:38,907 DEBUG [Time-limited test {}] regionserver.HRegion(1122): Running coprocessor post-open hooks for b503af128fefc5972a500b104be2d9fa 2024-11-26T20:51:38,908 DEBUG [Time-limited test {}] regionserver.HRegion(1006): Region open journal for b503af128fefc5972a500b104be2d9fa: Running coprocessor pre-open hook at 1732654298895Writing region info on filesystem at 1732654298895Initializing all the Stores at 1732654298896 (+1 ms)Instantiating store for column family {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'} at 1732654298897 (+1 ms)Cleaning up temporary data from old regions at 1732654298903 (+6 ms)Running coprocessor post-open hooks at 1732654298907 (+4 ms)Region opened successfully at 1732654298908 (+1 ms) 2024-11-26T20:51:39,546 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor 2024-11-26T20:51:39,547 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor Metrics about HBase WALObservers 2024-11-26T20:51:39,548 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=TableRequests_Namespace_default_table_table 2024-11-26T20:51:39,548 
2024-11-26T20:51:41,915 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1722): Closing b503af128fefc5972a500b104be2d9fa, disabling compactions & flushes
2024-11-26T20:51:41,915 INFO [pool-106-thread-1 {}] regionserver.HRegion(1755): Closing region table,,1732654298862.b503af128fefc5972a500b104be2d9fa.
2024-11-26T20:51:41,915 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1776): Time limited wait for close lock on table,,1732654298862.b503af128fefc5972a500b104be2d9fa.
2024-11-26T20:51:41,915 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1843): Acquired close lock on table,,1732654298862.b503af128fefc5972a500b104be2d9fa. after waiting 0 ms
2024-11-26T20:51:41,915 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1853): Updates disabled for region table,,1732654298862.b503af128fefc5972a500b104be2d9fa.
2024-11-26T20:51:41,916 INFO [pool-106-thread-1 {}] regionserver.HRegion(2902): Flushing b503af128fefc5972a500b104be2d9fa 1/1 column families, dataSize=48 B heapSize=448 B
2024-11-26T20:51:42,489 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties
2024-11-26T20:51:44,935 DEBUG [pool-106-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/b503af128fefc5972a500b104be2d9fa/.tmp/b/d51a48fc8ab646a8b64a612146c615d5 is 28, key is b/b:b/1732654298911/Put/seqid=0
2024-11-26T20:51:44,942 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741873_1049 (size=4945)
2024-11-26T20:51:44,942 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741873_1049 (size=4945)
2024-11-26T20:51:44,942 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741873_1049 (size=4945)
2024-11-26T20:51:44,943 INFO [pool-106-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/b503af128fefc5972a500b104be2d9fa/.tmp/b/d51a48fc8ab646a8b64a612146c615d5
2024-11-26T20:51:44,953 DEBUG [pool-106-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/b503af128fefc5972a500b104be2d9fa/.tmp/b/d51a48fc8ab646a8b64a612146c615d5 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/b503af128fefc5972a500b104be2d9fa/b/d51a48fc8ab646a8b64a612146c615d5
2024-11-26T20:51:44,961 INFO [pool-106-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1990): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/b503af128fefc5972a500b104be2d9fa/b/d51a48fc8ab646a8b64a612146c615d5, entries=1, sequenceid=6, filesize=4.8 K
2024-11-26T20:51:44,963 INFO [pool-106-thread-1 {}] regionserver.HRegion(3140): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for b503af128fefc5972a500b104be2d9fa in 3047ms, sequenceid=6, compaction requested=false
2024-11-26T20:51:44,968 DEBUG [pool-106-thread-1 {}] wal.WALSplitUtil(410): Wrote file=hdfs://localhost:34261/user/jenkins/test-data/3675c0c7-5e27-0602-93eb-e0c978fb31e3/data/default/table/b503af128fefc5972a500b104be2d9fa/recovered.edits/9.seqid, newMaxSeqId=9, maxSeqId=1
2024-11-26T20:51:44,969 INFO [pool-106-thread-1 {}] regionserver.HRegion(1973): Closed table,,1732654298862.b503af128fefc5972a500b104be2d9fa.
2024-11-26T20:51:44,969 DEBUG [pool-106-thread-1 {}] regionserver.HRegion(1676): Region close journal for b503af128fefc5972a500b104be2d9fa:
    Waiting for close lock at 1732654301915
    Running coprocessor pre-close hooks at 1732654301915
    Disabling compacts and flushes for region at 1732654301915
    Disabling writes for close at 1732654301915
    Obtaining lock to block concurrent updates at 1732654301916 (+1 ms)
    Preparing flush snapshotting stores in b503af128fefc5972a500b104be2d9fa at 1732654301916
    Finished memstore snapshotting table,,1732654298862.b503af128fefc5972a500b104be2d9fa., syncing WAL and waiting on mvcc, flushsize=dataSize=48, getHeapSize=432, getOffHeapSize=0, getCellsCount=2 at 1732654301917 (+1 ms)
    Flushing stores of table,,1732654298862.b503af128fefc5972a500b104be2d9fa. at 1732654304917 (+3000 ms)
    Flushing b503af128fefc5972a500b104be2d9fa/b: creating writer at 1732654304917
    Flushing b503af128fefc5972a500b104be2d9fa/b: appending metadata at 1732654304934 (+17 ms)
    Flushing b503af128fefc5972a500b104be2d9fa/b: closing flushed file at 1732654304934
    Flushing org.apache.hadoop.hbase.regionserver.HStore$StoreFlusherImpl@1956c81a: reopening flushed file at 1732654304952 (+18 ms)
    Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for b503af128fefc5972a500b104be2d9fa in 3047ms, sequenceid=6, compaction requested=false at 1732654304963 (+11 ms)
    Writing region close event to WAL at 1732654304964 (+1 ms)
    Running coprocessor post-close hooks at 1732654304969 (+5 ms)
    Closed at 1732654304969
2024-11-26T20:51:44,969 INFO [pool-106-thread-1 {}] wal.AbstractTestFSWAL(620): Close result:{[B@33fa91b7=[/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/data/default/table/b503af128fefc5972a500b104be2d9fa/b/d51a48fc8ab646a8b64a612146c615d5]}
2024-11-26T20:51:44,969 WARN [Time-limited test {}] regionserver.HRegion(1707): Region table,,1732654298862.b503af128fefc5972a500b104be2d9fa. already closed
2024-11-26T20:51:44,970 DEBUG [Time-limited test {}] regionserver.HRegion(1676): Region close journal for b503af128fefc5972a500b104be2d9fa: Waiting for close lock at 1732654304969
2024-11-26T20:51:44,970 INFO [sync.0 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:44,970 INFO [sync.1 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:44,970 INFO [sync.2 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:44,970 INFO [sync.3 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:44,971 INFO [sync.4 {}] wal.FSHLog$SyncRunner(477): interrupted
2024-11-26T20:51:44,973 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:35013 is added to blk_1073741871_1047 (size=1206)
2024-11-26T20:51:44,973 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:46741 is added to blk_1073741871_1047 (size=1206)
2024-11-26T20:51:44,973 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:42739 is added to blk_1073741871_1047 (size=1206)
2024-11-26T20:51:44,976 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1256): Moved 1 WAL file(s) to /user/jenkins/test-data/3541a0f1-8578-01ba-edfd-ca01c5bd7632/oldWALs
2024-11-26T20:51:44,976 INFO [Time-limited test {}] wal.AbstractFSWAL(1259): Closed WAL: FSHLog wal:(num 1732654298854)
2024-11-26T20:51:44,986 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=267 (was 263)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:54948 [Waiting for operation #3]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:44428 [Waiting for operation #3]
    java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method)
    java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129)
    java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
    app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335)
    app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
    app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
    java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244)
    java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263)
    java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334)
    java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312)
    app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72)
    app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=505 (was 495) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=126 (was 137), ProcessCount=11 (was 11), AvailableMemoryMB=6342 (was 6360)
2024-11-26T20:51:44,986 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1019): Shutting down minicluster
2024-11-26T20:51:44,988 WARN [PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039, type=LAST_IN_PIPELINE {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-2040341567-172.17.0.2-1732654280033:1073741863
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-11-26T20:51:44,988 WARN [ResponseProcessor for block BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039
java.io.IOException: Bad response ERROR for BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039 from datanode DatanodeInfoWithStorage[127.0.0.1:35013,DS-33209b92-4879-4099-9df4-a8673cfc3827,DISK]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-26T20:51:44,989 WARN [DataStreamer for file /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298597 block BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039 {}] hdfs.DataStreamer(1731): Error Recovery for BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039 in pipeline [DatanodeInfoWithStorage[127.0.0.1:46741,DS-cf310024-d2b0-45d4-a2bc-d0ccbd465339,DISK], DatanodeInfoWithStorage[127.0.0.1:42739,DS-2a500c98-adc1-4d80-a12e-b74467ed64f4,DISK], DatanodeInfoWithStorage[127.0.0.1:35013,DS-33209b92-4879-4099-9df4-a8673cfc3827,DISK]]: datanode 2(DatanodeInfoWithStorage[127.0.0.1:35013,DS-33209b92-4879-4099-9df4-a8673cfc3827,DISK]) is bad.
2024-11-26T20:51:44,989 WARN [PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:42739, 127.0.0.1:35013] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-11-26T20:51:44,989 WARN [PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:35013] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-11-26T20:51:44,990 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:36318 [Receiving block BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039] {}] datanode.BlockReceiver(316): Block 1073741863 has not released the reserved bytes. Releasing 2097067 bytes as part of close.
2024-11-26T20:51:44,994 WARN [DataStreamer for file /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298597 block BP-2040341567-172.17.0.2-1732654280033:blk_1073741863_1039 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298597 (inode 16549) Holder DFSClient_NONMAPREDUCE_-1558872134_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    [... the same reflective proxy frame group (jdk.internal.reflect.*.invoke / java.lang.reflect.Method.invoke / org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) / jdk.proxy2.$Proxy46.getAdditionalDatanode) repeated many more times, elided ...]
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-26T20:51:44,995 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298597 with renewLeaseKey: DEFAULT_16549
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298597 (inode 16549) Holder DFSClient_NONMAPREDUCE_-1558872134_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    [... the same reflective proxy frame group (jdk.internal.reflect.*.invoke / java.lang.reflect.Method.invoke / org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) / jdk.proxy2.$Proxy46.getAdditionalDatanode) repeated many more times, elided ...]
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-26T20:51:44,996 WARN [PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038, type=LAST_IN_PIPELINE {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException: Replica does not exist BP-2040341567-172.17.0.2-1732654280033:1073741862
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getReplicaInfo(FsDatasetImpl.java:897) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.getStorageUuidForLock(FsDatasetImpl.java:905) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.finalizeBlock(FsDatasetImpl.java:1975) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.finalizeBlock(BlockReceiver.java:1563) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1514) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-11-26T20:51:44,997 WARN [ResponseProcessor for block BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038
java.io.IOException: Bad response ERROR for BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038 from datanode DatanodeInfoWithStorage[127.0.0.1:35013,DS-33209b92-4879-4099-9df4-a8673cfc3827,DISK]
    at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1223) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-26T20:51:44,997 WARN [DataStreamer for file /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298587 block BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038 {}] hdfs.DataStreamer(1731): Error Recovery for BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038 in pipeline [DatanodeInfoWithStorage[127.0.0.1:46741,DS-cf310024-d2b0-45d4-a2bc-d0ccbd465339,DISK], DatanodeInfoWithStorage[127.0.0.1:42739,DS-2a500c98-adc1-4d80-a12e-b74467ed64f4,DISK], DatanodeInfoWithStorage[127.0.0.1:35013,DS-33209b92-4879-4099-9df4-a8673cfc3827,DISK]]: datanode 2(DatanodeInfoWithStorage[127.0.0.1:35013,DS-33209b92-4879-4099-9df4-a8673cfc3827,DISK]) is bad.
2024-11-26T20:51:44,997 WARN [PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:42739, 127.0.0.1:35013] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-11-26T20:51:44,997 WARN [PacketResponder: BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:35013] {}] datanode.BlockReceiver$PacketResponder(1529): IOException in PacketResponder.run():
java.io.IOException: Connection reset by peer
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method) ~[?:?]
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:62) ~[?:?]
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:132) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:97) ~[?:?]
    at sun.nio.ch.IOUtil.write(IOUtil.java:53) ~[?:?]
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:532) ~[?:?]
    at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:141) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:158) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:116) ~[hadoop-common-3.4.1.jar:?]
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:81) ~[?:?]
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:142) ~[?:?]
    at java.io.DataOutputStream.flush(DataOutputStream.java:128) ~[?:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstreamUnprotected(BlockReceiver.java:1681) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.sendAckUpstream(BlockReceiver.java:1612) ~[hadoop-hdfs-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1520) ~[hadoop-hdfs-3.4.1.jar:?]
    at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-11-26T20:51:44,998 WARN [DataXceiver for client DFSClient_NONMAPREDUCE_-1558872134_22 at /127.0.0.1:36312 [Receiving block BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038] {}] datanode.BlockReceiver(316): Block 1073741862 has not released the reserved bytes. Releasing 2097067 bytes as part of close.
2024-11-26T20:51:44,998 WARN [DataStreamer for file /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298587 block BP-2040341567-172.17.0.2-1732654280033:blk_1073741862_1038 {}] hdfs.DataStreamer(859): DataStreamer Exception
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298587 (inode 16548) Holder DFSClient_NONMAPREDUCE_-1558872134_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    ... (the preceding five reflection/proxy frames repeat verbatim many more times; repetitions omitted) ...
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-26T20:51:44,999 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298587 with renewLeaseKey: DEFAULT_16548
org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/faaadf8e-5b3c-c583-795f-27c8c74d30c0/testFailedToCreateWALIfParentRenamed/wal.1732654298587 (inode 16548) Holder DFSClient_NONMAPREDUCE_-1558872134_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:3123)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:963)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolServerSideTranslatorPB.java:627)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.getAdditionalDatanode(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getAdditionalDatanode$12(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getAdditionalDatanode(ClientNamenodeProtocolTranslatorPB.java:520) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy45.getAdditionalDatanode(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor79.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy46.getAdditionalDatanode(Unknown Source) ~[?:?]
    ... (the preceding five reflection/proxy frames repeat verbatim many more times; repetitions omitted) ...
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1505) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-26T20:51:45,003 WARN [BP-2040341567-172.17.0.2-1732654280033 heartbeating to localhost/127.0.0.1:34261 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-2040341567-172.17.0.2-1732654280033 (Datanode Uuid 30466a83-cf79-46c2-a64a-fea7fe737379) service to localhost/127.0.0.1:34261
2024-11-26T20:51:45,004 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@4b2b884e{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-26T20:51:45,004 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data5/current/BP-2040341567-172.17.0.2-1732654280033 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-26T20:51:45,004 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data6/current/BP-2040341567-172.17.0.2-1732654280033 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-26T20:51:45,006 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@fa4aa4c{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-26T20:51:45,006 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-26T20:51:45,006 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@47ddd06a{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-26T20:51:45,006 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@4c49fcd3{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir/,STOPPED}
2024-11-26T20:51:45,010 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-11-26T20:51:45,012 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@28637041{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-26T20:51:45,012 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@5e18b7fd{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-26T20:51:45,012 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-26T20:51:45,031 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@243038a3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-26T20:51:45,031 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@443ad5c2{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir/,STOPPED}
2024-11-26T20:51:45,033 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-11-26T20:51:45,033 WARN [BP-2040341567-172.17.0.2-1732654280033 heartbeating to localhost/127.0.0.1:34261 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-11-26T20:51:45,033 WARN [BP-2040341567-172.17.0.2-1732654280033 heartbeating to localhost/127.0.0.1:34261 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-2040341567-172.17.0.2-1732654280033 (Datanode Uuid cc3eb308-f8d7-48da-b191-5e8a836cb6c7) service to localhost/127.0.0.1:34261
2024-11-26T20:51:45,033 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-11-26T20:51:45,033 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data3/current/BP-2040341567-172.17.0.2-1732654280033 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-26T20:51:45,034 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data4/current/BP-2040341567-172.17.0.2-1732654280033 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-26T20:51:45,034 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-11-26T20:51:45,036 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@411b19f7{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-26T20:51:45,036 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@64bb503e{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-26T20:51:45,036 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-26T20:51:45,037 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@43a917ce{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-26T20:51:45,037 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@208945{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir/,STOPPED}
2024-11-26T20:51:45,038 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-11-26T20:51:45,038 WARN [BP-2040341567-172.17.0.2-1732654280033 heartbeating to localhost/127.0.0.1:34261 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-11-26T20:51:45,038 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-11-26T20:51:45,038 WARN [BP-2040341567-172.17.0.2-1732654280033 heartbeating to localhost/127.0.0.1:34261 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-2040341567-172.17.0.2-1732654280033 (Datanode Uuid ded40a5e-296c-4218-99d4-3df239036265) service to localhost/127.0.0.1:34261
2024-11-26T20:51:45,038 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data1/current/BP-2040341567-172.17.0.2-1732654280033 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-26T20:51:45,039 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/cluster_8b6485fb-d1b3-c394-4937-f1824aaca9b2/data/data2/current/BP-2040341567-172.17.0.2-1732654280033 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-26T20:51:45,039 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-11-26T20:51:45,048 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@5599def{hdfs,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-11-26T20:51:45,049 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@493ba8a1{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-26T20:51:45,049 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-26T20:51:45,049 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@61b73bb3{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-26T20:51:45,049 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@746f7db{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_master/hbase-server/target/test-data/f5c1877f-457c-5441-9f8a-ba5e021e0c73/hadoop.log.dir/,STOPPED}
2024-11-26T20:51:45,089 INFO [Time-limited test {}] hbase.HBaseTestingUtil(1026): Minicluster is down