Diagnosed failure: ThreadSanitizer reported a data race in ClientTest.TestCompactionOfSoftDeletedAndRecalledTable. During test teardown (MiniCluster/TabletServer shutdown), the main thread destroys a MemTracker — via OpTracker::~OpTracker inside TabletReplica::~TabletReplica — while worker thread T114 is still executing an in-flight op's apply path (OpDriver::ApplyTask -> OpTracker::Release -> MemTracker::Release) and reads the same tracker's child-tracker vector. The only "synchronization" TSan sees between the two accesses is a sleep loop in OpTracker::WaitForAllToFinish, which is not a happens-before edge, so the shutdown path can race ahead of the still-running apply task. The full TSan report follows.

ClientTest.TestCompactionOfSoftDeletedAndRecalledTable: WARNING: ThreadSanitizer: data race (pid=5023)  Write of size 8 at 0x7b4400151748 by main thread:
    #0 std::__1::__vector_base<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::__destruct_at_end(kudu::MemTracker**) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:429:12 (libkudu_util.so+0x3a6c30)
    #1 std::__1::__vector_base<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libkudu_util.so+0x3a6b44)
    #2 std::__1::__vector_base<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libkudu_util.so+0x3a693b)
    #3 std::__1::vector<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libkudu_util.so+0x39ebf1)
    #4 kudu::MemTracker::~MemTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:83:1 (libkudu_util.so+0x39e7a3)
    #5 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3503:23 (libkudu_util.so+0x3a5a65)
    #6 std::__1::__shared_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3341:9 (libtablet_test_util.so+0x35465)
    #7 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3383:27 (libtablet_test_util.so+0x35409)
    #8 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #9 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #10 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #11 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #12 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #13 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #14 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #15 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #16 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #17 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #18 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #19 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #20 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #21 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #22 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #23 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #24 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #25 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #26 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #27 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #28 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #29 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #30 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #31 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #32 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #33 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #34 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #35 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #36 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #37 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #38 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #39 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #40 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #41 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #42 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #43 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #44 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #45 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #46 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous read of size 8 at 0x7b4400151748 by thread T114 (mutexes: write M920558637112824480):
    #0 std::__1::vector<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::end() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:1536:30 (libkudu_util.so+0x39fb0a)
    #1 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:284:22 (libkudu_util.so+0x39d1e6)
    #2 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #3 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #4 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #5 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #6 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #7 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #8 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #9 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #10 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #11 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #12 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #13 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #14 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #15 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #16 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #17 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #18 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #19 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #20 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Location is heap block of size 264 at 0x7b44001516c0 allocated by thread T88:
    #0 operator new(unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:64 (client-test+0x4f1827)
    #1 std::__1::__libcpp_allocate(unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:253:10 (libkudu_util.so+0x29a206)
    #2 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::allocate(unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1789:34 (libkudu_util.so+0x3a5345)
    #3 std::__1::enable_if<!(is_array<long&>::value), std::__1::shared_ptr<long&> >::type std::__1::make_shared<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler&&...) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4290:45 (libkudu_util.so+0x3a5121)
    #4 std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/make_shared.h:61:12 (libkudu_util.so+0x39ea55)
    #5 kudu::MemTracker::CreateTracker(long, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:54:34 (libkudu_util.so+0x39ca7e)
    #6 kudu::tablet::OpTracker::StartMemoryTracking(std::__1::shared_ptr<kudu::MemTracker> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:311:20 (libtablet.so+0x41dc9b)
    #7 kudu::tablet::TabletReplica::Start(kudu::consensus::ConsensusBootstrapInfo const&, std::__1::shared_ptr<kudu::tablet::Tablet>, kudu::clock::Clock*, std::__1::shared_ptr<kudu::rpc::Messenger>, scoped_refptr<kudu::rpc::ResultTracker>, scoped_refptr<kudu::log::Log>, kudu::ThreadPool*, kudu::DnsResolver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:277:19 (libtablet.so+0x3d0656)
    #8 kudu::tserver::TSTabletManager::OpenTablet(scoped_refptr<kudu::tablet::TabletReplica> const&, scoped_refptr<kudu::tserver::TransitionInProgressDeleter> const&, std::__1::atomic<int>*, std::__1::atomic<int>*, kudu::Timer*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1430:18 (libtserver.so+0x2b9b20)
    #9 kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:703:3 (libtserver.so+0x2cda91)
    #10 decltype(std::__1::forward<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11&>(fp)()) std::__1::__invoke<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11&>(kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, 
std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtserver.so+0x2cda39)
    #11 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11&>(kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtserver.so+0x2cd9c9)
    #12 std::__1::__function::__alloc_func<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11, std::__1::allocator<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtserver.so+0x2cd991)
    #13 std::__1::__function::__func<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11, std::__1::allocator<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtserver.so+0x2ccc6d)
    #14 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #15 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #16 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #17 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #18 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #19 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #20 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #21 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #22 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #23 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #24 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  Thread T88 'tablet-open [wo' (tid=8801, finished) created by thread T188 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:703:3 (libtserver.so+0x2b6d12)
    #7 kudu::tserver::TabletServiceAdminImpl::CreateTablet(kudu::tserver::CreateTabletRequestPB const*, kudu::tserver::CreateTabletResponsePB*, kudu::rpc::RpcContext*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_service.cc:1475:34 (libtserver.so+0x267199)
    #8 kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1::operator()(google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*) const /home/jenkins-slave/workspace/build_and_test_flaky/build/tsan/src/kudu/tserver/tserver_admin.service.cc:189:13 (libtserver_admin_proto.so+0x68be4)
    #9 decltype(std::__1::forward<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1&>(fp)(std::__1::forward<google::protobuf::Message const*>(fp0), std::__1::forward<google::protobuf::Message*>(fp0), std::__1::forward<kudu::rpc::RpcContext*>(fp0))) std::__1::__invoke<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1&, google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*>(kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1&, google::protobuf::Message const*&&, google::protobuf::Message*&&, kudu::rpc::RpcContext*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtserver_admin_proto.so+0x68b72)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1&, google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*>(kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1&, google::protobuf::Message const*&&, google::protobuf::Message*&&, kudu::rpc::RpcContext*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtserver_admin_proto.so+0x68aa1)
    #11 std::__1::__function::__alloc_func<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1, std::__1::allocator<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1>, void (google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*)>::operator()(google::protobuf::Message const*&&, google::protobuf::Message*&&, kudu::rpc::RpcContext*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtserver_admin_proto.so+0x68a1c)
    #12 std::__1::__function::__func<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1, std::__1::allocator<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1>, void (google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*)>::operator()(google::protobuf::Message const*&&, google::protobuf::Message*&&, kudu::rpc::RpcContext*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtserver_admin_proto.so+0x67cd2)
    #13 std::__1::__function::__value_func<void (google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*)>::operator()(google::protobuf::Message const*&&, google::protobuf::Message*&&, kudu::rpc::RpcContext*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libkrpc.so+0x1f3c4c)
    #14 std::__1::function<void (google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*)>::operator()(google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libkrpc.so+0x1f3086)
    #15 kudu::rpc::GeneratedServiceIf::Handle(kudu::rpc::InboundCall*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/rpc/service_if.cc:137:3 (libkrpc.so+0x1f2a2f)
    #16 kudu::rpc::ServicePool::RunThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/rpc/service_pool.cc:229:15 (libkrpc.so+0x1f5d43)
    #17 kudu::rpc::ServicePool::Init(int)::$_0::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/rpc/service_pool.cc:92:5 (libkrpc.so+0x1f7061)
    #18 decltype(std::__1::forward<kudu::rpc::ServicePool::Init(int)::$_0&>(fp)()) std::__1::__invoke<kudu::rpc::ServicePool::Init(int)::$_0&>(kudu::rpc::ServicePool::Init(int)::$_0&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkrpc.so+0x1f7019)
    #19 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::rpc::ServicePool::Init(int)::$_0&>(kudu::rpc::ServicePool::Init(int)::$_0&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkrpc.so+0x1f6fa9)
    #20 std::__1::__function::__alloc_func<kudu::rpc::ServicePool::Init(int)::$_0, std::__1::allocator<kudu::rpc::ServicePool::Init(int)::$_0>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkrpc.so+0x1f6f71)
    #21 std::__1::__function::__func<kudu::rpc::ServicePool::Init(int)::$_0, std::__1::allocator<kudu::rpc::ServicePool::Init(int)::$_0>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkrpc.so+0x1f626d)
    #22 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #23 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #24 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)


ClientTest.TestCompactionOfSoftDeletedAndRecalledTable: WARNING: ThreadSanitizer: data race (pid=5023)  Write of size 8 at 0x7b08000a5380 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<kudu::MemTracker*>::deallocate(kudu::MemTracker**, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a6d0a)
    #6 std::__1::allocator_traits<std::__1::allocator<kudu::MemTracker*> >::deallocate(std::__1::allocator<kudu::MemTracker*>&, kudu::MemTracker**, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1533:14 (libkudu_util.so+0x3a6b89)
    #7 std::__1::__vector_base<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:466:9 (libkudu_util.so+0x3a6967)
    #8 std::__1::vector<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libkudu_util.so+0x39ebf1)
    #9 kudu::MemTracker::~MemTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:83:1 (libkudu_util.so+0x39e7a3)
    #10 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3503:23 (libkudu_util.so+0x3a5a65)
    #11 std::__1::__shared_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3341:9 (libtablet_test_util.so+0x35465)
    #12 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3383:27 (libtablet_test_util.so+0x35409)
    #13 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #14 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #15 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #16 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #17 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #18 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #19 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #20 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #21 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #22 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #23 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #24 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #25 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #26 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #27 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #28 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #29 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #30 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #31 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #32 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #33 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #34 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #35 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #36 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #37 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #38 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #39 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #40 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #41 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #42 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #43 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #44 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #45 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #46 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #47 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #48 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #49 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #50 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #51 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous read of size 8 at 0x7b08000a5380 by thread T114 (mutexes: write M920558637112824480):
    #0 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:285:5 (libkudu_util.so+0x39d222)
    #1 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #2 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #3 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #4 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #5 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #6 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #7 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #8 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #9 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #10 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #11 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #12 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #13 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #14 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #15 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #16 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #17 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #18 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #19 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)


ClientTest.TestCompactionOfSoftDeletedAndRecalledTable: WARNING: ThreadSanitizer: data race (pid=5023)  Write of size 8 at 0x7b4400150e70 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::deallocate(std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a612d)
    #6 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3515:9 (libkudu_util.so+0x3a5ad0)
    #7 std::__1::__shared_weak_count::__release_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/libcxx/src/memory.cpp (libc++.so.1+0xbde46)
    #8 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3384:9 (libtablet_test_util.so+0x35415)
    #9 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #10 kudu::MemTracker::~MemTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:83:1 (libkudu_util.so+0x39e7ab)
    #11 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3503:23 (libkudu_util.so+0x3a5a65)
    #12 std::__1::__shared_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3341:9 (libtablet_test_util.so+0x35465)
    #13 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3383:27 (libtablet_test_util.so+0x35409)
    #14 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #15 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #16 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #17 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #18 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #19 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #20 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #21 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #22 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #23 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #24 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #25 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #26 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #27 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #28 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #29 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #30 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #31 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #32 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #33 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #34 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #35 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #36 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #37 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #38 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #39 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #40 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #41 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #42 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #43 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #44 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #45 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #46 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #47 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #48 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #49 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #50 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #51 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #52 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous atomic write of size 8 at 0x7b4400150e70 by thread T114 (mutexes: write M920558637112824480):
    #0 __tsan_atomic64_fetch_add /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp:620 (client-test+0x4c53d5)
    #1 long std::__1::__cxx_atomic_fetch_add<long>(std::__1::__cxx_atomic_base_impl<long>*, long, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1042:12 (libkudu_util.so+0x2a5163)
    #2 std::__1::__atomic_base<long, true>::fetch_add(long, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1706:17 (libkudu_util.so+0x2a50a9)
    #3 kudu::HighWaterMark::IncrementBy(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:70:30 (libkudu_util.so+0x39fbc2)
    #4 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:285:27 (libkudu_util.so+0x39d231)
    #5 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #6 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #7 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #8 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #13 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #14 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #15 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #16 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #17 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #18 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #19 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #20 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #21 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #22 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #23 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)


ClientTest.TestCompactionOfSoftDeletedAndRecalledTable: WARNING: ThreadSanitizer: data race (pid=5023)  Write of size 8 at 0x7b4400150e78 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::deallocate(std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a612d)
    #6 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3515:9 (libkudu_util.so+0x3a5ad0)
    #7 std::__1::__shared_weak_count::__release_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/libcxx/src/memory.cpp (libc++.so.1+0xbde46)
    #8 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3384:9 (libtablet_test_util.so+0x35415)
    #9 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #10 kudu::MemTracker::~MemTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:83:1 (libkudu_util.so+0x39e7ab)
    #11 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3503:23 (libkudu_util.so+0x3a5a65)
    #12 std::__1::__shared_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3341:9 (libtablet_test_util.so+0x35465)
    #13 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3383:27 (libtablet_test_util.so+0x35409)
    #14 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #15 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #16 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #17 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #18 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #19 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #20 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #21 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #22 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #23 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #24 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #25 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #26 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #27 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #28 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #29 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #30 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #31 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #32 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #33 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #34 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #35 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #36 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #37 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #38 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #39 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #40 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #41 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #42 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #43 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #44 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #45 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #46 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #47 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #48 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #49 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #50 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #51 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #52 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous atomic write of size 8 at 0x7b4400150e78 by thread T114 (mutexes: write M920558637112824480):
    #0 __tsan_atomic64_compare_exchange_val /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp:850 (client-test+0x4cc4a8)
    #1 bool std::__1::__cxx_atomic_compare_exchange_weak<long>(std::__1::__cxx_atomic_base_impl<long>*, long*, long, std::__1::memory_order, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1031:12 (liblog.so+0x8bce4)
    #2 std::__1::__atomic_base<long, false>::compare_exchange_weak(long&, long, std::__1::memory_order, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1622:17 (liblog.so+0x8bb09)
    #3 void kudu::AtomicStoreMax<long>(std::__1::atomic<long>&, long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/atomic-utils.h:33:13 (liblog.so+0x7ea60)
    #4 kudu::HighWaterMark::UpdateMax(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:80:5 (libkudu_util.so+0x3a4d64)
    #5 kudu::HighWaterMark::IncrementBy(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:70:5 (libkudu_util.so+0x39fbcd)
    #6 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:285:27 (libkudu_util.so+0x39d231)
    #7 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #8 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #9 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #10 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #11 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #12 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #13 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #14 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #15 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #16 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #17 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #18 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #19 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #21 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #22 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #23 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #24 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #25 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)


ClientTest.TestCompactionOfSoftDeletedAndRecalledTable: WARNING: ThreadSanitizer: data race (pid=5023)  Write of size 8 at 0x7b4400151730 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::deallocate(std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a612d)
    #6 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3515:9 (libkudu_util.so+0x3a5ad0)
    #7 std::__1::__shared_weak_count::__release_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/libcxx/src/memory.cpp (libc++.so.1+0xbde46)
    #8 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3384:9 (libtablet_test_util.so+0x35415)
    #9 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #10 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #11 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #12 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #13 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #14 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #15 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #16 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #17 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #18 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #19 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #20 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #21 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #22 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #23 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #24 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #25 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #26 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #27 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #28 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #29 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #30 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #31 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #32 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #33 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #34 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #35 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #36 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #37 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #38 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #39 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #40 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #41 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #42 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #43 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #44 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #45 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #46 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #47 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous atomic write of size 8 at 0x7b4400151730 by thread T114 (mutexes: write M920558637112824480):
    #0 __tsan_atomic64_fetch_add /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp:620 (client-test+0x4c53d5)
    #1 long std::__1::__cxx_atomic_fetch_add<long>(std::__1::__cxx_atomic_base_impl<long>*, long, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1042:12 (libkudu_util.so+0x2a5163)
    #2 std::__1::__atomic_base<long, true>::fetch_add(long, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1706:17 (libkudu_util.so+0x2a50a9)
    #3 kudu::HighWaterMark::IncrementBy(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:70:30 (libkudu_util.so+0x39fbc2)
    #4 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:285:27 (libkudu_util.so+0x39d231)
    #5 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #6 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #7 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #8 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #13 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #14 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #15 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #16 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #17 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #18 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #19 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #20 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #21 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #22 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #23 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)


ClientTest.TestCompactionOfSoftDeletedAndRecalledTable: WARNING: ThreadSanitizer: data race (pid=5023)  Write of size 8 at 0x7b4400151738 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::deallocate(std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a612d)
    #6 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3515:9 (libkudu_util.so+0x3a5ad0)
    #7 std::__1::__shared_weak_count::__release_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/libcxx/src/memory.cpp (libc++.so.1+0xbde46)
    #8 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3384:9 (libtablet_test_util.so+0x35415)
    #9 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #10 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #11 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #12 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #13 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #14 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #15 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #16 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #17 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #18 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #19 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #20 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #21 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #22 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #23 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #24 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #25 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #26 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #27 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #28 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #29 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #30 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #31 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #32 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #33 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #34 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #35 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #36 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #37 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #38 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #39 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #40 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #41 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #42 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #43 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #44 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #45 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #46 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #47 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous atomic write of size 8 at 0x7b4400151738 by thread T114 (mutexes: write M920558637112824480):
    #0 __tsan_atomic64_compare_exchange_val /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp:850 (client-test+0x4cc4a8)
    #1 bool std::__1::__cxx_atomic_compare_exchange_weak<long>(std::__1::__cxx_atomic_base_impl<long>*, long*, long, std::__1::memory_order, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1031:12 (liblog.so+0x8bce4)
    #2 std::__1::__atomic_base<long, false>::compare_exchange_weak(long&, long, std::__1::memory_order, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1622:17 (liblog.so+0x8bb09)
    #3 void kudu::AtomicStoreMax<long>(std::__1::atomic<long>&, long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/atomic-utils.h:33:13 (liblog.so+0x7ea60)
    #4 kudu::HighWaterMark::UpdateMax(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:80:5 (libkudu_util.so+0x3a4d64)
    #5 kudu::HighWaterMark::IncrementBy(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:70:5 (libkudu_util.so+0x39fbcd)
    #6 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:285:27 (libkudu_util.so+0x39d231)
    #7 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #8 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #9 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #10 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #11 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #12 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #13 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #14 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #15 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #16 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #17 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #18 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #19 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #21 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #22 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #23 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #24 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #25 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)


ClientTest.TestCompactionOfSoftDeletedAndRecalledTable: WARNING: ThreadSanitizer: data race (pid=5023)  Write of size 8 at 0x7b4400151740 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::deallocate(std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a612d)
    #6 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3515:9 (libkudu_util.so+0x3a5ad0)
    #7 std::__1::__shared_weak_count::__release_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/libcxx/src/memory.cpp (libc++.so.1+0xbde46)
    #8 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3384:9 (libtablet_test_util.so+0x35415)
    #9 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #10 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #11 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #12 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #13 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #14 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #15 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #16 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #17 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #18 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #19 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #20 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #21 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #22 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #23 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #24 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #25 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #26 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #27 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #28 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #29 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #30 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #31 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #32 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #33 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #34 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #35 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #36 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #37 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #38 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #39 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #40 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #41 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #42 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #43 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #44 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #45 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #46 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #47 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous read of size 8 at 0x7b4400151740 by thread T114 (mutexes: write M920558637112824480):
    #0 std::__1::vector<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::begin() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:1520:30 (libkudu_util.so+0x39fac9)
    #1 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:284:22 (libkudu_util.so+0x39d1da)
    #2 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #3 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #4 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #5 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #6 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #7 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #8 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #9 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #10 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #11 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #12 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #13 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #14 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #15 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #16 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #17 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #18 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #19 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #20 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

Full log

Note: This is test shard 3 of 8.
[==========] Running 28 tests from 7 test suites.
[----------] Global test environment set-up.
[----------] 21 tests from ClientTest
[ RUN      ] ClientTest.TestListTables
WARNING: Logging before InitGoogleLogging() is written to STDERR
I20250901 14:18:11.141849  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:33885
I20250901 14:18:11.143451  5023 env_posix.cc:2264] Not raising this process' open files per process limit of 1048576; it is already as high as it can go
I20250901 14:18:11.144524  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:11.161458  5030 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:11.161500  5032 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:11.161890  5029 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:11.163370  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:12.409670  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:12.409929  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:12.410121  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736292410095 us; error 0 us; skew 500 ppm
I20250901 14:18:12.410974  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:12.417418  5023 webserver.cc:480] Webserver started at http://127.4.231.254:40433/ using document root <none> and password file <none>
I20250901 14:18:12.418568  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:12.418869  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:12.419349  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:12.425683  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestListTables.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "6d3e0be1d06f475b9082fe85426d81a2"
format_stamp: "Formatted at 2025-09-01 14:18:12 on dist-test-slave-9gf0"
I20250901 14:18:12.433948  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.007s	user 0.006s	sys 0.001s
I20250901 14:18:12.439363  5038 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:12.440586  5023 fs_manager.cc:730] Time spent opening block manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:12.440948  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestListTables.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "6d3e0be1d06f475b9082fe85426d81a2"
format_stamp: "Formatted at 2025-09-01 14:18:12 on dist-test-slave-9gf0"
I20250901 14:18:12.441284  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestListTables.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestListTables.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestListTables.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:12.503521  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:12.505101  5023 env_posix.cc:2264] Not raising this process' running threads per effective uid limit of 18446744073709551615; it is already as high as it can go
I20250901 14:18:12.505625  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:12.594893  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:33885
I20250901 14:18:12.594942  5099 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:33885 every 8 connection(s)
I20250901 14:18:12.602006  5100 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:12.625981  5100 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2: Bootstrap starting.
I20250901 14:18:12.632556  5100 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:12.634534  5100 log.cc:826] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2: Log is configured to *not* fsync() on all Append() calls
I20250901 14:18:12.639736  5100 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2: No bootstrap required, opened a new log
I20250901 14:18:12.662055  5100 raft_consensus.cc:357] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6d3e0be1d06f475b9082fe85426d81a2" member_type: VOTER }
I20250901 14:18:12.662657  5100 raft_consensus.cc:383] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:12.662905  5100 raft_consensus.cc:738] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 6d3e0be1d06f475b9082fe85426d81a2, State: Initialized, Role: FOLLOWER
I20250901 14:18:12.663697  5100 consensus_queue.cc:260] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6d3e0be1d06f475b9082fe85426d81a2" member_type: VOTER }
I20250901 14:18:12.664294  5100 raft_consensus.cc:397] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:12.664597  5100 raft_consensus.cc:491] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:12.664920  5100 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:12.669732  5100 raft_consensus.cc:513] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6d3e0be1d06f475b9082fe85426d81a2" member_type: VOTER }
I20250901 14:18:12.670462  5100 leader_election.cc:304] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 6d3e0be1d06f475b9082fe85426d81a2; no voters: 
I20250901 14:18:12.672207  5100 leader_election.cc:290] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:12.672555  5103 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:12.675307  5103 raft_consensus.cc:695] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 1 LEADER]: Becoming Leader. State: Replica: 6d3e0be1d06f475b9082fe85426d81a2, State: Running, Role: LEADER
I20250901 14:18:12.676712  5103 consensus_queue.cc:237] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6d3e0be1d06f475b9082fe85426d81a2" member_type: VOTER }
I20250901 14:18:12.677462  5100 sys_catalog.cc:564] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:12.692202  5105 sys_catalog.cc:455] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [sys.catalog]: SysCatalogTable state changed. Reason: New leader 6d3e0be1d06f475b9082fe85426d81a2. Latest consensus state: current_term: 1 leader_uuid: "6d3e0be1d06f475b9082fe85426d81a2" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6d3e0be1d06f475b9082fe85426d81a2" member_type: VOTER } }
I20250901 14:18:12.692047  5104 sys_catalog.cc:455] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "6d3e0be1d06f475b9082fe85426d81a2" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6d3e0be1d06f475b9082fe85426d81a2" member_type: VOTER } }
I20250901 14:18:12.692860  5105 sys_catalog.cc:458] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:12.693038  5104 sys_catalog.cc:458] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:12.696976  5113 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:12.712507  5113 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:12.714923  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:12.730374  5113 catalog_manager.cc:1349] Generated new cluster ID: cf11736678134a18987b05496b77bd0c
I20250901 14:18:12.730684  5113 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:12.751645  5113 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:12.753262  5113 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:12.766017  5113 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2: Generated new TSK 0
I20250901 14:18:12.767055  5113 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:12.781488  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:12.791536  5122 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:12.794719  5123 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:12.798393  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:12.798956  5125 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:12.799850  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:12.800057  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:12.800213  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736292800195 us; error 0 us; skew 500 ppm
I20250901 14:18:12.800770  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:12.805330  5023 webserver.cc:480] Webserver started at http://127.4.231.193:40353/ using document root <none> and password file <none>
I20250901 14:18:12.805891  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:12.806110  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:12.806381  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:12.807561  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestListTables.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "4d544134a82c4dea821bf13386fa7319"
format_stamp: "Formatted at 2025-09-01 14:18:12 on dist-test-slave-9gf0"
I20250901 14:18:12.813585  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.005s	user 0.007s	sys 0.000s
I20250901 14:18:12.818262  5130 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:12.819240  5023 fs_manager.cc:730] Time spent opening block manager: real 0.004s	user 0.002s	sys 0.002s
I20250901 14:18:12.819577  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestListTables.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "4d544134a82c4dea821bf13386fa7319"
format_stamp: "Formatted at 2025-09-01 14:18:12 on dist-test-slave-9gf0"
I20250901 14:18:12.819890  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestListTables.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestListTables.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestListTables.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:12.841449  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:12.842844  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:12.860987  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:12.861554  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.001s	user 0.001s	sys 0.000s
I20250901 14:18:12.861886  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:12.862057  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:13.013286  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:40471
I20250901 14:18:13.013411  5200 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:40471 every 8 connection(s)
I20250901 14:18:13.021411  5201 heartbeater.cc:344] Connected to a master server at 127.4.231.254:33885
I20250901 14:18:13.021931  5201 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:13.022895  5201 heartbeater.cc:507] Master 127.4.231.254:33885 requested a full tablet report, sending...
I20250901 14:18:13.025660  5055 ts_manager.cc:194] Registered new tserver with Master: 4d544134a82c4dea821bf13386fa7319 (127.4.231.193:40471)
I20250901 14:18:13.025658  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.005162075s
I20250901 14:18:13.029093  5055 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:35288
I20250901 14:18:13.043128  5201 heartbeater.cc:499] Master 127.4.231.254:33885 was elected leader, sending a full tablet report...
I20250901 14:18:13.064960  5054 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:35304:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:13.244777  5165 tablet_service.cc:1468] Processing CreateTablet for tablet 397ea3cb189b4c7ab3d6bfeddc712f73 (DEFAULT_TABLE table=client-testtb [id=a4a37cc3dfbb4cc5bf5c9a20d3eb2a2b]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:13.244777  5166 tablet_service.cc:1468] Processing CreateTablet for tablet 982a393de91f4eb083552eea2e427a17 (DEFAULT_TABLE table=client-testtb [id=a4a37cc3dfbb4cc5bf5c9a20d3eb2a2b]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:13.246984  5165 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 397ea3cb189b4c7ab3d6bfeddc712f73. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:13.247713  5166 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 982a393de91f4eb083552eea2e427a17. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:13.265062  5211 tablet_bootstrap.cc:492] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319: Bootstrap starting.
I20250901 14:18:13.271528  5211 tablet_bootstrap.cc:654] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:13.276110  5211 tablet_bootstrap.cc:492] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319: No bootstrap required, opened a new log
I20250901 14:18:13.276569  5211 ts_tablet_manager.cc:1397] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319: Time spent bootstrapping tablet: real 0.012s	user 0.000s	sys 0.010s
I20250901 14:18:13.279493  5211 raft_consensus.cc:357] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.279990  5211 raft_consensus.cc:383] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:13.280226  5211 raft_consensus.cc:738] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 4d544134a82c4dea821bf13386fa7319, State: Initialized, Role: FOLLOWER
I20250901 14:18:13.280753  5211 consensus_queue.cc:260] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.281205  5211 raft_consensus.cc:397] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:13.281415  5211 raft_consensus.cc:491] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:13.281704  5211 raft_consensus.cc:3058] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:13.286082  5211 raft_consensus.cc:513] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.286655  5211 leader_election.cc:304] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 4d544134a82c4dea821bf13386fa7319; no voters: 
I20250901 14:18:13.290117  5211 leader_election.cc:290] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:13.291409  5213 raft_consensus.cc:2802] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:13.295864  5211 ts_tablet_manager.cc:1428] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319: Time spent starting tablet: real 0.019s	user 0.007s	sys 0.012s
I20250901 14:18:13.296389  5213 raft_consensus.cc:695] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 1 LEADER]: Becoming Leader. State: Replica: 4d544134a82c4dea821bf13386fa7319, State: Running, Role: LEADER
I20250901 14:18:13.296866  5211 tablet_bootstrap.cc:492] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319: Bootstrap starting.
I20250901 14:18:13.297171  5213 consensus_queue.cc:237] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.305158  5211 tablet_bootstrap.cc:654] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:13.311865  5055 catalog_manager.cc:5582] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 reported cstate change: term changed from 0 to 1, leader changed from <none> to 4d544134a82c4dea821bf13386fa7319 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "4d544134a82c4dea821bf13386fa7319" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:13.320407  5211 tablet_bootstrap.cc:492] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319: No bootstrap required, opened a new log
I20250901 14:18:13.320866  5211 ts_tablet_manager.cc:1397] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319: Time spent bootstrapping tablet: real 0.024s	user 0.006s	sys 0.012s
I20250901 14:18:13.323712  5211 raft_consensus.cc:357] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.324267  5211 raft_consensus.cc:383] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:13.324565  5211 raft_consensus.cc:738] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 4d544134a82c4dea821bf13386fa7319, State: Initialized, Role: FOLLOWER
I20250901 14:18:13.325333  5211 consensus_queue.cc:260] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.325976  5211 raft_consensus.cc:397] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:13.326272  5211 raft_consensus.cc:491] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:13.326624  5211 raft_consensus.cc:3058] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:13.332300  5211 raft_consensus.cc:513] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.332875  5211 leader_election.cc:304] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 4d544134a82c4dea821bf13386fa7319; no voters: 
I20250901 14:18:13.333438  5211 leader_election.cc:290] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:13.333596  5213 raft_consensus.cc:2802] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:13.334160  5213 raft_consensus.cc:695] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 1 LEADER]: Becoming Leader. State: Replica: 4d544134a82c4dea821bf13386fa7319, State: Running, Role: LEADER
I20250901 14:18:13.334911  5213 consensus_queue.cc:237] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.335259  5211 ts_tablet_manager.cc:1428] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319: Time spent starting tablet: real 0.014s	user 0.011s	sys 0.004s
I20250901 14:18:13.341984  5055 catalog_manager.cc:5582] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 reported cstate change: term changed from 0 to 1, leader changed from <none> to 4d544134a82c4dea821bf13386fa7319 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "4d544134a82c4dea821bf13386fa7319" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:13.375267  5055 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:35304:
name: "client-testtb2"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:13.398890  5166 tablet_service.cc:1468] Processing CreateTablet for tablet d4a6299bef9747fd84ec0166ed1dac74 (DEFAULT_TABLE table=client-testtb2 [id=a91ed5f9b0e34036be321f73a9eeba90]), partition=RANGE (key) PARTITION UNBOUNDED
I20250901 14:18:13.399967  5166 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet d4a6299bef9747fd84ec0166ed1dac74. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:13.413092  5211 tablet_bootstrap.cc:492] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319: Bootstrap starting.
I20250901 14:18:13.419875  5211 tablet_bootstrap.cc:654] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:13.426815  5211 tablet_bootstrap.cc:492] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319: No bootstrap required, opened a new log
I20250901 14:18:13.427270  5211 ts_tablet_manager.cc:1397] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319: Time spent bootstrapping tablet: real 0.014s	user 0.008s	sys 0.003s
I20250901 14:18:13.429251  5211 raft_consensus.cc:357] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.429713  5211 raft_consensus.cc:383] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:13.429934  5211 raft_consensus.cc:738] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 4d544134a82c4dea821bf13386fa7319, State: Initialized, Role: FOLLOWER
I20250901 14:18:13.430413  5211 consensus_queue.cc:260] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.430866  5211 raft_consensus.cc:397] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:13.431123  5211 raft_consensus.cc:491] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:13.431511  5211 raft_consensus.cc:3058] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:13.437325  5211 raft_consensus.cc:513] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.438015  5211 leader_election.cc:304] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 4d544134a82c4dea821bf13386fa7319; no voters: 
I20250901 14:18:13.438630  5211 leader_election.cc:290] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:13.438796  5213 raft_consensus.cc:2802] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:13.439260  5213 raft_consensus.cc:695] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 1 LEADER]: Becoming Leader. State: Replica: 4d544134a82c4dea821bf13386fa7319, State: Running, Role: LEADER
I20250901 14:18:13.439915  5213 consensus_queue.cc:237] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } }
I20250901 14:18:13.440444  5211 ts_tablet_manager.cc:1428] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319: Time spent starting tablet: real 0.013s	user 0.012s	sys 0.000s
I20250901 14:18:13.446758  5054 catalog_manager.cc:5582] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 reported cstate change: term changed from 0 to 1, leader changed from <none> to 4d544134a82c4dea821bf13386fa7319 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "4d544134a82c4dea821bf13386fa7319" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "4d544134a82c4dea821bf13386fa7319" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40471 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:13.471931  5055 catalog_manager.cc:2482] Servicing SoftDeleteTable request from {username='slave'} at 127.0.0.1:35304:
table { table_name: "client-testtb2" } modify_external_catalogs: true reserve_seconds: 1000
I20250901 14:18:13.483100  5055 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:35304:
table { table_name: "client-testtb2" } new_extra_configs { key: "kudu.table.disable_compaction" value: "true" }
I20250901 14:18:13.503321  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:13.502697  5222 tablet.cc:1722] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319: Alter schema from (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
) version 0 to (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
) version 1
W20250901 14:18:13.521790  5202 tablet_mm_ops.cc:139] Rowset compaction is disabled (check --enable_rowset_compaction and disable_compaction in extra_config for tablet:d4a6299bef9747fd84ec0166ed1dac74)
W20250901 14:18:13.522949  5202 tablet_mm_ops.cc:304] Major delta compaction is disabled (check --enable_major_delta_compaction and disable_compaction in extra_config for tablet:d4a6299bef9747fd84ec0166ed1dac74)
W20250901 14:18:13.523370  5202 tablet_mm_ops.cc:220] Minor delta compaction is disabled (check --enable_minor_delta_compaction and disable_compaction in extra_config for tablet:d4a6299bef9747fd84ec0166ed1dac74)
I20250901 14:18:13.533355  5054 catalog_manager.cc:6127] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2: client-testtb2 [id=a91ed5f9b0e34036be321f73a9eeba90] alter complete (version 1)
I20250901 14:18:13.540979  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:13.541865  5023 tablet_replica.cc:331] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319: stopping tablet replica
I20250901 14:18:13.542624  5023 raft_consensus.cc:2241] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:13.543233  5023 raft_consensus.cc:2270] T d4a6299bef9747fd84ec0166ed1dac74 P 4d544134a82c4dea821bf13386fa7319 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:13.546296  5023 tablet_replica.cc:331] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319: stopping tablet replica
I20250901 14:18:13.546916  5023 raft_consensus.cc:2241] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:13.547405  5023 raft_consensus.cc:2270] T 397ea3cb189b4c7ab3d6bfeddc712f73 P 4d544134a82c4dea821bf13386fa7319 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:13.549741  5023 tablet_replica.cc:331] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319: stopping tablet replica
I20250901 14:18:13.550299  5023 raft_consensus.cc:2241] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:13.550750  5023 raft_consensus.cc:2270] T 982a393de91f4eb083552eea2e427a17 P 4d544134a82c4dea821bf13386fa7319 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:13.712065  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:13.726481  5023 master.cc:561] Master@127.4.231.254:33885 shutting down...
I20250901 14:18:13.819075  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:13.819617  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:13.819955  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 6d3e0be1d06f475b9082fe85426d81a2: stopping tablet replica
W20250901 14:18:14.160326  5096 debug-util.cc:398] Leaking SignalData structure 0x7b080006d060 after lost signal to thread 5026
W20250901 14:18:14.837052  5023 thread.cc:527] Waited for 1000ms trying to join with diag-logger (tid 5096)
W20250901 14:18:15.837692  5023 thread.cc:527] Waited for 2000ms trying to join with diag-logger (tid 5096)
I20250901 14:18:15.915313  5023 master.cc:583] Master@127.4.231.254:33885 shutdown complete.
[       OK ] ClientTest.TestListTables (4801 ms)
[ RUN      ] ClientTest.TestRandomizedLimitScans
I20250901 14:18:15.938716  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:42369
I20250901 14:18:15.939771  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:15.944666  5223 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:15.945366  5224 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:15.946832  5226 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:15.950475  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:15.953334  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:15.953588  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:15.953730  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736295953717 us; error 0 us; skew 500 ppm
I20250901 14:18:15.954213  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:15.956622  5023 webserver.cc:480] Webserver started at http://127.4.231.254:33083/ using document root <none> and password file <none>
I20250901 14:18:15.957088  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:15.957257  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:15.957499  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:15.958679  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomizedLimitScans.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "f1511985fc68416c9d173125eaf13379"
format_stamp: "Formatted at 2025-09-01 14:18:15 on dist-test-slave-9gf0"
I20250901 14:18:15.963351  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:15.966723  5231 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:15.967401  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:15.967682  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomizedLimitScans.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "f1511985fc68416c9d173125eaf13379"
format_stamp: "Formatted at 2025-09-01 14:18:15 on dist-test-slave-9gf0"
I20250901 14:18:15.967927  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomizedLimitScans.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomizedLimitScans.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomizedLimitScans.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:15.991932  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:15.992902  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:16.032285  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:42369
I20250901 14:18:16.032369  5292 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:42369 every 8 connection(s)
I20250901 14:18:16.035897  5293 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:16.045483  5293 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379: Bootstrap starting.
I20250901 14:18:16.049480  5293 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:16.053371  5293 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379: No bootstrap required, opened a new log
I20250901 14:18:16.055240  5293 raft_consensus.cc:357] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1511985fc68416c9d173125eaf13379" member_type: VOTER }
I20250901 14:18:16.055640  5293 raft_consensus.cc:383] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:16.055889  5293 raft_consensus.cc:738] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: f1511985fc68416c9d173125eaf13379, State: Initialized, Role: FOLLOWER
I20250901 14:18:16.056432  5293 consensus_queue.cc:260] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1511985fc68416c9d173125eaf13379" member_type: VOTER }
I20250901 14:18:16.056866  5293 raft_consensus.cc:397] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:16.057083  5293 raft_consensus.cc:491] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:16.057317  5293 raft_consensus.cc:3058] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:16.061825  5293 raft_consensus.cc:513] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1511985fc68416c9d173125eaf13379" member_type: VOTER }
I20250901 14:18:16.062361  5293 leader_election.cc:304] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: f1511985fc68416c9d173125eaf13379; no voters: 
I20250901 14:18:16.063481  5293 leader_election.cc:290] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:16.063805  5296 raft_consensus.cc:2802] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:16.065142  5296 raft_consensus.cc:695] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 1 LEADER]: Becoming Leader. State: Replica: f1511985fc68416c9d173125eaf13379, State: Running, Role: LEADER
I20250901 14:18:16.065843  5296 consensus_queue.cc:237] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1511985fc68416c9d173125eaf13379" member_type: VOTER }
I20250901 14:18:16.066295  5293 sys_catalog.cc:564] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:16.070461  5298 sys_catalog.cc:455] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [sys.catalog]: SysCatalogTable state changed. Reason: New leader f1511985fc68416c9d173125eaf13379. Latest consensus state: current_term: 1 leader_uuid: "f1511985fc68416c9d173125eaf13379" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1511985fc68416c9d173125eaf13379" member_type: VOTER } }
I20250901 14:18:16.071106  5298 sys_catalog.cc:458] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:16.072692  5297 sys_catalog.cc:455] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "f1511985fc68416c9d173125eaf13379" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1511985fc68416c9d173125eaf13379" member_type: VOTER } }
I20250901 14:18:16.073503  5297 sys_catalog.cc:458] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:16.073611  5305 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:16.079700  5305 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:16.080662  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:16.087865  5305 catalog_manager.cc:1349] Generated new cluster ID: b2fe9d2872404d7f93f54ba5d981d43d
I20250901 14:18:16.088173  5305 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:16.098868  5305 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:16.100070  5305 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:16.117345  5305 catalog_manager.cc:5955] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379: Generated new TSK 0
I20250901 14:18:16.117959  5305 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:16.147150  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:16.152585  5314 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:16.154342  5315 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:16.157094  5317 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:16.157145  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:16.158167  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:16.158352  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:16.158516  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736296158497 us; error 0 us; skew 500 ppm
I20250901 14:18:16.159013  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:16.161238  5023 webserver.cc:480] Webserver started at http://127.4.231.193:34071/ using document root <none> and password file <none>
I20250901 14:18:16.161733  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:16.161911  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:16.162153  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:16.163187  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomizedLimitScans.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "839ef01d38034d379a65947c71b7f545"
format_stamp: "Formatted at 2025-09-01 14:18:16 on dist-test-slave-9gf0"
I20250901 14:18:16.167555  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.002s
I20250901 14:18:16.170879  5322 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:16.171650  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:16.171922  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomizedLimitScans.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "839ef01d38034d379a65947c71b7f545"
format_stamp: "Formatted at 2025-09-01 14:18:16 on dist-test-slave-9gf0"
I20250901 14:18:16.172184  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomizedLimitScans.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomizedLimitScans.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomizedLimitScans.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:16.183203  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:16.184191  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:16.188972  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:16.189400  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.001s	sys 0.000s
I20250901 14:18:16.189800  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:16.190099  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:16.240924  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:36373
I20250901 14:18:16.241047  5392 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:36373 every 8 connection(s)
I20250901 14:18:16.245275  5393 heartbeater.cc:344] Connected to a master server at 127.4.231.254:42369
I20250901 14:18:16.245661  5393 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:16.246356  5393 heartbeater.cc:507] Master 127.4.231.254:42369 requested a full tablet report, sending...
I20250901 14:18:16.248183  5248 ts_manager.cc:194] Registered new tserver with Master: 839ef01d38034d379a65947c71b7f545 (127.4.231.193:36373)
I20250901 14:18:16.248768  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004716558s
I20250901 14:18:16.249884  5248 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:50128
I20250901 14:18:16.262708  5393 heartbeater.cc:499] Master 127.4.231.254:42369 was elected leader, sending a full tablet report...
I20250901 14:18:16.269994  5247 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:50146:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:16.299885  5356 tablet_service.cc:1468] Processing CreateTablet for tablet d23ef30561624dd7ab515d88eac3da11 (DEFAULT_TABLE table=client-testtb [id=457c688ef8c949a9a96c0fd82b3eb839]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:16.300484  5354 tablet_service.cc:1468] Processing CreateTablet for tablet b03968af92be4b489247ac44cf8d0eda (DEFAULT_TABLE table=client-testtb [id=457c688ef8c949a9a96c0fd82b3eb839]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:16.301421  5356 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet d23ef30561624dd7ab515d88eac3da11. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:16.301920  5354 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet b03968af92be4b489247ac44cf8d0eda. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:16.315446  5403 tablet_bootstrap.cc:492] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545: Bootstrap starting.
I20250901 14:18:16.321717  5403 tablet_bootstrap.cc:654] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:16.330780  5403 tablet_bootstrap.cc:492] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545: No bootstrap required, opened a new log
I20250901 14:18:16.331162  5403 ts_tablet_manager.cc:1397] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545: Time spent bootstrapping tablet: real 0.016s	user 0.014s	sys 0.000s
I20250901 14:18:16.333132  5403 raft_consensus.cc:357] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.333678  5403 raft_consensus.cc:383] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:16.333909  5403 raft_consensus.cc:738] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 839ef01d38034d379a65947c71b7f545, State: Initialized, Role: FOLLOWER
I20250901 14:18:16.334470  5403 consensus_queue.cc:260] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.335105  5403 raft_consensus.cc:397] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:16.335345  5403 raft_consensus.cc:491] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:16.335604  5403 raft_consensus.cc:3058] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:16.340704  5403 raft_consensus.cc:513] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.341270  5403 leader_election.cc:304] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 839ef01d38034d379a65947c71b7f545; no voters: 
I20250901 14:18:16.342553  5403 leader_election.cc:290] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:16.343025  5405 raft_consensus.cc:2802] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:16.347142  5403 ts_tablet_manager.cc:1428] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545: Time spent starting tablet: real 0.016s	user 0.012s	sys 0.004s
I20250901 14:18:16.348166  5403 tablet_bootstrap.cc:492] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545: Bootstrap starting.
I20250901 14:18:16.350540  5405 raft_consensus.cc:695] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 1 LEADER]: Becoming Leader. State: Replica: 839ef01d38034d379a65947c71b7f545, State: Running, Role: LEADER
I20250901 14:18:16.351194  5405 consensus_queue.cc:237] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.355208  5403 tablet_bootstrap.cc:654] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:16.357972  5248 catalog_manager.cc:5582] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 reported cstate change: term changed from 0 to 1, leader changed from <none> to 839ef01d38034d379a65947c71b7f545 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "839ef01d38034d379a65947c71b7f545" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:16.362908  5403 tablet_bootstrap.cc:492] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545: No bootstrap required, opened a new log
I20250901 14:18:16.363329  5403 ts_tablet_manager.cc:1397] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545: Time spent bootstrapping tablet: real 0.015s	user 0.008s	sys 0.004s
I20250901 14:18:16.365728  5403 raft_consensus.cc:357] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.366302  5403 raft_consensus.cc:383] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:16.366533  5403 raft_consensus.cc:738] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 839ef01d38034d379a65947c71b7f545, State: Initialized, Role: FOLLOWER
I20250901 14:18:16.367101  5403 consensus_queue.cc:260] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.367620  5403 raft_consensus.cc:397] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:16.367892  5403 raft_consensus.cc:491] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:16.368141  5403 raft_consensus.cc:3058] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:16.373031  5403 raft_consensus.cc:513] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.373658  5403 leader_election.cc:304] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 839ef01d38034d379a65947c71b7f545; no voters: 
I20250901 14:18:16.374151  5403 leader_election.cc:290] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:16.374284  5405 raft_consensus.cc:2802] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:16.374881  5405 raft_consensus.cc:695] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 1 LEADER]: Becoming Leader. State: Replica: 839ef01d38034d379a65947c71b7f545, State: Running, Role: LEADER
I20250901 14:18:16.375641  5403 ts_tablet_manager.cc:1428] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545: Time spent starting tablet: real 0.012s	user 0.010s	sys 0.000s
I20250901 14:18:16.375499  5405 consensus_queue.cc:237] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.382313  5247 catalog_manager.cc:5582] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 reported cstate change: term changed from 0 to 1, leader changed from <none> to 839ef01d38034d379a65947c71b7f545 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "839ef01d38034d379a65947c71b7f545" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:16.407928  5247 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:50146:
name: "table"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
}
partition_schema {
  hash_schema {
    columns {
      name: "key"
    }
    num_buckets: 2
    seed: 0
  }
}
I20250901 14:18:16.430763  5354 tablet_service.cc:1468] Processing CreateTablet for tablet ec1e0c5f2c7e44529d1dcf7e7804fd4c (DEFAULT_TABLE table=table [id=cb9e7aefeb9a4d2f9ee087fc10c009e4]), partition=HASH (key) PARTITION 0, RANGE (key) PARTITION UNBOUNDED
I20250901 14:18:16.431880  5354 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet ec1e0c5f2c7e44529d1dcf7e7804fd4c. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:16.431901  5356 tablet_service.cc:1468] Processing CreateTablet for tablet 7250aae0af78470e8bc33597a8b13ed3 (DEFAULT_TABLE table=table [id=cb9e7aefeb9a4d2f9ee087fc10c009e4]), partition=HASH (key) PARTITION 1, RANGE (key) PARTITION UNBOUNDED
I20250901 14:18:16.432976  5356 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 7250aae0af78470e8bc33597a8b13ed3. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:16.447870  5403 tablet_bootstrap.cc:492] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545: Bootstrap starting.
I20250901 14:18:16.453563  5403 tablet_bootstrap.cc:654] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:16.458951  5403 tablet_bootstrap.cc:492] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545: No bootstrap required, opened a new log
I20250901 14:18:16.459312  5403 ts_tablet_manager.cc:1397] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545: Time spent bootstrapping tablet: real 0.012s	user 0.009s	sys 0.000s
I20250901 14:18:16.461144  5403 raft_consensus.cc:357] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.461568  5403 raft_consensus.cc:383] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:16.461871  5403 raft_consensus.cc:738] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 839ef01d38034d379a65947c71b7f545, State: Initialized, Role: FOLLOWER
I20250901 14:18:16.462347  5403 consensus_queue.cc:260] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.462793  5403 raft_consensus.cc:397] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:16.463014  5403 raft_consensus.cc:491] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:16.463322  5403 raft_consensus.cc:3058] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:16.469298  5403 raft_consensus.cc:513] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.469880  5403 leader_election.cc:304] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 839ef01d38034d379a65947c71b7f545; no voters: 
I20250901 14:18:16.470391  5403 leader_election.cc:290] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:16.470538  5405 raft_consensus.cc:2802] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:16.470968  5405 raft_consensus.cc:695] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 1 LEADER]: Becoming Leader. State: Replica: 839ef01d38034d379a65947c71b7f545, State: Running, Role: LEADER
I20250901 14:18:16.471757  5405 consensus_queue.cc:237] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.472239  5403 ts_tablet_manager.cc:1428] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545: Time spent starting tablet: real 0.013s	user 0.012s	sys 0.000s
I20250901 14:18:16.473064  5403 tablet_bootstrap.cc:492] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545: Bootstrap starting.
I20250901 14:18:16.477806  5248 catalog_manager.cc:5582] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 reported cstate change: term changed from 0 to 1, leader changed from <none> to 839ef01d38034d379a65947c71b7f545 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "839ef01d38034d379a65947c71b7f545" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:16.479859  5403 tablet_bootstrap.cc:654] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:16.486878  5403 tablet_bootstrap.cc:492] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545: No bootstrap required, opened a new log
I20250901 14:18:16.487228  5403 ts_tablet_manager.cc:1397] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545: Time spent bootstrapping tablet: real 0.014s	user 0.008s	sys 0.004s
I20250901 14:18:16.490024  5403 raft_consensus.cc:357] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.490593  5403 raft_consensus.cc:383] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:16.490852  5403 raft_consensus.cc:738] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 839ef01d38034d379a65947c71b7f545, State: Initialized, Role: FOLLOWER
I20250901 14:18:16.491379  5403 consensus_queue.cc:260] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.491834  5403 raft_consensus.cc:397] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:16.492058  5403 raft_consensus.cc:491] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:16.492291  5403 raft_consensus.cc:3058] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:16.498276  5403 raft_consensus.cc:513] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.498844  5403 leader_election.cc:304] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 839ef01d38034d379a65947c71b7f545; no voters: 
I20250901 14:18:16.499353  5403 leader_election.cc:290] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:16.499502  5405 raft_consensus.cc:2802] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:16.499946  5405 raft_consensus.cc:695] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 1 LEADER]: Becoming Leader. State: Replica: 839ef01d38034d379a65947c71b7f545, State: Running, Role: LEADER
I20250901 14:18:16.500576  5405 consensus_queue.cc:237] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } }
I20250901 14:18:16.501309  5403 ts_tablet_manager.cc:1428] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545: Time spent starting tablet: real 0.014s	user 0.008s	sys 0.004s
I20250901 14:18:16.507390  5247 catalog_manager.cc:5582] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 reported cstate change: term changed from 0 to 1, leader changed from <none> to 839ef01d38034d379a65947c71b7f545 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "839ef01d38034d379a65947c71b7f545" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "839ef01d38034d379a65947c71b7f545" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36373 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:17.249015  5237 maintenance_manager.cc:419] P f1511985fc68416c9d173125eaf13379: Scheduling FlushMRSOp(00000000000000000000000000000000): perf score=0.007377
I20250901 14:18:17.300575  5236 maintenance_manager.cc:643] P f1511985fc68416c9d173125eaf13379: FlushMRSOp(00000000000000000000000000000000) complete. Timing: real 0.051s	user 0.039s	sys 0.008s Metrics: {"bytes_written":7667,"cfile_init":1,"dirs.queue_time_us":845,"dirs.run_cpu_time_us":367,"dirs.run_wall_time_us":979,"drs_written":1,"lbm_read_time_us":206,"lbm_reads_lt_1ms":4,"lbm_write_time_us":1984,"lbm_writes_lt_1ms":27,"peak_mem_usage":0,"rows_written":9,"thread_start_us":439,"threads_started":1}
I20250901 14:18:17.303417  5237 maintenance_manager.cc:419] P f1511985fc68416c9d173125eaf13379: Scheduling UndoDeltaBlockGCOp(00000000000000000000000000000000): 577 bytes on disk
I20250901 14:18:17.305058  5236 maintenance_manager.cc:643] P f1511985fc68416c9d173125eaf13379: UndoDeltaBlockGCOp(00000000000000000000000000000000) complete. Timing: real 0.001s	user 0.000s	sys 0.000s Metrics: {"cfile_init":1,"lbm_read_time_us":220,"lbm_reads_lt_1ms":4}
I20250901 14:18:17.502816  5394 maintenance_manager.cc:419] P 839ef01d38034d379a65947c71b7f545: Scheduling FlushMRSOp(b03968af92be4b489247ac44cf8d0eda): perf score=0.242942
I20250901 14:18:17.862936  5327 maintenance_manager.cc:643] P 839ef01d38034d379a65947c71b7f545: FlushMRSOp(b03968af92be4b489247ac44cf8d0eda) complete. Timing: real 0.359s	user 0.318s	sys 0.039s Metrics: {"bytes_written":21705,"cfile_init":1,"compiler_manager_pool.queue_time_us":937,"dirs.queue_time_us":1049,"dirs.run_cpu_time_us":315,"dirs.run_wall_time_us":942,"drs_written":1,"lbm_read_time_us":149,"lbm_reads_lt_1ms":4,"lbm_write_time_us":1861,"lbm_writes_lt_1ms":27,"peak_mem_usage":0,"rows_written":1412,"thread_start_us":1351,"threads_started":2}
I20250901 14:18:17.866041  5394 maintenance_manager.cc:419] P 839ef01d38034d379a65947c71b7f545: Scheduling FlushMRSOp(d23ef30561624dd7ab515d88eac3da11): perf score=0.001678
I20250901 14:18:17.891588  5327 maintenance_manager.cc:643] P 839ef01d38034d379a65947c71b7f545: FlushMRSOp(d23ef30561624dd7ab515d88eac3da11) complete. Timing: real 0.025s	user 0.023s	sys 0.000s Metrics: {"bytes_written":4841,"cfile_init":1,"dirs.queue_time_us":90,"dirs.run_cpu_time_us":308,"dirs.run_wall_time_us":1453,"drs_written":1,"lbm_read_time_us":196,"lbm_reads_lt_1ms":4,"lbm_write_time_us":1643,"lbm_writes_lt_1ms":27,"peak_mem_usage":0,"rows_written":9,"thread_start_us":636,"threads_started":1,"wal-append.queue_time_us":1001}
I20250901 14:18:17.894030  5394 maintenance_manager.cc:419] P 839ef01d38034d379a65947c71b7f545: Scheduling UndoDeltaBlockGCOp(b03968af92be4b489247ac44cf8d0eda): 8978 bytes on disk
I20250901 14:18:17.895377  5327 maintenance_manager.cc:643] P 839ef01d38034d379a65947c71b7f545: UndoDeltaBlockGCOp(b03968af92be4b489247ac44cf8d0eda) complete. Timing: real 0.001s	user 0.000s	sys 0.000s Metrics: {"cfile_init":1,"lbm_read_time_us":236,"lbm_reads_lt_1ms":4}
I20250901 14:18:17.898032  5394 maintenance_manager.cc:419] P 839ef01d38034d379a65947c71b7f545: Scheduling UndoDeltaBlockGCOp(d23ef30561624dd7ab515d88eac3da11): 206 bytes on disk
I20250901 14:18:17.899338  5327 maintenance_manager.cc:643] P 839ef01d38034d379a65947c71b7f545: UndoDeltaBlockGCOp(d23ef30561624dd7ab515d88eac3da11) complete. Timing: real 0.001s	user 0.000s	sys 0.000s Metrics: {"cfile_init":1,"lbm_read_time_us":175,"lbm_reads_lt_1ms":4}
I20250901 14:18:18.062405  5023 client-test.cc:1258] Total number of rows: 1421, batch size: 362
I20250901 14:18:18.063585  5023 client-test.cc:1275] Total bytes read on tserver so far: 382
I20250901 14:18:18.063984  5023 client-test.cc:1287] Scanning with a client-side limit of 7, expecting 7 rows
I20250901 14:18:18.091871  5023 client-test.cc:1275] Total bytes read on tserver so far: 929
I20250901 14:18:18.092339  5023 client-test.cc:1287] Scanning with a client-side limit of 12, expecting 12 rows
I20250901 14:18:18.125795  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.126348  5023 client-test.cc:1287] Scanning with a client-side limit of 42, expecting 42 rows
I20250901 14:18:18.151566  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.152148  5023 client-test.cc:1287] Scanning with a client-side limit of 15, expecting 15 rows
I20250901 14:18:18.176986  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.177397  5023 client-test.cc:1287] Scanning with a client-side limit of 67, expecting 67 rows
I20250901 14:18:18.200107  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.200567  5023 client-test.cc:1287] Scanning with a client-side limit of 82, expecting 82 rows
I20250901 14:18:18.220371  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.220888  5023 client-test.cc:1287] Scanning with a client-side limit of 80, expecting 80 rows
I20250901 14:18:18.248428  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.248970  5023 client-test.cc:1287] Scanning with a client-side limit of 59, expecting 59 rows
I20250901 14:18:18.269207  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.269796  5023 client-test.cc:1287] Scanning with a client-side limit of 75, expecting 75 rows
I20250901 14:18:18.289011  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.289565  5023 client-test.cc:1287] Scanning with a client-side limit of 117, expecting 117 rows
I20250901 14:18:18.312934  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.313359  5023 client-test.cc:1287] Scanning with a client-side limit of 4, expecting 4 rows
I20250901 14:18:18.326393  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.326807  5023 client-test.cc:1287] Scanning with a client-side limit of 40, expecting 40 rows
I20250901 14:18:18.346014  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.346468  5023 client-test.cc:1287] Scanning with a client-side limit of 71, expecting 71 rows
I20250901 14:18:18.365938  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.366456  5023 client-test.cc:1287] Scanning with a client-side limit of 117, expecting 117 rows
I20250901 14:18:18.386785  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.387319  5023 client-test.cc:1287] Scanning with a client-side limit of 9, expecting 9 rows
I20250901 14:18:18.399906  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.400322  5023 client-test.cc:1287] Scanning with a client-side limit of 177, expecting 177 rows
I20250901 14:18:18.419901  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.420300  5023 client-test.cc:1287] Scanning with a client-side limit of 51, expecting 51 rows
I20250901 14:18:18.443053  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.443537  5023 client-test.cc:1287] Scanning with a client-side limit of 131, expecting 131 rows
I20250901 14:18:18.466589  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.467025  5023 client-test.cc:1287] Scanning with a client-side limit of 87, expecting 87 rows
I20250901 14:18:18.488572  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.489001  5023 client-test.cc:1287] Scanning with a client-side limit of 87, expecting 87 rows
I20250901 14:18:18.509670  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.510188  5023 client-test.cc:1287] Scanning with a client-side limit of 81, expecting 81 rows
I20250901 14:18:18.536191  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.536639  5023 client-test.cc:1287] Scanning with a client-side limit of 283, expecting 283 rows
I20250901 14:18:18.557416  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.557965  5023 client-test.cc:1287] Scanning with a client-side limit of 226, expecting 226 rows
I20250901 14:18:18.582896  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.583428  5023 client-test.cc:1287] Scanning with a client-side limit of 54, expecting 54 rows
I20250901 14:18:18.602770  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.603199  5023 client-test.cc:1287] Scanning with a client-side limit of 264, expecting 264 rows
I20250901 14:18:18.623361  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.623895  5023 client-test.cc:1287] Scanning with a client-side limit of 233, expecting 233 rows
I20250901 14:18:18.644909  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.645331  5023 client-test.cc:1287] Scanning with a client-side limit of 243, expecting 243 rows
I20250901 14:18:18.665959  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.666407  5023 client-test.cc:1287] Scanning with a client-side limit of 307, expecting 307 rows
I20250901 14:18:18.687737  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.688153  5023 client-test.cc:1287] Scanning with a client-side limit of 154, expecting 154 rows
I20250901 14:18:18.709162  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.709620  5023 client-test.cc:1287] Scanning with a client-side limit of 232, expecting 232 rows
I20250901 14:18:18.730937  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.731362  5023 client-test.cc:1287] Scanning with a client-side limit of 302, expecting 302 rows
I20250901 14:18:18.752403  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.752869  5023 client-test.cc:1287] Scanning with a client-side limit of 134, expecting 134 rows
I20250901 14:18:18.774362  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.774885  5023 client-test.cc:1287] Scanning with a client-side limit of 213, expecting 213 rows
I20250901 14:18:18.796128  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.796546  5023 client-test.cc:1287] Scanning with a client-side limit of 347, expecting 347 rows
I20250901 14:18:18.816845  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.817380  5023 client-test.cc:1287] Scanning with a client-side limit of 469, expecting 469 rows
I20250901 14:18:18.839355  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.839782  5023 client-test.cc:1287] Scanning with a client-side limit of 416, expecting 416 rows
I20250901 14:18:18.861850  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.862349  5023 client-test.cc:1287] Scanning with a client-side limit of 66, expecting 66 rows
I20250901 14:18:18.883242  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.883764  5023 client-test.cc:1287] Scanning with a client-side limit of 359, expecting 359 rows
I20250901 14:18:18.909579  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.910085  5023 client-test.cc:1287] Scanning with a client-side limit of 302, expecting 302 rows
I20250901 14:18:18.937788  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.938297  5023 client-test.cc:1287] Scanning with a client-side limit of 183, expecting 183 rows
I20250901 14:18:18.958858  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.959290  5023 client-test.cc:1287] Scanning with a client-side limit of 158, expecting 158 rows
I20250901 14:18:18.979229  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:18.979758  5023 client-test.cc:1287] Scanning with a client-side limit of 33, expecting 33 rows
I20250901 14:18:19.000445  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.000883  5023 client-test.cc:1287] Scanning with a client-side limit of 224, expecting 224 rows
I20250901 14:18:19.021453  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.022063  5023 client-test.cc:1287] Scanning with a client-side limit of 624, expecting 624 rows
I20250901 14:18:19.048043  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.048468  5023 client-test.cc:1287] Scanning with a client-side limit of 123, expecting 123 rows
I20250901 14:18:19.070461  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.070991  5023 client-test.cc:1287] Scanning with a client-side limit of 427, expecting 427 rows
I20250901 14:18:19.093987  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.094538  5023 client-test.cc:1287] Scanning with a client-side limit of 456, expecting 456 rows
I20250901 14:18:19.116590  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.117017  5023 client-test.cc:1287] Scanning with a client-side limit of 250, expecting 250 rows
I20250901 14:18:19.138170  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.138653  5023 client-test.cc:1287] Scanning with a client-side limit of 322, expecting 322 rows
I20250901 14:18:19.159736  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.160143  5023 client-test.cc:1287] Scanning with a client-side limit of 86, expecting 86 rows
I20250901 14:18:19.181476  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.182029  5023 client-test.cc:1287] Scanning with a client-side limit of 233, expecting 233 rows
W20250901 14:18:19.186396  5342 compilation_manager.cc:203] RowProjector compilation request submit failed: Service unavailable: Thread pool is at capacity (1/1 tasks running, 100/100 tasks queued)
I20250901 14:18:19.204277  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.204809  5023 client-test.cc:1287] Scanning with a client-side limit of 358, expecting 358 rows
I20250901 14:18:19.234328  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.234894  5023 client-test.cc:1287] Scanning with a client-side limit of 72, expecting 72 rows
I20250901 14:18:19.254555  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.255014  5023 client-test.cc:1287] Scanning with a client-side limit of 57, expecting 57 rows
I20250901 14:18:19.275187  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.275605  5023 client-test.cc:1287] Scanning with a client-side limit of 176, expecting 176 rows
I20250901 14:18:19.296860  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.297369  5023 client-test.cc:1287] Scanning with a client-side limit of 190, expecting 190 rows
I20250901 14:18:19.318252  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.318722  5023 client-test.cc:1287] Scanning with a client-side limit of 634, expecting 634 rows
I20250901 14:18:19.341723  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.342190  5023 client-test.cc:1287] Scanning with a client-side limit of 234, expecting 234 rows
I20250901 14:18:19.364063  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.364533  5023 client-test.cc:1287] Scanning with a client-side limit of 345, expecting 345 rows
I20250901 14:18:19.386193  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.386619  5023 client-test.cc:1287] Scanning with a client-side limit of 784, expecting 784 rows
I20250901 14:18:19.420816  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.421336  5023 client-test.cc:1287] Scanning with a client-side limit of 11, expecting 11 rows
I20250901 14:18:19.447464  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.447979  5023 client-test.cc:1287] Scanning with a client-side limit of 670, expecting 670 rows
I20250901 14:18:19.479328  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.479862  5023 client-test.cc:1287] Scanning with a client-side limit of 445, expecting 445 rows
I20250901 14:18:19.505180  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.505764  5023 client-test.cc:1287] Scanning with a client-side limit of 25, expecting 25 rows
I20250901 14:18:19.527319  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.527834  5023 client-test.cc:1287] Scanning with a client-side limit of 777, expecting 777 rows
I20250901 14:18:19.559588  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.560160  5023 client-test.cc:1287] Scanning with a client-side limit of 740, expecting 740 rows
I20250901 14:18:19.586963  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.587546  5023 client-test.cc:1287] Scanning with a client-side limit of 668, expecting 668 rows
I20250901 14:18:19.615520  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.615974  5023 client-test.cc:1287] Scanning with a client-side limit of 267, expecting 267 rows
I20250901 14:18:19.639329  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.639880  5023 client-test.cc:1287] Scanning with a client-side limit of 428, expecting 428 rows
I20250901 14:18:19.663661  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.664106  5023 client-test.cc:1287] Scanning with a client-side limit of 277, expecting 277 rows
I20250901 14:18:19.686175  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.686609  5023 client-test.cc:1287] Scanning with a client-side limit of 349, expecting 349 rows
I20250901 14:18:19.708334  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.708866  5023 client-test.cc:1287] Scanning with a client-side limit of 714, expecting 714 rows
I20250901 14:18:19.735698  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.736316  5023 client-test.cc:1287] Scanning with a client-side limit of 869, expecting 869 rows
I20250901 14:18:19.760938  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.761384  5023 client-test.cc:1287] Scanning with a client-side limit of 115, expecting 115 rows
I20250901 14:18:19.782472  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.782876  5023 client-test.cc:1287] Scanning with a client-side limit of 229, expecting 229 rows
I20250901 14:18:19.803403  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.803906  5023 client-test.cc:1287] Scanning with a client-side limit of 729, expecting 729 rows
I20250901 14:18:19.828915  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.829420  5023 client-test.cc:1287] Scanning with a client-side limit of 819, expecting 819 rows
I20250901 14:18:19.864197  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.864799  5023 client-test.cc:1287] Scanning with a client-side limit of 684, expecting 684 rows
I20250901 14:18:19.890837  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.891258  5023 client-test.cc:1287] Scanning with a client-side limit of 535, expecting 535 rows
I20250901 14:18:19.912982  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.913414  5023 client-test.cc:1287] Scanning with a client-side limit of 837, expecting 837 rows
I20250901 14:18:19.936717  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.937103  5023 client-test.cc:1287] Scanning with a client-side limit of 109, expecting 109 rows
I20250901 14:18:19.955634  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.956058  5023 client-test.cc:1287] Scanning with a client-side limit of 1079, expecting 1079 rows
I20250901 14:18:19.980569  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:19.980988  5023 client-test.cc:1287] Scanning with a client-side limit of 254, expecting 254 rows
I20250901 14:18:20.000818  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.001288  5023 client-test.cc:1287] Scanning with a client-side limit of 483, expecting 483 rows
I20250901 14:18:20.021502  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.021971  5023 client-test.cc:1287] Scanning with a client-side limit of 1169, expecting 1169 rows
I20250901 14:18:20.046761  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.047188  5023 client-test.cc:1287] Scanning with a client-side limit of 1105, expecting 1105 rows
I20250901 14:18:20.072561  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.072968  5023 client-test.cc:1287] Scanning with a client-side limit of 1183, expecting 1183 rows
I20250901 14:18:20.098874  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.099296  5023 client-test.cc:1287] Scanning with a client-side limit of 40, expecting 40 rows
I20250901 14:18:20.118005  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.118516  5023 client-test.cc:1287] Scanning with a client-side limit of 812, expecting 812 rows
I20250901 14:18:20.140699  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.141126  5023 client-test.cc:1287] Scanning with a client-side limit of 1091, expecting 1091 rows
I20250901 14:18:20.164673  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.165187  5023 client-test.cc:1287] Scanning with a client-side limit of 45, expecting 45 rows
I20250901 14:18:20.184124  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.184636  5023 client-test.cc:1287] Scanning with a client-side limit of 1183, expecting 1183 rows
I20250901 14:18:20.210835  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.211262  5023 client-test.cc:1287] Scanning with a client-side limit of 665, expecting 665 rows
I20250901 14:18:20.235263  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.235680  5023 client-test.cc:1287] Scanning with a client-side limit of 472, expecting 472 rows
I20250901 14:18:20.256949  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.257376  5023 client-test.cc:1287] Scanning with a client-side limit of 289, expecting 289 rows
I20250901 14:18:20.276160  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.276659  5023 client-test.cc:1287] Scanning with a client-side limit of 151, expecting 151 rows
I20250901 14:18:20.295261  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.295794  5023 client-test.cc:1287] Scanning with a client-side limit of 1322, expecting 1322 rows
I20250901 14:18:20.320729  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.321166  5023 client-test.cc:1287] Scanning with a client-side limit of 685, expecting 685 rows
I20250901 14:18:20.342365  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.342896  5023 client-test.cc:1287] Scanning with a client-side limit of 1157, expecting 1157 rows
I20250901 14:18:20.367115  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.367519  5023 client-test.cc:1287] Scanning with a client-side limit of 212, expecting 212 rows
I20250901 14:18:20.386857  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.387261  5023 client-test.cc:1287] Scanning with a client-side limit of 957, expecting 957 rows
I20250901 14:18:20.409503  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.409962  5023 client-test.cc:1287] Scanning with a client-side limit of 381, expecting 381 rows
I20250901 14:18:20.431529  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.431988  5023 client-test.cc:1287] Scanning with a client-side limit of 1351, expecting 1351 rows
I20250901 14:18:20.458945  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.459383  5023 client-test.cc:1287] Scanning with a client-side limit of 455, expecting 455 rows
I20250901 14:18:20.482007  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.482457  5023 client-test.cc:1287] Scanning with a client-side limit of 622, expecting 622 rows
I20250901 14:18:20.503897  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.504472  5023 client-test.cc:1287] Scanning with a client-side limit of 1230, expecting 1230 rows
I20250901 14:18:20.530064  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.530449  5023 client-test.cc:1287] Scanning with a client-side limit of 1485, expecting 1421 rows
I20250901 14:18:20.556864  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.557284  5023 client-test.cc:1287] Scanning with a client-side limit of 1116, expecting 1116 rows
I20250901 14:18:20.581941  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.582306  5023 client-test.cc:1287] Scanning with a client-side limit of 941, expecting 941 rows
I20250901 14:18:20.605229  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.605676  5023 client-test.cc:1287] Scanning with a client-side limit of 307, expecting 307 rows
I20250901 14:18:20.625015  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.625559  5023 client-test.cc:1287] Scanning with a client-side limit of 288, expecting 288 rows
I20250901 14:18:20.645980  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.646493  5023 client-test.cc:1287] Scanning with a client-side limit of 712, expecting 712 rows
I20250901 14:18:20.669842  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.670264  5023 client-test.cc:1287] Scanning with a client-side limit of 1333, expecting 1333 rows
I20250901 14:18:20.695935  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.696341  5023 client-test.cc:1287] Scanning with a client-side limit of 925, expecting 925 rows
I20250901 14:18:20.719580  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.720082  5023 client-test.cc:1287] Scanning with a client-side limit of 828, expecting 828 rows
I20250901 14:18:20.743069  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.743476  5023 client-test.cc:1287] Scanning with a client-side limit of 1428, expecting 1421 rows
I20250901 14:18:20.769107  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.769593  5023 client-test.cc:1287] Scanning with a client-side limit of 40, expecting 40 rows
I20250901 14:18:20.788081  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.788579  5023 client-test.cc:1287] Scanning with a client-side limit of 104, expecting 104 rows
I20250901 14:18:20.808184  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.808642  5023 client-test.cc:1287] Scanning with a client-side limit of 1158, expecting 1158 rows
I20250901 14:18:20.832563  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.832976  5023 client-test.cc:1287] Scanning with a client-side limit of 567, expecting 567 rows
I20250901 14:18:20.854286  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.854862  5023 client-test.cc:1287] Scanning with a client-side limit of 78, expecting 78 rows
I20250901 14:18:20.873814  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.874204  5023 client-test.cc:1287] Scanning with a client-side limit of 377, expecting 377 rows
I20250901 14:18:20.894281  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.894819  5023 client-test.cc:1287] Scanning with a client-side limit of 1306, expecting 1306 rows
I20250901 14:18:20.919698  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.920231  5023 client-test.cc:1287] Scanning with a client-side limit of 316, expecting 316 rows
I20250901 14:18:20.940552  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.940963  5023 client-test.cc:1287] Scanning with a client-side limit of 587, expecting 587 rows
I20250901 14:18:20.962430  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.962905  5023 client-test.cc:1287] Scanning with a client-side limit of 1034, expecting 1034 rows
I20250901 14:18:20.986861  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:20.987524  5023 client-test.cc:1287] Scanning with a client-side limit of 232, expecting 232 rows
I20250901 14:18:21.006943  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.007411  5023 client-test.cc:1287] Scanning with a client-side limit of 1371, expecting 1371 rows
I20250901 14:18:21.032859  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.033427  5023 client-test.cc:1287] Scanning with a client-side limit of 1301, expecting 1301 rows
I20250901 14:18:21.058528  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.058954  5023 client-test.cc:1287] Scanning with a client-side limit of 1841, expecting 1421 rows
I20250901 14:18:21.085440  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.085893  5023 client-test.cc:1287] Scanning with a client-side limit of 1218, expecting 1218 rows
I20250901 14:18:21.110188  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.110692  5023 client-test.cc:1287] Scanning with a client-side limit of 705, expecting 705 rows
I20250901 14:18:21.132145  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.132578  5023 client-test.cc:1287] Scanning with a client-side limit of 1246, expecting 1246 rows
I20250901 14:18:21.185177  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.185806  5023 client-test.cc:1287] Scanning with a client-side limit of 1076, expecting 1076 rows
I20250901 14:18:21.211859  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.212272  5023 client-test.cc:1287] Scanning with a client-side limit of 935, expecting 935 rows
I20250901 14:18:21.236336  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.236759  5023 client-test.cc:1287] Scanning with a client-side limit of 529, expecting 529 rows
I20250901 14:18:21.259249  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.259654  5023 client-test.cc:1287] Scanning with a client-side limit of 977, expecting 977 rows
I20250901 14:18:21.282727  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.283157  5023 client-test.cc:1287] Scanning with a client-side limit of 396, expecting 396 rows
I20250901 14:18:21.304397  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.304865  5023 client-test.cc:1287] Scanning with a client-side limit of 941, expecting 941 rows
I20250901 14:18:21.329396  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.329852  5023 client-test.cc:1287] Scanning with a client-side limit of 1120, expecting 1120 rows
I20250901 14:18:21.358009  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.358377  5023 client-test.cc:1287] Scanning with a client-side limit of 1128, expecting 1128 rows
I20250901 14:18:21.384502  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.384881  5023 client-test.cc:1287] Scanning with a client-side limit of 1031, expecting 1031 rows
I20250901 14:18:21.409135  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.409545  5023 client-test.cc:1287] Scanning with a client-side limit of 1851, expecting 1421 rows
I20250901 14:18:21.435340  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.435802  5023 client-test.cc:1287] Scanning with a client-side limit of 1094, expecting 1094 rows
I20250901 14:18:21.459865  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.460283  5023 client-test.cc:1287] Scanning with a client-side limit of 347, expecting 347 rows
I20250901 14:18:21.480023  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.480490  5023 client-test.cc:1287] Scanning with a client-side limit of 408, expecting 408 rows
I20250901 14:18:21.500228  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.500900  5023 client-test.cc:1287] Scanning with a client-side limit of 1202, expecting 1202 rows
I20250901 14:18:21.526260  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.526684  5023 client-test.cc:1287] Scanning with a client-side limit of 1011, expecting 1011 rows
I20250901 14:18:21.549048  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.549587  5023 client-test.cc:1287] Scanning with a client-side limit of 222, expecting 222 rows
I20250901 14:18:21.568085  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.568497  5023 client-test.cc:1287] Scanning with a client-side limit of 814, expecting 814 rows
I20250901 14:18:21.590485  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.590989  5023 client-test.cc:1287] Scanning with a client-side limit of 2055, expecting 1421 rows
I20250901 14:18:21.617318  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.617777  5023 client-test.cc:1287] Scanning with a client-side limit of 794, expecting 794 rows
I20250901 14:18:21.640722  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.641125  5023 client-test.cc:1287] Scanning with a client-side limit of 1338, expecting 1338 rows
I20250901 14:18:21.666628  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.667155  5023 client-test.cc:1287] Scanning with a client-side limit of 2079, expecting 1421 rows
I20250901 14:18:21.692394  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.692832  5023 client-test.cc:1287] Scanning with a client-side limit of 950, expecting 950 rows
I20250901 14:18:21.719468  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.719906  5023 client-test.cc:1287] Scanning with a client-side limit of 277, expecting 277 rows
I20250901 14:18:21.739719  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.740124  5023 client-test.cc:1287] Scanning with a client-side limit of 1325, expecting 1325 rows
I20250901 14:18:21.765044  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.765614  5023 client-test.cc:1287] Scanning with a client-side limit of 1060, expecting 1060 rows
I20250901 14:18:21.790192  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.790673  5023 client-test.cc:1287] Scanning with a client-side limit of 790, expecting 790 rows
I20250901 14:18:21.813709  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.814128  5023 client-test.cc:1287] Scanning with a client-side limit of 1733, expecting 1421 rows
I20250901 14:18:21.840009  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.840440  5023 client-test.cc:1287] Scanning with a client-side limit of 1679, expecting 1421 rows
I20250901 14:18:21.866333  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.866739  5023 client-test.cc:1287] Scanning with a client-side limit of 637, expecting 637 rows
I20250901 14:18:21.888267  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.888691  5023 client-test.cc:1287] Scanning with a client-side limit of 1180, expecting 1180 rows
I20250901 14:18:21.912557  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.912993  5023 client-test.cc:1287] Scanning with a client-side limit of 302, expecting 302 rows
I20250901 14:18:21.931957  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.932371  5023 client-test.cc:1287] Scanning with a client-side limit of 2052, expecting 1421 rows
I20250901 14:18:21.957368  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.957818  5023 client-test.cc:1287] Scanning with a client-side limit of 248, expecting 248 rows
I20250901 14:18:21.976850  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:21.977265  5023 client-test.cc:1287] Scanning with a client-side limit of 1999, expecting 1421 rows
I20250901 14:18:22.002137  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.002564  5023 client-test.cc:1287] Scanning with a client-side limit of 722, expecting 722 rows
I20250901 14:18:22.023607  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.024005  5023 client-test.cc:1287] Scanning with a client-side limit of 654, expecting 654 rows
I20250901 14:18:22.045396  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.045910  5023 client-test.cc:1287] Scanning with a client-side limit of 1692, expecting 1421 rows
I20250901 14:18:22.070719  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.071173  5023 client-test.cc:1287] Scanning with a client-side limit of 463, expecting 463 rows
I20250901 14:18:22.091594  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.092136  5023 client-test.cc:1287] Scanning with a client-side limit of 1081, expecting 1081 rows
I20250901 14:18:22.116043  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.116421  5023 client-test.cc:1287] Scanning with a client-side limit of 1376, expecting 1376 rows
I20250901 14:18:22.142371  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.142831  5023 client-test.cc:1287] Scanning with a client-side limit of 1560, expecting 1421 rows
I20250901 14:18:22.169371  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.169818  5023 client-test.cc:1287] Scanning with a client-side limit of 867, expecting 867 rows
I20250901 14:18:22.194191  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.194604  5023 client-test.cc:1287] Scanning with a client-side limit of 1122, expecting 1122 rows
I20250901 14:18:22.219666  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.220070  5023 client-test.cc:1287] Scanning with a client-side limit of 1439, expecting 1421 rows
I20250901 14:18:22.249711  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.250146  5023 client-test.cc:1287] Scanning with a client-side limit of 620, expecting 620 rows
I20250901 14:18:22.271292  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.271958  5023 client-test.cc:1287] Scanning with a client-side limit of 2511, expecting 1421 rows
I20250901 14:18:22.297821  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.298250  5023 client-test.cc:1287] Scanning with a client-side limit of 1375, expecting 1375 rows
I20250901 14:18:22.323438  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.323848  5023 client-test.cc:1287] Scanning with a client-side limit of 304, expecting 304 rows
I20250901 14:18:22.344165  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.344672  5023 client-test.cc:1287] Scanning with a client-side limit of 1676, expecting 1421 rows
I20250901 14:18:22.370200  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.370579  5023 client-test.cc:1287] Scanning with a client-side limit of 219, expecting 219 rows
I20250901 14:18:22.390223  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.390723  5023 client-test.cc:1287] Scanning with a client-side limit of 1827, expecting 1421 rows
I20250901 14:18:22.415874  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.416251  5023 client-test.cc:1287] Scanning with a client-side limit of 364, expecting 364 rows
I20250901 14:18:22.436740  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.437170  5023 client-test.cc:1287] Scanning with a client-side limit of 1600, expecting 1421 rows
I20250901 14:18:22.462224  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.462623  5023 client-test.cc:1287] Scanning with a client-side limit of 1416, expecting 1416 rows
I20250901 14:18:22.487701  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.488220  5023 client-test.cc:1287] Scanning with a client-side limit of 196, expecting 196 rows
I20250901 14:18:22.507894  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.508339  5023 client-test.cc:1287] Scanning with a client-side limit of 2547, expecting 1421 rows
I20250901 14:18:22.536129  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.536540  5023 client-test.cc:1287] Scanning with a client-side limit of 225, expecting 225 rows
I20250901 14:18:22.556124  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.556565  5023 client-test.cc:1287] Scanning with a client-side limit of 2672, expecting 1421 rows
I20250901 14:18:22.582149  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.582612  5023 client-test.cc:1287] Scanning with a client-side limit of 828, expecting 828 rows
I20250901 14:18:22.605612  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.606024  5023 client-test.cc:1287] Scanning with a client-side limit of 918, expecting 918 rows
I20250901 14:18:22.629915  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.630314  5023 client-test.cc:1287] Scanning with a client-side limit of 999, expecting 999 rows
I20250901 14:18:22.653221  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.653707  5023 client-test.cc:1287] Scanning with a client-side limit of 1747, expecting 1421 rows
I20250901 14:18:22.678807  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.679261  5023 client-test.cc:1287] Scanning with a client-side limit of 636, expecting 636 rows
I20250901 14:18:22.701004  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.701475  5023 client-test.cc:1287] Scanning with a client-side limit of 935, expecting 935 rows
I20250901 14:18:22.724289  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.724701  5023 client-test.cc:1287] Scanning with a client-side limit of 2305, expecting 1421 rows
I20250901 14:18:22.748862  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.749238  5023 client-test.cc:1287] Scanning with a client-side limit of 376, expecting 376 rows
I20250901 14:18:22.769434  5023 client-test.cc:1275] Total bytes read on tserver so far: 18336
I20250901 14:18:22.773751  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:22.802781  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:22.803381  5023 tablet_replica.cc:331] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545: stopping tablet replica
I20250901 14:18:22.804016  5023 raft_consensus.cc:2241] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:22.804546  5023 raft_consensus.cc:2270] T 7250aae0af78470e8bc33597a8b13ed3 P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:22.806304  5023 tablet_replica.cc:331] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545: stopping tablet replica
I20250901 14:18:22.806751  5023 raft_consensus.cc:2241] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:22.807129  5023 raft_consensus.cc:2270] T ec1e0c5f2c7e44529d1dcf7e7804fd4c P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:22.808636  5023 tablet_replica.cc:331] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545: stopping tablet replica
I20250901 14:18:22.809039  5023 raft_consensus.cc:2241] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:22.809496  5023 raft_consensus.cc:2270] T d23ef30561624dd7ab515d88eac3da11 P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:22.811393  5023 tablet_replica.cc:331] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545: stopping tablet replica
I20250901 14:18:22.811846  5023 raft_consensus.cc:2241] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:22.812316  5023 raft_consensus.cc:2270] T b03968af92be4b489247ac44cf8d0eda P 839ef01d38034d379a65947c71b7f545 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:22.832504  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:22.850769  5023 master.cc:561] Master@127.4.231.254:42369 shutting down...
I20250901 14:18:22.872273  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:22.872884  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:22.873191  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P f1511985fc68416c9d173125eaf13379: stopping tablet replica
I20250901 14:18:22.891780  5023 master.cc:583] Master@127.4.231.254:42369 shutdown complete.
[       OK ] ClientTest.TestRandomizedLimitScans (6983 ms)
[ RUN      ] ClientTest.TestProjectInvalidColumn
I20250901 14:18:22.922588  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:45307
I20250901 14:18:22.923640  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:22.929565  5426 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:22.929772  5427 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:22.930524  5429 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:22.931856  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:22.932617  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:22.932796  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:22.932907  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736302932898 us; error 0 us; skew 500 ppm
I20250901 14:18:22.933321  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:22.935572  5023 webserver.cc:480] Webserver started at http://127.4.231.254:44041/ using document root <none> and password file <none>
I20250901 14:18:22.935992  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:22.936141  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:22.936343  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:22.937479  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestProjectInvalidColumn.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "f85b633502c449ee861bca8acc9dbcb6"
format_stamp: "Formatted at 2025-09-01 14:18:22 on dist-test-slave-9gf0"
I20250901 14:18:22.941715  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.000s
I20250901 14:18:22.944823  5434 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:22.945667  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:22.945928  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestProjectInvalidColumn.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "f85b633502c449ee861bca8acc9dbcb6"
format_stamp: "Formatted at 2025-09-01 14:18:22 on dist-test-slave-9gf0"
I20250901 14:18:22.946192  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestProjectInvalidColumn.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestProjectInvalidColumn.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestProjectInvalidColumn.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:22.966353  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:22.967584  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:23.010663  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:45307
I20250901 14:18:23.010748  5495 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:45307 every 8 connection(s)
I20250901 14:18:23.014385  5496 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:23.024921  5496 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6: Bootstrap starting.
I20250901 14:18:23.029363  5496 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:23.033470  5496 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6: No bootstrap required, opened a new log
I20250901 14:18:23.035562  5496 raft_consensus.cc:357] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f85b633502c449ee861bca8acc9dbcb6" member_type: VOTER }
I20250901 14:18:23.036017  5496 raft_consensus.cc:383] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:23.036248  5496 raft_consensus.cc:738] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: f85b633502c449ee861bca8acc9dbcb6, State: Initialized, Role: FOLLOWER
I20250901 14:18:23.036800  5496 consensus_queue.cc:260] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f85b633502c449ee861bca8acc9dbcb6" member_type: VOTER }
I20250901 14:18:23.037276  5496 raft_consensus.cc:397] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:23.037496  5496 raft_consensus.cc:491] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:23.037812  5496 raft_consensus.cc:3058] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:23.042183  5496 raft_consensus.cc:513] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f85b633502c449ee861bca8acc9dbcb6" member_type: VOTER }
I20250901 14:18:23.042688  5496 leader_election.cc:304] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: f85b633502c449ee861bca8acc9dbcb6; no voters: 
I20250901 14:18:23.043848  5496 leader_election.cc:290] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:23.044132  5499 raft_consensus.cc:2802] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:23.045449  5499 raft_consensus.cc:695] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 1 LEADER]: Becoming Leader. State: Replica: f85b633502c449ee861bca8acc9dbcb6, State: Running, Role: LEADER
I20250901 14:18:23.046154  5499 consensus_queue.cc:237] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f85b633502c449ee861bca8acc9dbcb6" member_type: VOTER }
I20250901 14:18:23.046830  5496 sys_catalog.cc:564] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:23.049108  5500 sys_catalog.cc:455] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "f85b633502c449ee861bca8acc9dbcb6" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f85b633502c449ee861bca8acc9dbcb6" member_type: VOTER } }
I20250901 14:18:23.049163  5501 sys_catalog.cc:455] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [sys.catalog]: SysCatalogTable state changed. Reason: New leader f85b633502c449ee861bca8acc9dbcb6. Latest consensus state: current_term: 1 leader_uuid: "f85b633502c449ee861bca8acc9dbcb6" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f85b633502c449ee861bca8acc9dbcb6" member_type: VOTER } }
I20250901 14:18:23.049841  5501 sys_catalog.cc:458] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:23.052282  5500 sys_catalog.cc:458] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:23.052646  5505 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:23.057390  5505 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:23.061995  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:23.066321  5505 catalog_manager.cc:1349] Generated new cluster ID: 6779e55a4a7b4f1d8bb694b127d26353
I20250901 14:18:23.066622  5505 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:23.087751  5505 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:23.089056  5505 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:23.108464  5505 catalog_manager.cc:5955] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6: Generated new TSK 0
I20250901 14:18:23.109023  5505 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:23.129351  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:23.135306  5517 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:23.136507  5518 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:23.138121  5520 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:23.138216  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:23.139051  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:23.139221  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:23.139384  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736303139364 us; error 0 us; skew 500 ppm
I20250901 14:18:23.139887  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:23.142117  5023 webserver.cc:480] Webserver started at http://127.4.231.193:38919/ using document root <none> and password file <none>
I20250901 14:18:23.142612  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:23.142904  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:23.143189  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:23.144219  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestProjectInvalidColumn.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "b12f335150344b22b96587e59b06c8ee"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:23.148509  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.006s	sys 0.000s
I20250901 14:18:23.151623  5525 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:23.152338  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:23.152623  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestProjectInvalidColumn.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "b12f335150344b22b96587e59b06c8ee"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:23.152901  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestProjectInvalidColumn.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestProjectInvalidColumn.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestProjectInvalidColumn.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:23.188767  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:23.190040  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:23.194739  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:23.195142  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:23.195538  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:23.195850  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:23.246886  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:40145
I20250901 14:18:23.246966  5595 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:40145 every 8 connection(s)
I20250901 14:18:23.251688  5596 heartbeater.cc:344] Connected to a master server at 127.4.231.254:45307
I20250901 14:18:23.252012  5596 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:23.252656  5596 heartbeater.cc:507] Master 127.4.231.254:45307 requested a full tablet report, sending...
I20250901 14:18:23.254472  5451 ts_manager.cc:194] Registered new tserver with Master: b12f335150344b22b96587e59b06c8ee (127.4.231.193:40145)
I20250901 14:18:23.255072  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004886444s
I20250901 14:18:23.256173  5451 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:33344
I20250901 14:18:23.268573  5596 heartbeater.cc:499] Master 127.4.231.254:45307 was elected leader, sending a full tablet report...
I20250901 14:18:23.275650  5450 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:33364:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:23.308326  5561 tablet_service.cc:1468] Processing CreateTablet for tablet 61e36ad610314ca6899bfe8c7a07bd41 (DEFAULT_TABLE table=client-testtb [id=903c8738057a45b1be83ac0dab6e3a45]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:23.308558  5559 tablet_service.cc:1468] Processing CreateTablet for tablet 5727b00949d0434990df5f44ddb5c4b6 (DEFAULT_TABLE table=client-testtb [id=903c8738057a45b1be83ac0dab6e3a45]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:23.309721  5561 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 61e36ad610314ca6899bfe8c7a07bd41. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:23.310212  5559 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 5727b00949d0434990df5f44ddb5c4b6. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:23.324019  5606 tablet_bootstrap.cc:492] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee: Bootstrap starting.
I20250901 14:18:23.329046  5606 tablet_bootstrap.cc:654] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:23.333714  5606 tablet_bootstrap.cc:492] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee: No bootstrap required, opened a new log
I20250901 14:18:23.334059  5606 ts_tablet_manager.cc:1397] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee: Time spent bootstrapping tablet: real 0.010s	user 0.009s	sys 0.000s
I20250901 14:18:23.335853  5606 raft_consensus.cc:357] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b12f335150344b22b96587e59b06c8ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40145 } }
I20250901 14:18:23.336251  5606 raft_consensus.cc:383] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:23.336499  5606 raft_consensus.cc:738] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: b12f335150344b22b96587e59b06c8ee, State: Initialized, Role: FOLLOWER
I20250901 14:18:23.337033  5606 consensus_queue.cc:260] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b12f335150344b22b96587e59b06c8ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40145 } }
I20250901 14:18:23.337706  5606 raft_consensus.cc:397] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:23.337966  5606 raft_consensus.cc:491] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:23.338292  5606 raft_consensus.cc:3058] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:23.344208  5606 raft_consensus.cc:513] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b12f335150344b22b96587e59b06c8ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40145 } }
I20250901 14:18:23.344729  5606 leader_election.cc:304] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: b12f335150344b22b96587e59b06c8ee; no voters: 
I20250901 14:18:23.345880  5606 leader_election.cc:290] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:23.346257  5608 raft_consensus.cc:2802] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:23.347672  5608 raft_consensus.cc:695] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 1 LEADER]: Becoming Leader. State: Replica: b12f335150344b22b96587e59b06c8ee, State: Running, Role: LEADER
I20250901 14:18:23.348369  5606 ts_tablet_manager.cc:1428] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee: Time spent starting tablet: real 0.014s	user 0.006s	sys 0.008s
I20250901 14:18:23.348429  5608 consensus_queue.cc:237] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b12f335150344b22b96587e59b06c8ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40145 } }
I20250901 14:18:23.349088  5606 tablet_bootstrap.cc:492] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee: Bootstrap starting.
I20250901 14:18:23.355201  5606 tablet_bootstrap.cc:654] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:23.355398  5450 catalog_manager.cc:5582] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee reported cstate change: term changed from 0 to 1, leader changed from <none> to b12f335150344b22b96587e59b06c8ee (127.4.231.193). New cstate: current_term: 1 leader_uuid: "b12f335150344b22b96587e59b06c8ee" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b12f335150344b22b96587e59b06c8ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40145 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:23.359668  5606 tablet_bootstrap.cc:492] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee: No bootstrap required, opened a new log
I20250901 14:18:23.360029  5606 ts_tablet_manager.cc:1397] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee: Time spent bootstrapping tablet: real 0.011s	user 0.009s	sys 0.000s
I20250901 14:18:23.362010  5606 raft_consensus.cc:357] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b12f335150344b22b96587e59b06c8ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40145 } }
I20250901 14:18:23.362506  5606 raft_consensus.cc:383] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:23.362718  5606 raft_consensus.cc:738] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: b12f335150344b22b96587e59b06c8ee, State: Initialized, Role: FOLLOWER
I20250901 14:18:23.363226  5606 consensus_queue.cc:260] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b12f335150344b22b96587e59b06c8ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40145 } }
I20250901 14:18:23.363662  5606 raft_consensus.cc:397] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:23.363893  5606 raft_consensus.cc:491] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:23.364207  5606 raft_consensus.cc:3058] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:23.369202  5606 raft_consensus.cc:513] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b12f335150344b22b96587e59b06c8ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40145 } }
I20250901 14:18:23.369865  5606 leader_election.cc:304] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: b12f335150344b22b96587e59b06c8ee; no voters: 
I20250901 14:18:23.370350  5606 leader_election.cc:290] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:23.370473  5608 raft_consensus.cc:2802] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:23.371158  5608 raft_consensus.cc:695] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 1 LEADER]: Becoming Leader. State: Replica: b12f335150344b22b96587e59b06c8ee, State: Running, Role: LEADER
I20250901 14:18:23.371711  5606 ts_tablet_manager.cc:1428] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee: Time spent starting tablet: real 0.011s	user 0.011s	sys 0.000s
I20250901 14:18:23.371773  5608 consensus_queue.cc:237] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b12f335150344b22b96587e59b06c8ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40145 } }
I20250901 14:18:23.376641  5450 catalog_manager.cc:5582] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee reported cstate change: term changed from 0 to 1, leader changed from <none> to b12f335150344b22b96587e59b06c8ee (127.4.231.193). New cstate: current_term: 1 leader_uuid: "b12f335150344b22b96587e59b06c8ee" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b12f335150344b22b96587e59b06c8ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40145 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:23.391542  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:23.409435  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:23.410153  5023 tablet_replica.cc:331] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee: stopping tablet replica
I20250901 14:18:23.410632  5023 raft_consensus.cc:2241] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:23.411026  5023 raft_consensus.cc:2270] T 5727b00949d0434990df5f44ddb5c4b6 P b12f335150344b22b96587e59b06c8ee [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:23.412833  5023 tablet_replica.cc:331] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee: stopping tablet replica
I20250901 14:18:23.413249  5023 raft_consensus.cc:2241] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:23.413669  5023 raft_consensus.cc:2270] T 61e36ad610314ca6899bfe8c7a07bd41 P b12f335150344b22b96587e59b06c8ee [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:23.423053  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:23.432586  5023 master.cc:561] Master@127.4.231.254:45307 shutting down...
I20250901 14:18:23.448908  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:23.449440  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:23.449880  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P f85b633502c449ee861bca8acc9dbcb6: stopping tablet replica
I20250901 14:18:23.468868  5023 master.cc:583] Master@127.4.231.254:45307 shutdown complete.
[       OK ] ClientTest.TestProjectInvalidColumn (565 ms)
[ RUN      ] ClientTest.TestScanFaultTolerance
I20250901 14:18:23.488132  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:34121
I20250901 14:18:23.489151  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:23.493685  5615 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:23.494992  5616 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:23.495265  5618 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:23.495994  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:23.496814  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:23.496982  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:23.497097  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736303497086 us; error 0 us; skew 500 ppm
I20250901 14:18:23.497566  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:23.499928  5023 webserver.cc:480] Webserver started at http://127.4.231.254:45157/ using document root <none> and password file <none>
I20250901 14:18:23.500332  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:23.500495  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:23.500710  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:23.501804  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "47c635c1c1cb42fc837ae985f119c2ad"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:23.505986  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.004s	sys 0.000s
I20250901 14:18:23.509191  5623 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:23.509941  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:23.510210  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "47c635c1c1cb42fc837ae985f119c2ad"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:23.510470  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:23.535884  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:23.536978  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:23.576480  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:34121
I20250901 14:18:23.576581  5684 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:34121 every 8 connection(s)
I20250901 14:18:23.580044  5685 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:23.589619  5685 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad: Bootstrap starting.
I20250901 14:18:23.593518  5685 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:23.597244  5685 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad: No bootstrap required, opened a new log
I20250901 14:18:23.599102  5685 raft_consensus.cc:357] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "47c635c1c1cb42fc837ae985f119c2ad" member_type: VOTER }
I20250901 14:18:23.599467  5685 raft_consensus.cc:383] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:23.599718  5685 raft_consensus.cc:738] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 47c635c1c1cb42fc837ae985f119c2ad, State: Initialized, Role: FOLLOWER
I20250901 14:18:23.600219  5685 consensus_queue.cc:260] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "47c635c1c1cb42fc837ae985f119c2ad" member_type: VOTER }
I20250901 14:18:23.600628  5685 raft_consensus.cc:397] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:23.600854  5685 raft_consensus.cc:491] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:23.601115  5685 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:23.605369  5685 raft_consensus.cc:513] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "47c635c1c1cb42fc837ae985f119c2ad" member_type: VOTER }
I20250901 14:18:23.605937  5685 leader_election.cc:304] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 47c635c1c1cb42fc837ae985f119c2ad; no voters: 
I20250901 14:18:23.607112  5685 leader_election.cc:290] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:23.607398  5688 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:23.608700  5688 raft_consensus.cc:695] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 1 LEADER]: Becoming Leader. State: Replica: 47c635c1c1cb42fc837ae985f119c2ad, State: Running, Role: LEADER
I20250901 14:18:23.609310  5688 consensus_queue.cc:237] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "47c635c1c1cb42fc837ae985f119c2ad" member_type: VOTER }
I20250901 14:18:23.609933  5685 sys_catalog.cc:564] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:23.611887  5689 sys_catalog.cc:455] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "47c635c1c1cb42fc837ae985f119c2ad" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "47c635c1c1cb42fc837ae985f119c2ad" member_type: VOTER } }
I20250901 14:18:23.611943  5690 sys_catalog.cc:455] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [sys.catalog]: SysCatalogTable state changed. Reason: New leader 47c635c1c1cb42fc837ae985f119c2ad. Latest consensus state: current_term: 1 leader_uuid: "47c635c1c1cb42fc837ae985f119c2ad" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "47c635c1c1cb42fc837ae985f119c2ad" member_type: VOTER } }
I20250901 14:18:23.612596  5689 sys_catalog.cc:458] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:23.612691  5690 sys_catalog.cc:458] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:23.618422  5693 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:23.623214  5693 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:23.625833  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:23.631124  5693 catalog_manager.cc:1349] Generated new cluster ID: ede64acf84084b4b99e8505392e9ef62
I20250901 14:18:23.631448  5693 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:23.644762  5693 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:23.646088  5693 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:23.658419  5693 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad: Generated new TSK 0
I20250901 14:18:23.659004  5693 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:23.692598  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:23.698079  5706 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:23.699314  5707 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:23.700722  5709 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:23.701174  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:23.701906  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:23.702078  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:23.702195  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736303702185 us; error 0 us; skew 500 ppm
I20250901 14:18:23.702621  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:23.704749  5023 webserver.cc:480] Webserver started at http://127.4.231.193:42181/ using document root <none> and password file <none>
I20250901 14:18:23.705178  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:23.705348  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:23.705601  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:23.706590  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:23.710707  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:23.713806  5714 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:23.714533  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:23.714847  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:23.715137  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:23.731768  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:23.732769  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:23.737383  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:23.737782  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.001s	sys 0.000s
I20250901 14:18:23.738138  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:23.738394  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:23.794162  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:37489
I20250901 14:18:23.794250  5784 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:37489 every 8 connection(s)
I20250901 14:18:23.798542  5785 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:23.798923  5785 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:23.799649  5785 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:23.801406  5640 ts_manager.cc:194] Registered new tserver with Master: 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489)
I20250901 14:18:23.802052  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004872484s
I20250901 14:18:23.803651  5640 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57812
I20250901 14:18:23.816323  5785 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:23.823640  5639 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:57832:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:23.853889  5750 tablet_service.cc:1468] Processing CreateTablet for tablet 1783e1d6604d4105ae855ba92e83a53a (DEFAULT_TABLE table=client-testtb [id=bdf9c8035aa6439fb625860cbfb2bac6]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:23.854218  5749 tablet_service.cc:1468] Processing CreateTablet for tablet b6da5a53e9d44dc9aa1db6a6cb0a6209 (DEFAULT_TABLE table=client-testtb [id=bdf9c8035aa6439fb625860cbfb2bac6]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:23.855086  5750 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 1783e1d6604d4105ae855ba92e83a53a. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:23.855538  5749 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet b6da5a53e9d44dc9aa1db6a6cb0a6209. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:23.868191  5795 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:23.872195  5795 tablet_bootstrap.cc:654] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:23.876446  5795 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: No bootstrap required, opened a new log
I20250901 14:18:23.876797  5795 ts_tablet_manager.cc:1397] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.009s	user 0.003s	sys 0.004s
I20250901 14:18:23.878551  5795 raft_consensus.cc:357] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:23.878970  5795 raft_consensus.cc:383] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:23.879164  5795 raft_consensus.cc:738] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:23.879648  5795 consensus_queue.cc:260] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:23.880059  5795 raft_consensus.cc:397] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:23.880241  5795 raft_consensus.cc:491] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:23.880453  5795 raft_consensus.cc:3058] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:23.885805  5795 raft_consensus.cc:513] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:23.886336  5795 leader_election.cc:304] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314; no voters: 
I20250901 14:18:23.887440  5795 leader_election.cc:290] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:23.887727  5797 raft_consensus.cc:2802] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:23.889564  5795 ts_tablet_manager.cc:1428] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.013s	user 0.009s	sys 0.005s
I20250901 14:18:23.889703  5797 raft_consensus.cc:695] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 LEADER]: Becoming Leader. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Running, Role: LEADER
I20250901 14:18:23.890268  5795 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:23.890328  5797 consensus_queue.cc:237] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:23.896226  5795 tablet_bootstrap.cc:654] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:23.897312  5640 catalog_manager.cc:5582] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 reported cstate change: term changed from 0 to 1, leader changed from <none> to 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:23.900949  5795 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: No bootstrap required, opened a new log
I20250901 14:18:23.901291  5795 ts_tablet_manager.cc:1397] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.011s	user 0.009s	sys 0.000s
I20250901 14:18:23.903424  5795 raft_consensus.cc:357] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:23.905016  5795 raft_consensus.cc:383] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:23.905350  5795 raft_consensus.cc:738] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:23.906075  5795 consensus_queue.cc:260] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:23.906621  5795 raft_consensus.cc:397] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:23.906853  5795 raft_consensus.cc:491] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:23.907099  5795 raft_consensus.cc:3058] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:23.912925  5795 raft_consensus.cc:513] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:23.913470  5795 leader_election.cc:304] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314; no voters: 
I20250901 14:18:23.913985  5795 leader_election.cc:290] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:23.914124  5797 raft_consensus.cc:2802] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:23.914600  5797 raft_consensus.cc:695] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 LEADER]: Becoming Leader. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Running, Role: LEADER
I20250901 14:18:23.915373  5795 ts_tablet_manager.cc:1428] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.014s	user 0.013s	sys 0.000s
I20250901 14:18:23.915254  5797 consensus_queue.cc:237] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:23.920562  5640 catalog_manager.cc:5582] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 reported cstate change: term changed from 0 to 1, leader changed from <none> to 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:23.940501  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:23.946938  5804 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:23.948648  5805 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:23.951143  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:23.951478  5807 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:23.952224  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:23.952421  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:23.952610  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736303952592 us; error 0 us; skew 500 ppm
I20250901 14:18:23.953083  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:23.955211  5023 webserver.cc:480] Webserver started at http://127.4.231.194:36543/ using document root <none> and password file <none>
I20250901 14:18:23.955703  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:23.955893  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:23.956167  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:23.957473  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root/instance:
uuid: "6677188f0c07429b85c6c5693f84aba2"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:23.961838  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.000s
I20250901 14:18:23.964828  5812 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:23.965678  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.001s
I20250901 14:18:23.965945  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
uuid: "6677188f0c07429b85c6c5693f84aba2"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:23.966215  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:23.985692  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:23.986970  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:23.995016  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:23.995349  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:23.995690  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:23.995901  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:24.090226  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.194:45665
I20250901 14:18:24.090313  5881 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.194:45665 every 8 connection(s)
I20250901 14:18:24.094933  5882 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:24.095268  5882 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:24.095911  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
I20250901 14:18:24.096138  5882 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:24.097977  5640 ts_manager.cc:194] Registered new tserver with Master: 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665)
I20250901 14:18:24.099504  5640 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57836
W20250901 14:18:24.102466  5884 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:24.105330  5885 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:24.108067  5887 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:24.109387  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:24.110213  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:24.110392  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:24.110525  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736304110513 us; error 0 us; skew 500 ppm
I20250901 14:18:24.111053  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:24.112811  5882 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:24.113374  5023 webserver.cc:480] Webserver started at http://127.4.231.195:36863/ using document root <none> and password file <none>
I20250901 14:18:24.114025  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:24.114269  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:24.114636  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:24.116155  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root/instance:
uuid: "8218002e4fa24f2993fae33eeaf4b0bf"
format_stamp: "Formatted at 2025-09-01 14:18:24 on dist-test-slave-9gf0"
I20250901 14:18:24.120786  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.006s	sys 0.000s
I20250901 14:18:24.124352  5892 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:24.125141  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:24.125427  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
uuid: "8218002e4fa24f2993fae33eeaf4b0bf"
format_stamp: "Formatted at 2025-09-01 14:18:24 on dist-test-slave-9gf0"
I20250901 14:18:24.125770  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:24.150475  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:24.151607  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:24.159392  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:24.159777  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:24.160070  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:24.160214  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:24.257761  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.195:33159
I20250901 14:18:24.257843  5961 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.195:33159 every 8 connection(s)
I20250901 14:18:24.263305  5962 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:24.263644  5962 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:24.264526  5962 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:24.266361  5640 ts_manager.cc:194] Registered new tserver with Master: 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
I20250901 14:18:24.266558  5023 internal_mini_cluster.cc:371] 3 TS(s) registered with all masters after 0.004173462s
I20250901 14:18:24.267913  5640 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57858
I20250901 14:18:24.269160  5639 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:57832:
name: "TestScanFaultTolerance"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 3
split_rows_range_bounds {
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
W20250901 14:18:24.271095  5639 catalog_manager.cc:6944] The number of live tablet servers is not enough to re-replicate a tablet replica of the newly created table TestScanFaultTolerance in case of a server failure: 4 tablet servers would be needed, 3 are available. Consider bringing up more tablet servers.
I20250901 14:18:24.280426  5962 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:24.296716  5750 tablet_service.cc:1468] Processing CreateTablet for tablet 8f63f95c6bf9404797c6321a821dfbcf (DEFAULT_TABLE table=TestScanFaultTolerance [id=be7021365051486aaadf868f5d9a8958]), partition=RANGE (key) PARTITION UNBOUNDED
I20250901 14:18:24.297971  5750 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 8f63f95c6bf9404797c6321a821dfbcf. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:24.304657  5847 tablet_service.cc:1468] Processing CreateTablet for tablet 8f63f95c6bf9404797c6321a821dfbcf (DEFAULT_TABLE table=TestScanFaultTolerance [id=be7021365051486aaadf868f5d9a8958]), partition=RANGE (key) PARTITION UNBOUNDED
I20250901 14:18:24.306072  5847 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 8f63f95c6bf9404797c6321a821dfbcf. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:24.320384  5795 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:24.320324  5927 tablet_service.cc:1468] Processing CreateTablet for tablet 8f63f95c6bf9404797c6321a821dfbcf (DEFAULT_TABLE table=TestScanFaultTolerance [id=be7021365051486aaadf868f5d9a8958]), partition=RANGE (key) PARTITION UNBOUNDED
I20250901 14:18:24.321715  5927 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 8f63f95c6bf9404797c6321a821dfbcf. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:24.323132  5967 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Bootstrap starting.
I20250901 14:18:24.328902  5795 tablet_bootstrap.cc:654] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:24.335170  5967 tablet_bootstrap.cc:654] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:24.338002  5795 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: No bootstrap required, opened a new log
I20250901 14:18:24.338579  5795 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.018s	user 0.012s	sys 0.002s
I20250901 14:18:24.341629  5795 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:24.342517  5795 raft_consensus.cc:383] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:24.342891  5795 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:24.343894  5967 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: No bootstrap required, opened a new log
I20250901 14:18:24.343729  5795 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:24.344362  5967 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Time spent bootstrapping tablet: real 0.021s	user 0.010s	sys 0.005s
I20250901 14:18:24.347898  5795 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.009s	user 0.004s	sys 0.002s
I20250901 14:18:24.350315  5970 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap starting.
I20250901 14:18:24.350085  5967 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:24.352654  5967 raft_consensus.cc:383] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:24.353027  5967 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 6677188f0c07429b85c6c5693f84aba2, State: Initialized, Role: FOLLOWER
I20250901 14:18:24.354344  5967 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:24.357046  5967 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Time spent starting tablet: real 0.012s	user 0.007s	sys 0.000s
I20250901 14:18:24.358714  5970 tablet_bootstrap.cc:654] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:24.364252  5970 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: No bootstrap required, opened a new log
I20250901 14:18:24.364622  5970 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent bootstrapping tablet: real 0.015s	user 0.005s	sys 0.006s
I20250901 14:18:24.366688  5970 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:24.367321  5970 raft_consensus.cc:383] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:24.367478  5797 raft_consensus.cc:491] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Starting pre-election (no leader contacted us within the election timeout)
I20250901 14:18:24.367846  5970 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8218002e4fa24f2993fae33eeaf4b0bf, State: Initialized, Role: FOLLOWER
I20250901 14:18:24.367997  5797 raft_consensus.cc:513] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:24.368623  5970 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:24.376077  5797 leader_election.cc:290] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 1 pre-election: Requested pre-vote from peers 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665), 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
I20250901 14:18:24.381316  5970 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent starting tablet: real 0.016s	user 0.003s	sys 0.006s
I20250901 14:18:24.385093  5857 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "8f63f95c6bf9404797c6321a821dfbcf" candidate_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "6677188f0c07429b85c6c5693f84aba2" is_pre_election: true
I20250901 14:18:24.386152  5857 raft_consensus.cc:2466] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 0eb26b9ecbd843d9bfacf0d2bd7b3314 in term 0.
I20250901 14:18:24.387490  5717 leader_election.cc:304] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 1 pre-election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314, 6677188f0c07429b85c6c5693f84aba2; no voters: 
I20250901 14:18:24.388345  5797 raft_consensus.cc:2802] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Leader pre-election won for term 1
I20250901 14:18:24.389088  5797 raft_consensus.cc:491] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Starting leader election (no leader contacted us within the election timeout)
I20250901 14:18:24.389462  5797 raft_consensus.cc:3058] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:24.389950  5937 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "8f63f95c6bf9404797c6321a821dfbcf" candidate_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" is_pre_election: true
I20250901 14:18:24.390751  5937 raft_consensus.cc:2466] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 0eb26b9ecbd843d9bfacf0d2bd7b3314 in term 0.
I20250901 14:18:24.395488  5797 raft_consensus.cc:513] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:24.397162  5797 leader_election.cc:290] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 1 election: Requested vote from peers 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665), 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
I20250901 14:18:24.397854  5857 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "8f63f95c6bf9404797c6321a821dfbcf" candidate_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "6677188f0c07429b85c6c5693f84aba2"
I20250901 14:18:24.398299  5857 raft_consensus.cc:3058] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:24.398140  5937 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "8f63f95c6bf9404797c6321a821dfbcf" candidate_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "8218002e4fa24f2993fae33eeaf4b0bf"
I20250901 14:18:24.398725  5937 raft_consensus.cc:3058] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:24.406715  5937 raft_consensus.cc:2466] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 0eb26b9ecbd843d9bfacf0d2bd7b3314 in term 1.
I20250901 14:18:24.407663  5716 leader_election.cc:304] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314, 8218002e4fa24f2993fae33eeaf4b0bf; no voters: 
I20250901 14:18:24.407778  5857 raft_consensus.cc:2466] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 0eb26b9ecbd843d9bfacf0d2bd7b3314 in term 1.
I20250901 14:18:24.408479  5797 raft_consensus.cc:2802] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:24.408955  5797 raft_consensus.cc:695] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 LEADER]: Becoming Leader. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Running, Role: LEADER
I20250901 14:18:24.409723  5797 consensus_queue.cc:237] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 2, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:24.417922  5640 catalog_manager.cc:5582] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 reported cstate change: term changed from 0 to 1, leader changed from <none> to 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" committed_config { opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:24.456506  5797 consensus_queue.cc:1035] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Connected to new peer: Peer: permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.001s
I20250901 14:18:24.472780  5799 consensus_queue.cc:1035] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Connected to new peer: Peer: permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.001s
I20250901 14:18:25.115710  5023 client-test.cc:1911] Doing a scan while restarting a tserver and waiting for it to come up...
I20250901 14:18:25.204156  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:25.209582  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:25.210233  5023 client-test.cc:907] Restarting TS at 127.4.231.195:33159
I20250901 14:18:25.210574  5023 tablet_server.cc:178] TabletServer@127.4.231.195:0 shutting down...
W20250901 14:18:25.232983  5716 proxy.cc:239] Call had error, refreshing address and retrying: Remote error: Service unavailable: service kudu.consensus.ConsensusService not registered on TabletServer
I20250901 14:18:25.233650  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:25.234292  5023 tablet_replica.cc:331] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: stopping tablet replica
I20250901 14:18:25.234818  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:25.235227  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Raft consensus is shut down!
W20250901 14:18:25.239421  5716 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Couldn't send request to peer 8218002e4fa24f2993fae33eeaf4b0bf. Status: Remote error: Service unavailable: service kudu.consensus.ConsensusService not registered on TabletServer. This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:25.254774  5023 tablet_server.cc:195] TabletServer@127.4.231.195:0 shutdown complete.
I20250901 14:18:25.269438  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:25.276969  5993 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:25.280543  5994 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:25.283253  5996 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:25.283499  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:25.285560  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:25.285812  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:25.285974  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736305285959 us; error 0 us; skew 500 ppm
I20250901 14:18:25.286531  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:25.288865  5023 webserver.cc:480] Webserver started at http://127.4.231.195:36863/ using document root <none> and password file <none>
I20250901 14:18:25.289390  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:25.289611  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:25.293587  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.003s	user 0.003s	sys 0.000s
I20250901 14:18:25.296439  6001 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:25.297250  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:25.297573  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
uuid: "8218002e4fa24f2993fae33eeaf4b0bf"
format_stamp: "Formatted at 2025-09-01 14:18:24 on dist-test-slave-9gf0"
I20250901 14:18:25.297874  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:25.311407  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:25.312520  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:25.346678  6011 ts_tablet_manager.cc:542] Loading tablet metadata (0/1 complete)
I20250901 14:18:25.355273  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (1 total tablets, 1 live tablets)
I20250901 14:18:25.355595  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.038s	user 0.004s	sys 0.000s
I20250901 14:18:25.355938  5023 ts_tablet_manager.cc:594] Registering tablets (0/1 complete)
I20250901 14:18:25.361820  5023 ts_tablet_manager.cc:610] Registered 1 tablets
I20250901 14:18:25.362084  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.006s	user 0.003s	sys 0.003s
I20250901 14:18:25.362257  6011 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap starting.
I20250901 14:18:25.485195  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.195:33159
I20250901 14:18:25.485482  6073 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.195:33159 every 8 connection(s)
I20250901 14:18:25.490923  5023 client-test.cc:911] Waiting for TS 127.4.231.195:33159 to finish bootstrapping
I20250901 14:18:25.491775  6074 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:25.492094  6074 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:25.492748  6074 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:25.495424  5638 ts_manager.cc:194] Re-registered known tserver with Master: 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
I20250901 14:18:25.500478  5638 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57878
I20250901 14:18:25.513728  6074 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
W20250901 14:18:25.520676  5716 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Couldn't send request to peer 8218002e4fa24f2993fae33eeaf4b0bf. Error code: TABLET_NOT_RUNNING (12). Status: Illegal state: Tablet not RUNNING: BOOTSTRAPPING. This is attempt 6: this message will repeat every 5th retry.
I20250901 14:18:25.629854  6011 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap replayed 1/1 log segments. Stats: ops{read=2 overwritten=0 applied=2 ignored=0} inserts{seen=1000 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:25.630736  6011 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap complete.
I20250901 14:18:25.631278  6011 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent bootstrapping tablet: real 0.269s	user 0.228s	sys 0.029s
I20250901 14:18:25.632856  6011 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:25.633397  6011 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8218002e4fa24f2993fae33eeaf4b0bf, State: Initialized, Role: FOLLOWER
I20250901 14:18:25.634018  6011 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 2, Last appended: 1.2, Last appended by leader: 2, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:25.636222  6011 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent starting tablet: real 0.005s	user 0.005s	sys 0.000s
I20250901 14:18:25.637085  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:25.646854  6029 tablet_service.cc:3073] Scan: Not found: Scanner f9913af377cf4c1eb076776637a39628 not found (it may have expired): call sequence id=1, remote={username='slave'} at 127.0.0.1:56152
W20250901 14:18:25.647909  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  } elsewhere.
I20250901 14:18:25.712911  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:25.716652  5023 client-test.cc:1918] Doing a scan while restarting a tserver...
I20250901 14:18:25.784085  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:25.787953  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:25.788480  5023 client-test.cc:907] Restarting TS at 127.4.231.195:33159
I20250901 14:18:25.788841  5023 tablet_server.cc:178] TabletServer@127.4.231.195:33159 shutting down...
I20250901 14:18:25.811702  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
W20250901 14:18:25.811676  5716 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Couldn't send request to peer 8218002e4fa24f2993fae33eeaf4b0bf. Status: Remote error: Service unavailable: service kudu.consensus.ConsensusService not registered on TabletServer. This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:25.812669  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:25.813205  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:25.813622  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:25.831133  5023 tablet_server.cc:195] TabletServer@127.4.231.195:33159 shutdown complete.
I20250901 14:18:25.845413  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:25.853379  6080 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:25.853742  6079 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:25.855062  6082 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:25.855949  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:25.857072  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:25.857335  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:25.857559  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736305857521 us; error 0 us; skew 500 ppm
I20250901 14:18:25.858227  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:25.860760  5023 webserver.cc:480] Webserver started at http://127.4.231.195:36863/ using document root <none> and password file <none>
I20250901 14:18:25.861394  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:25.861642  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:25.865623  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.003s	user 0.004s	sys 0.000s
I20250901 14:18:25.868763  6087 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:25.869590  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:25.869889  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
uuid: "8218002e4fa24f2993fae33eeaf4b0bf"
format_stamp: "Formatted at 2025-09-01 14:18:24 on dist-test-slave-9gf0"
I20250901 14:18:25.870151  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:25.893296  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:25.894346  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:25.910808  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (1 total tablets, 1 live tablets)
I20250901 14:18:25.911114  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.012s	user 0.002s	sys 0.000s
I20250901 14:18:25.916167  5023 ts_tablet_manager.cc:610] Registered 1 tablets
I20250901 14:18:25.916426  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.005s	user 0.004s	sys 0.000s
I20250901 14:18:25.921425  6096 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap starting.
I20250901 14:18:25.990237  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.195:33159
I20250901 14:18:25.990347  6158 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.195:33159 every 8 connection(s)
I20250901 14:18:25.995963  6159 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:25.996249  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:25.996506  6159 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:25.997570  6159 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:26.001071  5638 ts_manager.cc:194] Re-registered known tserver with Master: 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
I20250901 14:18:26.004585  5638 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57902
I20250901 14:18:26.014577  6114 tablet_service.cc:3073] Scan: Not found: Scanner 3b5ba74a63f24ce99f2bb5189598697c not found (it may have expired): call sequence id=1, remote={username='slave'} at 127.0.0.1:56220
W20250901 14:18:26.015981  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  } elsewhere.
I20250901 14:18:26.017786  6159 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
W20250901 14:18:26.073387  5716 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Couldn't send request to peer 8218002e4fa24f2993fae33eeaf4b0bf. Error code: TABLET_NOT_RUNNING (12). Status: Illegal state: Tablet not RUNNING: BOOTSTRAPPING. This is attempt 6: this message will repeat every 5th retry.
I20250901 14:18:26.095256  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:26.186520  6096 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap replayed 1/1 log segments. Stats: ops{read=2 overwritten=0 applied=2 ignored=0} inserts{seen=1000 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:26.187297  6096 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap complete.
I20250901 14:18:26.187809  6096 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent bootstrapping tablet: real 0.267s	user 0.238s	sys 0.020s
I20250901 14:18:26.189322  6096 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:26.189867  6096 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8218002e4fa24f2993fae33eeaf4b0bf, State: Initialized, Role: FOLLOWER
I20250901 14:18:26.190402  6096 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 2, Last appended: 1.2, Last appended by leader: 2, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:26.192248  6096 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent starting tablet: real 0.004s	user 0.006s	sys 0.000s
I20250901 14:18:26.193117  5023 client-test.cc:1928] Doing a scan while killing a tserver...
I20250901 14:18:26.222048  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:26.230651  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:26.231186  5023 client-test.cc:916] Killing TS 8218002e4fa24f2993fae33eeaf4b0bf at 127.4.231.195:33159
I20250901 14:18:26.231524  5023 tablet_server.cc:178] TabletServer@127.4.231.195:33159 shutting down...
W20250901 14:18:26.246856  5716 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Couldn't send request to peer 8218002e4fa24f2993fae33eeaf4b0bf. Status: Remote error: Service unavailable: service kudu.consensus.ConsensusService not registered on TabletServer. This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:26.255936  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:26.256853  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:26.257391  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:26.257838  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:26.276616  5023 tablet_server.cc:195] TabletServer@127.4.231.195:33159 shutdown complete.
I20250901 14:18:26.289809  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:26.291932  5023 meta_cache.cc:1510] marking tablet server 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159) as failed
W20250901 14:18:26.292421  5023 meta_cache.cc:302] tablet 8f63f95c6bf9404797c6321a821dfbcf: replica 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159) has failed: Network error: TS failed: Client connection negotiation failed: client connection to 127.4.231.195:33159: connect: Connection refused (error 111)
W20250901 14:18:26.292685  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  } elsewhere.
I20250901 14:18:26.346145  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:26.350967  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:26.356813  6166 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:26.358027  6167 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:26.359375  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:26.359745  6169 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:26.360517  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:26.360749  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:26.360909  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736306360891 us; error 0 us; skew 500 ppm
I20250901 14:18:26.361420  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:26.364277  5023 webserver.cc:480] Webserver started at http://127.4.231.195:36863/ using document root <none> and password file <none>
I20250901 14:18:26.364925  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:26.365145  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:26.368847  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.003s	user 0.002s	sys 0.000s
I20250901 14:18:26.371496  6174 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:26.372202  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:26.372486  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
uuid: "8218002e4fa24f2993fae33eeaf4b0bf"
format_stamp: "Formatted at 2025-09-01 14:18:24 on dist-test-slave-9gf0"
I20250901 14:18:26.372776  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:26.389580  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:26.390753  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:26.399334  6183 ts_tablet_manager.cc:542] Loading tablet metadata (0/1 complete)
I20250901 14:18:26.408149  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (1 total tablets, 1 live tablets)
I20250901 14:18:26.408530  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.010s	user 0.000s	sys 0.001s
I20250901 14:18:26.408874  5023 ts_tablet_manager.cc:594] Registering tablets (0/1 complete)
I20250901 14:18:26.413040  5023 ts_tablet_manager.cc:610] Registered 1 tablets
I20250901 14:18:26.413337  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.004s	user 0.004s	sys 0.000s
I20250901 14:18:26.413398  6183 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap starting.
I20250901 14:18:26.488979  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.195:33159
I20250901 14:18:26.489059  6245 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.195:33159 every 8 connection(s)
I20250901 14:18:26.498417  6246 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:26.498795  6246 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:26.499491  6246 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:26.502135  5639 ts_manager.cc:194] Re-registered known tserver with Master: 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
I20250901 14:18:26.505300  5639 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57932
I20250901 14:18:26.520566  6246 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
W20250901 14:18:26.524101  5716 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Couldn't send request to peer 8218002e4fa24f2993fae33eeaf4b0bf. Error code: TABLET_NOT_RUNNING (12). Status: Illegal state: Tablet not RUNNING: BOOTSTRAPPING. This is attempt 6: this message will repeat every 5th retry.
I20250901 14:18:26.673516  6183 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap replayed 1/1 log segments. Stats: ops{read=2 overwritten=0 applied=2 ignored=0} inserts{seen=1000 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:26.674327  6183 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap complete.
I20250901 14:18:26.674851  6183 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent bootstrapping tablet: real 0.262s	user 0.235s	sys 0.019s
I20250901 14:18:26.676378  6183 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:26.676862  6183 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8218002e4fa24f2993fae33eeaf4b0bf, State: Initialized, Role: FOLLOWER
I20250901 14:18:26.677348  6183 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 2, Last appended: 1.2, Last appended by leader: 2, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:26.679260  6183 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent starting tablet: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:26.680707  5023 client-test.cc:1911] Doing a scan while restarting a tserver and waiting for it to come up...
I20250901 14:18:26.742934  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:26.747224  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:26.747859  5023 client-test.cc:907] Restarting TS at 127.4.231.194:45665
I20250901 14:18:26.748324  5023 tablet_server.cc:178] TabletServer@127.4.231.194:0 shutting down...
W20250901 14:18:26.776153  5717 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665): Couldn't send request to peer 6677188f0c07429b85c6c5693f84aba2. Status: Remote error: Service unavailable: service kudu.consensus.ConsensusService not registered on TabletServer. This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:26.777580  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:26.778184  5023 tablet_replica.cc:331] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: stopping tablet replica
I20250901 14:18:26.778820  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:26.779332  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:26.796362  5023 tablet_server.cc:195] TabletServer@127.4.231.194:0 shutdown complete.
I20250901 14:18:26.812150  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:26.817966  6250 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:26.819845  6253 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:26.820446  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:26.820473  6251 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:26.822638  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:26.822850  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:26.822978  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736306822968 us; error 0 us; skew 500 ppm
I20250901 14:18:26.823500  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:26.826133  5023 webserver.cc:480] Webserver started at http://127.4.231.194:36543/ using document root <none> and password file <none>
I20250901 14:18:26.826733  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:26.826960  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:26.830588  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.003s	user 0.001s	sys 0.003s
I20250901 14:18:26.833501  6258 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:26.834444  5023 fs_manager.cc:730] Time spent opening block manager: real 0.003s	user 0.002s	sys 0.001s
I20250901 14:18:26.835572  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
uuid: "6677188f0c07429b85c6c5693f84aba2"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:26.835984  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:26.868587  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:26.869796  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:26.885202  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (1 total tablets, 1 live tablets)
I20250901 14:18:26.885596  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.009s	user 0.001s	sys 0.000s
I20250901 14:18:26.894519  6267 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Bootstrap starting.
I20250901 14:18:26.895116  5023 ts_tablet_manager.cc:610] Registered 1 tablets
I20250901 14:18:26.895481  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.010s	user 0.008s	sys 0.000s
I20250901 14:18:26.982409  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.194:45665
I20250901 14:18:26.982551  6329 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.194:45665 every 8 connection(s)
I20250901 14:18:26.989450  5023 client-test.cc:911] Waiting for TS 127.4.231.194:45665 to finish bootstrapping
I20250901 14:18:26.991750  6330 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:26.992112  6330 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:26.992780  6330 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:26.995317  5636 ts_manager.cc:194] Re-registered known tserver with Master: 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665)
I20250901 14:18:26.997640  5636 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57940
I20250901 14:18:27.015978  6330 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
W20250901 14:18:27.041352  5717 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665): Couldn't send request to peer 6677188f0c07429b85c6c5693f84aba2. Error code: TABLET_NOT_RUNNING (12). Status: Illegal state: Tablet not RUNNING: BOOTSTRAPPING. This is attempt 6: this message will repeat every 5th retry.
I20250901 14:18:27.160380  6267 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Bootstrap replayed 1/1 log segments. Stats: ops{read=2 overwritten=0 applied=2 ignored=0} inserts{seen=1000 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:27.161171  6267 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Bootstrap complete.
I20250901 14:18:27.161731  6267 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Time spent bootstrapping tablet: real 0.268s	user 0.235s	sys 0.024s
I20250901 14:18:27.163252  6267 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:27.163740  6267 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Becoming Follower/Learner. State: Replica: 6677188f0c07429b85c6c5693f84aba2, State: Initialized, Role: FOLLOWER
I20250901 14:18:27.164237  6267 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 2, Last appended: 1.2, Last appended by leader: 2, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:27.166270  6267 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Time spent starting tablet: real 0.004s	user 0.004s	sys 0.000s
I20250901 14:18:27.167210  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:27.178834  6285 tablet_service.cc:3073] Scan: Not found: Scanner 7ca2bd24ab354218a4a2327231e01f67 not found (it may have expired): call sequence id=1, remote={username='slave'} at 127.0.0.1:52574
W20250901 14:18:27.179828  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  LIMIT 500 } elsewhere.
I20250901 14:18:27.214473  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:27.216306  5023 client-test.cc:1918] Doing a scan while restarting a tserver...
I20250901 14:18:27.251488  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:27.256124  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:27.256783  5023 client-test.cc:907] Restarting TS at 127.4.231.194:45665
I20250901 14:18:27.257195  5023 tablet_server.cc:178] TabletServer@127.4.231.194:45665 shutting down...
I20250901 14:18:27.274237  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:27.275161  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:27.275681  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:27.276063  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:27.284332  5023 tablet_server.cc:195] TabletServer@127.4.231.194:45665 shutdown complete.
W20250901 14:18:27.294081  5717 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665): Couldn't send request to peer 6677188f0c07429b85c6c5693f84aba2. Status: Network error: Client connection negotiation failed: client connection to 127.4.231.194:45665: connect: Connection refused (error 111). This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:27.299712  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:27.306346  6335 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:27.306707  6336 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:27.309998  6338 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:27.310127  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:27.311333  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:27.311565  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:27.311718  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736307311704 us; error 0 us; skew 500 ppm
I20250901 14:18:27.312270  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:27.314520  5023 webserver.cc:480] Webserver started at http://127.4.231.194:36543/ using document root <none> and password file <none>
I20250901 14:18:27.314975  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:27.315132  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:27.319082  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.003s	user 0.004s	sys 0.000s
I20250901 14:18:27.321856  6343 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:27.322568  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:27.322858  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
uuid: "6677188f0c07429b85c6c5693f84aba2"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:27.323136  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:27.351856  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:27.352851  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:27.366817  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (1 total tablets, 1 live tablets)
I20250901 14:18:27.367076  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.007s	user 0.001s	sys 0.000s
I20250901 14:18:27.371604  5023 ts_tablet_manager.cc:610] Registered 1 tablets
I20250901 14:18:27.371958  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.005s	user 0.005s	sys 0.000s
I20250901 14:18:27.371969  6352 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Bootstrap starting.
I20250901 14:18:27.458334  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.194:45665
I20250901 14:18:27.458489  6414 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.194:45665 every 8 connection(s)
I20250901 14:18:27.462332  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:27.463960  6415 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:27.464344  6415 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:27.465159  6415 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:27.467953  5638 ts_manager.cc:194] Re-registered known tserver with Master: 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665)
I20250901 14:18:27.471330  5638 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57964
I20250901 14:18:27.479715  6370 tablet_service.cc:3073] Scan: Not found: Scanner 7fb57831610240acb8f513b27800cb9d not found (it may have expired): call sequence id=1, remote={username='slave'} at 127.0.0.1:52646
W20250901 14:18:27.480765  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  LIMIT 500 } elsewhere.
I20250901 14:18:27.484787  6415 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:27.528962  5023 client-test.cc:1860] Verifying results from scan.
W20250901 14:18:27.550376  5717 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665): Couldn't send request to peer 6677188f0c07429b85c6c5693f84aba2. Error code: TABLET_NOT_RUNNING (12). Status: Illegal state: Tablet not RUNNING: BOOTSTRAPPING. This is attempt 6: this message will repeat every 5th retry.
I20250901 14:18:27.638140  6352 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Bootstrap replayed 1/1 log segments. Stats: ops{read=2 overwritten=0 applied=2 ignored=0} inserts{seen=1000 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:27.638938  6352 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Bootstrap complete.
I20250901 14:18:27.639457  6352 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Time spent bootstrapping tablet: real 0.268s	user 0.237s	sys 0.023s
I20250901 14:18:27.641465  6352 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:27.642133  6352 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Becoming Follower/Learner. State: Replica: 6677188f0c07429b85c6c5693f84aba2, State: Initialized, Role: FOLLOWER
I20250901 14:18:27.642750  6352 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 2, Last appended: 1.2, Last appended by leader: 2, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:27.644984  6352 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Time spent starting tablet: real 0.005s	user 0.006s	sys 0.000s
I20250901 14:18:27.646584  5023 client-test.cc:1928] Doing a scan while killing a tserver...
I20250901 14:18:27.669348  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:27.674227  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:27.674823  5023 client-test.cc:916] Killing TS 6677188f0c07429b85c6c5693f84aba2 at 127.4.231.194:45665
I20250901 14:18:27.675165  5023 tablet_server.cc:178] TabletServer@127.4.231.194:45665 shutting down...
I20250901 14:18:27.693281  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:27.695087  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:27.695755  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:27.696213  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Raft consensus is shut down!
W20250901 14:18:27.703570  5717 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665): Couldn't send request to peer 6677188f0c07429b85c6c5693f84aba2. Status: Network error: Client connection negotiation failed: client connection to 127.4.231.194:45665: connect: Connection refused (error 111). This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:27.706146  5023 tablet_server.cc:195] TabletServer@127.4.231.194:45665 shutdown complete.
I20250901 14:18:27.720527  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:27.722412  5023 meta_cache.cc:1510] marking tablet server 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665) as failed
W20250901 14:18:27.722764  5023 meta_cache.cc:302] tablet 8f63f95c6bf9404797c6321a821dfbcf: replica 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665) has failed: Network error: TS failed: Client connection negotiation failed: client connection to 127.4.231.194:45665: connect: Connection refused (error 111)
W20250901 14:18:27.723035  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  LIMIT 500 } elsewhere.
I20250901 14:18:27.754148  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:27.757373  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:27.763610  6419 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:27.763936  6420 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:27.764966  6422 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:27.766599  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:27.767498  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:27.767704  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:27.767900  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736307767882 us; error 0 us; skew 500 ppm
I20250901 14:18:27.768419  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:27.770716  5023 webserver.cc:480] Webserver started at http://127.4.231.194:36543/ using document root <none> and password file <none>
I20250901 14:18:27.771296  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:27.771534  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:27.775019  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.003s	user 0.004s	sys 0.000s
I20250901 14:18:27.777596  6427 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:27.778321  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:27.778645  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
uuid: "6677188f0c07429b85c6c5693f84aba2"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:27.778888  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-1-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:27.791261  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:27.792325  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:27.802220  6436 ts_tablet_manager.cc:542] Loading tablet metadata (0/1 complete)
I20250901 14:18:27.811630  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (1 total tablets, 1 live tablets)
I20250901 14:18:27.812088  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.011s	user 0.000s	sys 0.001s
I20250901 14:18:27.812409  5023 ts_tablet_manager.cc:594] Registering tablets (0/1 complete)
I20250901 14:18:27.817337  5023 ts_tablet_manager.cc:610] Registered 1 tablets
I20250901 14:18:27.817595  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.005s	user 0.004s	sys 0.002s
I20250901 14:18:27.817723  6436 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Bootstrap starting.
I20250901 14:18:27.918018  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.194:45665
I20250901 14:18:27.918154  6498 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.194:45665 every 8 connection(s)
I20250901 14:18:27.923424  6499 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:27.923875  6499 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:27.924741  6499 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:27.927771  5636 ts_manager.cc:194] Re-registered known tserver with Master: 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665)
I20250901 14:18:27.930941  5636 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57984
W20250901 14:18:27.944509  5717 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665): Couldn't send request to peer 6677188f0c07429b85c6c5693f84aba2. Error code: TABLET_NOT_RUNNING (12). Status: Illegal state: Tablet not RUNNING: BOOTSTRAPPING. This is attempt 6: this message will repeat every 5th retry.
I20250901 14:18:27.944797  6499 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:28.074959  6436 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Bootstrap replayed 1/1 log segments. Stats: ops{read=2 overwritten=0 applied=2 ignored=0} inserts{seen=1000 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:28.075703  6436 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Bootstrap complete.
I20250901 14:18:28.076174  6436 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Time spent bootstrapping tablet: real 0.259s	user 0.205s	sys 0.041s
I20250901 14:18:28.077750  6436 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:28.078275  6436 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Becoming Follower/Learner. State: Replica: 6677188f0c07429b85c6c5693f84aba2, State: Initialized, Role: FOLLOWER
I20250901 14:18:28.078883  6436 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 2, Last appended: 1.2, Last appended by leader: 2, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:28.081970  6436 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: Time spent starting tablet: real 0.006s	user 0.008s	sys 0.000s
I20250901 14:18:28.574231  5786 maintenance_manager.cc:419] P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Scheduling UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf): 6397 bytes on disk
I20250901 14:18:28.576112  5719 maintenance_manager.cc:643] P 0eb26b9ecbd843d9bfacf0d2bd7b3314: UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf) complete. Timing: real 0.001s	user 0.000s	sys 0.000s Metrics: {"cfile_init":1,"lbm_read_time_us":323,"lbm_reads_lt_1ms":4}
I20250901 14:18:28.674610  6500 maintenance_manager.cc:419] P 6677188f0c07429b85c6c5693f84aba2: Scheduling UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf): 6397 bytes on disk
I20250901 14:18:28.676594  6432 maintenance_manager.cc:643] P 6677188f0c07429b85c6c5693f84aba2: UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf) complete. Timing: real 0.001s	user 0.001s	sys 0.000s Metrics: {"cfile_init":1,"lbm_read_time_us":319,"lbm_reads_lt_1ms":4}
I20250901 14:18:28.822856  5023 client-test.cc:1911] Doing a scan while restarting a tserver and waiting for it to come up...
I20250901 14:18:28.841117  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:28.846372  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:28.846833  5023 client-test.cc:907] Restarting TS at 127.4.231.193:37489
I20250901 14:18:28.847138  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:28.868777  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:28.869375  5023 tablet_replica.cc:331] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: stopping tablet replica
I20250901 14:18:28.870009  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:28.870831  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:28.872937  5023 tablet_replica.cc:331] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: stopping tablet replica
I20250901 14:18:28.873412  5023 raft_consensus.cc:2241] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:28.873881  5023 raft_consensus.cc:2270] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:28.875569  5023 tablet_replica.cc:331] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: stopping tablet replica
I20250901 14:18:28.876000  5023 raft_consensus.cc:2241] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:28.876410  5023 raft_consensus.cc:2270] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:28.895113  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:28.910267  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:28.915920  6509 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:28.916900  6510 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:28.918689  6512 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:28.919512  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:28.923056  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:28.923280  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:28.923449  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736308923431 us; error 0 us; skew 500 ppm
I20250901 14:18:28.924010  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:28.926930  5023 webserver.cc:480] Webserver started at http://127.4.231.193:42181/ using document root <none> and password file <none>
I20250901 14:18:28.927413  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:28.927640  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:28.931392  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.003s	user 0.004s	sys 0.000s
I20250901 14:18:28.945216  6519 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:28.946031  5023 fs_manager.cc:730] Time spent opening block manager: real 0.013s	user 0.003s	sys 0.000s
I20250901 14:18:28.946321  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:28.946592  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 6
Total live bytes: 23162
Total live bytes (after alignment): 40960
Total number of LBM containers: 7 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:28.957711  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:28.958598  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:28.971377  6529 ts_tablet_manager.cc:542] Loading tablet metadata (0/3 complete)
I20250901 14:18:28.974009  6530 raft_consensus.cc:491] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Starting pre-election (detected failure of leader 0eb26b9ecbd843d9bfacf0d2bd7b3314)
I20250901 14:18:28.974576  6530 raft_consensus.cc:513] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:28.977586  6530 leader_election.cc:290] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [CANDIDATE]: Term 2 pre-election: Requested pre-vote from peers 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159), 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489)
I20250901 14:18:29.002655  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (3 total tablets, 3 live tablets)
I20250901 14:18:29.002974  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.039s	user 0.003s	sys 0.004s
I20250901 14:18:29.002861  6221 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "8f63f95c6bf9404797c6321a821dfbcf" candidate_uuid: "6677188f0c07429b85c6c5693f84aba2" candidate_term: 2 candidate_status { last_received { term: 1 index: 2 } } ignore_live_leader: false dest_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" is_pre_election: true
I20250901 14:18:29.003432  5023 ts_tablet_manager.cc:594] Registering tablets (0/3 complete)
I20250901 14:18:29.006398  6247 maintenance_manager.cc:419] P 8218002e4fa24f2993fae33eeaf4b0bf: Scheduling UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf): 6397 bytes on disk
I20250901 14:18:29.008319  6179 maintenance_manager.cc:643] P 8218002e4fa24f2993fae33eeaf4b0bf: UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf) complete. Timing: real 0.001s	user 0.000s	sys 0.000s Metrics: {"cfile_init":1,"lbm_read_time_us":364,"lbm_reads_lt_1ms":4}
W20250901 14:18:29.011700  6431 leader_election.cc:336] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [CANDIDATE]: Term 2 pre-election: RPC error from VoteRequest() call to peer 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489): Network error: Client connection negotiation failed: client connection to 127.4.231.193:37489: connect: Connection refused (error 111)
I20250901 14:18:29.012220  6431 leader_election.cc:304] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [CANDIDATE]: Term 2 pre-election: Election decided. Result: candidate lost. Election summary: received 3 responses out of 3 voters: 1 yes votes; 2 no votes. yes voters: 6677188f0c07429b85c6c5693f84aba2; no voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314, 8218002e4fa24f2993fae33eeaf4b0bf
I20250901 14:18:29.013335  6530 raft_consensus.cc:2747] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Leader pre-election lost for term 2. Reason: could not achieve majority
I20250901 14:18:29.013439  6536 raft_consensus.cc:491] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Starting pre-election (detected failure of leader 0eb26b9ecbd843d9bfacf0d2bd7b3314)
I20250901 14:18:29.014017  6536 raft_consensus.cc:513] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.015962  6529 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:29.022475  6536 leader_election.cc:290] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [CANDIDATE]: Term 2 pre-election: Requested pre-vote from peers 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665), 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489)
I20250901 14:18:29.029088  5023 ts_tablet_manager.cc:610] Registered 3 tablets
I20250901 14:18:29.029481  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.026s	user 0.022s	sys 0.001s
I20250901 14:18:29.053830  6529 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap replayed 1/1 log segments. Stats: ops{read=1 overwritten=0 applied=1 ignored=0} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:29.055291  6529 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap complete.
I20250901 14:18:29.056169  6529 ts_tablet_manager.cc:1397] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.040s	user 0.020s	sys 0.005s
I20250901 14:18:29.059882  6474 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "8f63f95c6bf9404797c6321a821dfbcf" candidate_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" candidate_term: 2 candidate_status { last_received { term: 1 index: 2 } } ignore_live_leader: false dest_uuid: "6677188f0c07429b85c6c5693f84aba2" is_pre_election: true
I20250901 14:18:29.060604  6474 raft_consensus.cc:2466] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 8218002e4fa24f2993fae33eeaf4b0bf in term 1.
I20250901 14:18:29.062449  6177 leader_election.cc:304] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [CANDIDATE]: Term 2 pre-election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 6677188f0c07429b85c6c5693f84aba2, 8218002e4fa24f2993fae33eeaf4b0bf; no voters: 
I20250901 14:18:29.063467  6536 raft_consensus.cc:2802] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Leader pre-election won for term 2
I20250901 14:18:29.063925  6536 raft_consensus.cc:491] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Starting leader election (detected failure of leader 0eb26b9ecbd843d9bfacf0d2bd7b3314)
I20250901 14:18:29.064368  6536 raft_consensus.cc:3058] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 1 FOLLOWER]: Advancing to term 2
I20250901 14:18:29.076159  6529 raft_consensus.cc:357] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.076823  6529 raft_consensus.cc:738] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:29.077574  6529 consensus_queue.cc:260] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 1, Last appended: 1.1, Last appended by leader: 1, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.078226  6529 raft_consensus.cc:397] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:29.078557  6529 raft_consensus.cc:491] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:29.078920  6529 raft_consensus.cc:3058] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Advancing to term 2
W20250901 14:18:29.078945  6178 leader_election.cc:336] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [CANDIDATE]: Term 2 pre-election: RPC error from VoteRequest() call to peer 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489): Network error: Client connection negotiation failed: client connection to 127.4.231.193:37489: connect: Connection refused (error 111)
I20250901 14:18:29.079787  6536 raft_consensus.cc:513] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 2 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.081755  6536 leader_election.cc:290] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [CANDIDATE]: Term 2 election: Requested vote from peers 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665), 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489)
I20250901 14:18:29.082870  6474 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "8f63f95c6bf9404797c6321a821dfbcf" candidate_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" candidate_term: 2 candidate_status { last_received { term: 1 index: 2 } } ignore_live_leader: false dest_uuid: "6677188f0c07429b85c6c5693f84aba2"
I20250901 14:18:29.083637  6474 raft_consensus.cc:3058] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 1 FOLLOWER]: Advancing to term 2
I20250901 14:18:29.090776  6529 raft_consensus.cc:513] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.091359  6529 leader_election.cc:304] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 2 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314; no voters: 
I20250901 14:18:29.094569  6474 raft_consensus.cc:2466] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 2 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 8218002e4fa24f2993fae33eeaf4b0bf in term 2.
I20250901 14:18:29.096159  6177 leader_election.cc:304] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [CANDIDATE]: Term 2 election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 6677188f0c07429b85c6c5693f84aba2, 8218002e4fa24f2993fae33eeaf4b0bf; no voters: 
I20250901 14:18:29.098742  6536 raft_consensus.cc:2802] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 2 FOLLOWER]: Leader election won for term 2
W20250901 14:18:29.100086  6178 leader_election.cc:336] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [CANDIDATE]: Term 2 election: RPC error from VoteRequest() call to peer 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489): Network error: Client connection negotiation failed: client connection to 127.4.231.193:37489: connect: Connection refused (error 111)
I20250901 14:18:29.100598  6536 raft_consensus.cc:695] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 2 LEADER]: Becoming Leader. State: Replica: 8218002e4fa24f2993fae33eeaf4b0bf, State: Running, Role: LEADER
I20250901 14:18:29.102854  6536 consensus_queue.cc:237] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 2, Committed index: 2, Last appended: 1.2, Last appended by leader: 2, Current term: 2, Majority size: 2, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.106340  6529 leader_election.cc:290] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 2 election: Requested vote from peers 
I20250901 14:18:29.106606  6549 raft_consensus.cc:2802] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Leader election won for term 2
I20250901 14:18:29.111580  6549 raft_consensus.cc:695] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 LEADER]: Becoming Leader. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Running, Role: LEADER
I20250901 14:18:29.114289  6549 consensus_queue.cc:237] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 1, Committed index: 1, Last appended: 1.1, Last appended by leader: 1, Current term: 2, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.112748  5636 catalog_manager.cc:5582] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf reported cstate change: term changed from 1 to 2, leader changed from 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193) to 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195). New cstate: current_term: 2 leader_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" committed_config { opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } health_report { overall_health: HEALTHY } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: UNKNOWN } } }
I20250901 14:18:29.120069  6529 ts_tablet_manager.cc:1428] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.063s	user 0.017s	sys 0.020s
I20250901 14:18:29.120838  6529 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:29.150220  6474 raft_consensus.cc:1273] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 2 FOLLOWER]: Refusing update from remote peer 8218002e4fa24f2993fae33eeaf4b0bf: Log matching property violated. Preceding OpId in replica: term: 1 index: 2. Preceding OpId from leader: term: 2 index: 3. (index mismatch)
I20250901 14:18:29.151932  6536 consensus_queue.cc:1035] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [LEADER]: Connected to new peer: Peer: permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 3, Last known committed idx: 2, Time since last communication: 0.000s
W20250901 14:18:29.175292  6178 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf -> Peer 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489): Couldn't send request to peer 0eb26b9ecbd843d9bfacf0d2bd7b3314. Status: Network error: Client connection negotiation failed: client connection to 127.4.231.193:37489: connect: Connection refused (error 111). This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:29.210795  6529 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap replayed 1/1 log segments. Stats: ops{read=2 overwritten=0 applied=2 ignored=1} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:29.211949  6529 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap complete.
I20250901 14:18:29.212667  6529 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.092s	user 0.076s	sys 0.007s
I20250901 14:18:29.215348  6529 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.216212  6529 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:29.217062  6529 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 2, Last appended: 1.2, Last appended by leader: 2, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.219808  6529 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.007s	user 0.006s	sys 0.000s
I20250901 14:18:29.220856  6529 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:29.228511  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:37489
I20250901 14:18:29.228605  6615 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:37489 every 8 connection(s)
I20250901 14:18:29.235543  6616 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:29.235931  6616 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:29.236857  6616 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:29.239089  5023 client-test.cc:911] Waiting for TS 127.4.231.193:37489 to finish bootstrapping
I20250901 14:18:29.240398  5636 ts_manager.cc:194] Re-registered known tserver with Master: 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489)
I20250901 14:18:29.240988  6618 maintenance_manager.cc:419] P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Scheduling UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf): 6397 bytes on disk
I20250901 14:18:29.242842  6524 maintenance_manager.cc:643] P 0eb26b9ecbd843d9bfacf0d2bd7b3314: UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf) complete. Timing: real 0.001s	user 0.001s	sys 0.000s Metrics: {"cfile_init":1,"lbm_read_time_us":333,"lbm_reads_lt_1ms":4}
I20250901 14:18:29.242677  5636 catalog_manager.cc:5582] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 reported cstate change: term changed from 1 to 2. New cstate: current_term: 2 leader_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:29.243757  6529 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap replayed 1/1 log segments. Stats: ops{read=1 overwritten=0 applied=1 ignored=0} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:29.245107  6529 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap complete.
I20250901 14:18:29.245599  6529 ts_tablet_manager.cc:1397] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.025s	user 0.021s	sys 0.000s
I20250901 14:18:29.247263  6529 raft_consensus.cc:357] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.247664  6529 raft_consensus.cc:738] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:29.248817  6529 consensus_queue.cc:260] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 1, Last appended: 1.1, Last appended by leader: 1, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.249328  6529 raft_consensus.cc:397] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:29.249622  6529 raft_consensus.cc:491] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:29.249927  6529 raft_consensus.cc:3058] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Advancing to term 2
I20250901 14:18:29.252233  5636 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57996
I20250901 14:18:29.254511  6616 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:29.254693  6529 raft_consensus.cc:513] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.255290  6529 leader_election.cc:304] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 2 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314; no voters: 
I20250901 14:18:29.255823  6529 leader_election.cc:290] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 2 election: Requested vote from peers 
I20250901 14:18:29.255923  6549 raft_consensus.cc:2802] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Leader election won for term 2
I20250901 14:18:29.256362  6549 raft_consensus.cc:695] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 LEADER]: Becoming Leader. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Running, Role: LEADER
I20250901 14:18:29.257004  6549 consensus_queue.cc:237] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 1, Committed index: 1, Last appended: 1.1, Last appended by leader: 1, Current term: 2, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.257683  6529 ts_tablet_manager.cc:1428] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.012s	user 0.012s	sys 0.000s
I20250901 14:18:29.260073  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:29.277010  5636 catalog_manager.cc:5582] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 reported cstate change: term changed from 1 to 2. New cstate: current_term: 2 leader_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:29.284303  6591 raft_consensus.cc:3058] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 1 FOLLOWER]: Advancing to term 2
I20250901 14:18:29.287801  6564 tablet_service.cc:3073] Scan: Not found: Scanner aef34033238a46fa9a0d3d9c79301c80 not found (it may have expired): call sequence id=1, remote={username='slave'} at 127.0.0.1:48504
W20250901 14:18:29.288853  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  } elsewhere.
I20250901 14:18:29.343073  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:29.345886  5023 client-test.cc:1918] Doing a scan while restarting a tserver...
I20250901 14:18:29.407624  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:29.411617  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:29.412091  5023 client-test.cc:907] Restarting TS at 127.4.231.193:37489
I20250901 14:18:29.412432  5023 tablet_server.cc:178] TabletServer@127.4.231.193:37489 shutting down...
I20250901 14:18:29.431757  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:29.432513  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:29.433182  5023 raft_consensus.cc:2241] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 LEADER]: Raft consensus shutting down.
I20250901 14:18:29.433647  5023 raft_consensus.cc:2270] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:29.435895  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:29.436504  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:29.436964  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:29.439510  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:29.439940  5023 raft_consensus.cc:2241] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 LEADER]: Raft consensus shutting down.
I20250901 14:18:29.440274  5023 raft_consensus.cc:2270] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Raft consensus is shut down!
W20250901 14:18:29.444664  6178 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf -> Peer 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489): Couldn't send request to peer 0eb26b9ecbd843d9bfacf0d2bd7b3314. Status: Remote error: Service unavailable: service kudu.consensus.ConsensusService not registered on TabletServer. This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:29.465402  5023 tablet_server.cc:195] TabletServer@127.4.231.193:37489 shutdown complete.
I20250901 14:18:29.480775  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:29.485896  6624 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:29.487154  6625 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:29.488159  6627 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:29.488881  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:29.489874  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:29.490128  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:29.490314  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736309490298 us; error 0 us; skew 500 ppm
I20250901 14:18:29.490837  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:29.493022  5023 webserver.cc:480] Webserver started at http://127.4.231.193:42181/ using document root <none> and password file <none>
I20250901 14:18:29.493472  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:29.493700  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:29.498741  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.004s	user 0.004s	sys 0.000s
I20250901 14:18:29.512584  6634 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:29.513582  5023 fs_manager.cc:730] Time spent opening block manager: real 0.013s	user 0.004s	sys 0.000s
I20250901 14:18:29.513899  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:29.514325  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 6
Total live bytes: 23162
Total live bytes (after alignment): 40960
Total number of LBM containers: 7 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:29.543884  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:29.544932  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:29.574600  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (3 total tablets, 3 live tablets)
I20250901 14:18:29.574860  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.025s	user 0.001s	sys 0.000s
I20250901 14:18:29.579599  6643 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:29.593669  5023 ts_tablet_manager.cc:610] Registered 3 tablets
I20250901 14:18:29.594079  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.019s	user 0.016s	sys 0.000s
I20250901 14:18:29.599761  6643 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap replayed 1/1 log segments. Stats: ops{read=2 overwritten=0 applied=2 ignored=0} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:29.600847  6643 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap complete.
I20250901 14:18:29.601634  6643 ts_tablet_manager.cc:1397] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.022s	user 0.013s	sys 0.001s
I20250901 14:18:29.603549  6643 raft_consensus.cc:357] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.604027  6643 raft_consensus.cc:738] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:29.604662  6643 consensus_queue.cc:260] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 2, Last appended: 2.2, Last appended by leader: 2, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.605247  6643 raft_consensus.cc:397] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:29.605598  6643 raft_consensus.cc:491] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:29.606051  6643 raft_consensus.cc:3058] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Advancing to term 3
I20250901 14:18:29.614900  6643 raft_consensus.cc:513] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.615588  6643 leader_election.cc:304] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 3 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314; no voters: 
I20250901 14:18:29.616866  6643 leader_election.cc:290] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 3 election: Requested vote from peers 
I20250901 14:18:29.617226  6662 raft_consensus.cc:2802] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Leader election won for term 3
I20250901 14:18:29.625643  6662 raft_consensus.cc:695] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 LEADER]: Becoming Leader. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Running, Role: LEADER
I20250901 14:18:29.626668  6643 ts_tablet_manager.cc:1428] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.025s	user 0.016s	sys 0.003s
I20250901 14:18:29.626689  6662 consensus_queue.cc:237] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 2, Committed index: 2, Last appended: 2.2, Last appended by leader: 2, Current term: 3, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.627753  6643 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:29.690232  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:37489
I20250901 14:18:29.690325  6711 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:37489 every 8 connection(s)
I20250901 14:18:29.694048  6643 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap replayed 1/1 log segments. Stats: ops{read=3 overwritten=0 applied=3 ignored=1} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:29.695107  6643 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap complete.
I20250901 14:18:29.695804  6643 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.068s	user 0.045s	sys 0.016s
I20250901 14:18:29.697999  6643 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.698762  6643 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:29.700114  6643 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 3, Last appended: 2.3, Last appended by leader: 3, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.701257  6712 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:29.701727  6712 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:29.702180  6643 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.006s	user 0.005s	sys 0.000s
I20250901 14:18:29.702672  6712 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:29.703529  6643 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:29.705682  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:29.707518  6713 maintenance_manager.cc:419] P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Scheduling UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf): 6397 bytes on disk
I20250901 14:18:29.708834  5639 ts_manager.cc:194] Re-registered known tserver with Master: 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489)
I20250901 14:18:29.711269  5639 catalog_manager.cc:5582] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 reported cstate change: term changed from 2 to 3. New cstate: current_term: 3 leader_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:29.716684  6639 maintenance_manager.cc:643] P 0eb26b9ecbd843d9bfacf0d2bd7b3314: UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf) complete. Timing: real 0.001s	user 0.000s	sys 0.000s Metrics: {"cfile_init":1,"lbm_read_time_us":362,"lbm_reads_lt_1ms":4}
I20250901 14:18:29.720510  6643 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap replayed 1/1 log segments. Stats: ops{read=2 overwritten=0 applied=2 ignored=0} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:29.721477  6643 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap complete.
I20250901 14:18:29.722122  6643 ts_tablet_manager.cc:1397] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.019s	user 0.016s	sys 0.000s
I20250901 14:18:29.725330  5639 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:58024
I20250901 14:18:29.725946  6643 raft_consensus.cc:357] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.726426  6643 raft_consensus.cc:738] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:29.726959  6643 consensus_queue.cc:260] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 2, Last appended: 2.2, Last appended by leader: 2, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.727391  6643 raft_consensus.cc:397] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:29.727650  6643 raft_consensus.cc:491] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:29.727957  6643 raft_consensus.cc:3058] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Advancing to term 3
I20250901 14:18:29.728452  6661 tablet_service.cc:3073] Scan: Not found: Scanner 9b9aa9c743104a5990ec2523a56fe441 not found (it may have expired): call sequence id=1, remote={username='slave'} at 127.0.0.1:48600
W20250901 14:18:29.731173  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  } elsewhere.
I20250901 14:18:29.732172  6712 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:29.733306  6643 raft_consensus.cc:513] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.734018  6643 leader_election.cc:304] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 3 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314; no voters: 
I20250901 14:18:29.734663  6643 leader_election.cc:290] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 3 election: Requested vote from peers 
I20250901 14:18:29.734795  6662 raft_consensus.cc:2802] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Leader election won for term 3
I20250901 14:18:29.735303  6662 raft_consensus.cc:695] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 LEADER]: Becoming Leader. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Running, Role: LEADER
I20250901 14:18:29.735998  6662 consensus_queue.cc:237] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 2, Committed index: 2, Last appended: 2.2, Last appended by leader: 2, Current term: 3, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:29.736330  6643 ts_tablet_manager.cc:1428] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.014s	user 0.011s	sys 0.000s
I20250901 14:18:29.749383  5636 catalog_manager.cc:5582] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 reported cstate change: term changed from 2 to 3. New cstate: current_term: 3 leader_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:29.820398  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:29.824016  5023 client-test.cc:1928] Doing a scan while killing a tserver...
I20250901 14:18:29.861411  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:29.865444  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:29.865890  5023 client-test.cc:916] Killing TS 0eb26b9ecbd843d9bfacf0d2bd7b3314 at 127.4.231.193:37489
I20250901 14:18:29.866204  5023 tablet_server.cc:178] TabletServer@127.4.231.193:37489 shutting down...
I20250901 14:18:29.885228  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:29.886945  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:29.887634  5023 raft_consensus.cc:2241] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 LEADER]: Raft consensus shutting down.
I20250901 14:18:29.888329  5023 raft_consensus.cc:2270] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:29.891273  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:29.891757  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:29.892091  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:29.894444  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:29.895012  5023 raft_consensus.cc:2241] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 LEADER]: Raft consensus shutting down.
I20250901 14:18:29.895534  5023 raft_consensus.cc:2270] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Raft consensus is shut down!
W20250901 14:18:29.896751  6178 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf -> Peer 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489): Couldn't send request to peer 0eb26b9ecbd843d9bfacf0d2bd7b3314. Status: Remote error: Service unavailable: service kudu.consensus.ConsensusService not registered on TabletServer. This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:29.911867  5023 tablet_server.cc:195] TabletServer@127.4.231.193:37489 shutdown complete.
I20250901 14:18:29.933480  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:29.936224  5023 meta_cache.cc:1510] marking tablet server 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489) as failed
W20250901 14:18:29.936714  5023 meta_cache.cc:302] tablet 8f63f95c6bf9404797c6321a821dfbcf: replica 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489) has failed: Network error: TS failed: Client connection negotiation failed: client connection to 127.4.231.193:37489: connect: Connection refused (error 111)
W20250901 14:18:29.937160  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  } elsewhere.
I20250901 14:18:30.003357  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:30.006781  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:30.012506  6720 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:30.013182  6721 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:30.015829  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:30.017145  6724 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:30.018160  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:30.018379  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:30.018545  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736310018526 us; error 0 us; skew 500 ppm
I20250901 14:18:30.019166  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:30.021821  5023 webserver.cc:480] Webserver started at http://127.4.231.193:42181/ using document root <none> and password file <none>
I20250901 14:18:30.022385  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:30.022608  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:30.026907  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.003s	user 0.002s	sys 0.002s
I20250901 14:18:30.040566  6730 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:30.041364  5023 fs_manager.cc:730] Time spent opening block manager: real 0.013s	user 0.002s	sys 0.001s
I20250901 14:18:30.041713  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314"
format_stamp: "Formatted at 2025-09-01 14:18:23 on dist-test-slave-9gf0"
I20250901 14:18:30.042002  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 6
Total live bytes: 23162
Total live bytes (after alignment): 40960
Total number of LBM containers: 7 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:30.071784  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:30.072937  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:30.080758  6739 ts_tablet_manager.cc:542] Loading tablet metadata (0/3 complete)
I20250901 14:18:30.099751  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (3 total tablets, 3 live tablets)
I20250901 14:18:30.100057  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.020s	user 0.001s	sys 0.001s
I20250901 14:18:30.100347  5023 ts_tablet_manager.cc:594] Registering tablets (0/3 complete)
I20250901 14:18:30.106062  6739 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
W20250901 14:18:30.114161  6178 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf -> Peer 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489): Couldn't send request to peer 0eb26b9ecbd843d9bfacf0d2bd7b3314. Status: Network error: Client connection negotiation failed: client connection to 127.4.231.193:37489: connect: Connection refused (error 111). This is attempt 6: this message will repeat every 5th retry.
I20250901 14:18:30.118209  5023 ts_tablet_manager.cc:610] Registered 3 tablets
I20250901 14:18:30.118515  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.018s	user 0.017s	sys 0.000s
I20250901 14:18:30.125352  6739 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap replayed 1/1 log segments. Stats: ops{read=3 overwritten=0 applied=3 ignored=0} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:30.126761  6739 tablet_bootstrap.cc:492] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap complete.
I20250901 14:18:30.127720  6739 ts_tablet_manager.cc:1397] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.022s	user 0.011s	sys 0.008s
I20250901 14:18:30.130718  6739 raft_consensus.cc:357] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.131361  6739 raft_consensus.cc:738] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:30.131966  6739 consensus_queue.cc:260] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 3, Last appended: 3.3, Last appended by leader: 3, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.132457  6739 raft_consensus.cc:397] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:30.132759  6739 raft_consensus.cc:491] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:30.133081  6739 raft_consensus.cc:3058] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Advancing to term 4
I20250901 14:18:30.139277  6739 raft_consensus.cc:513] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 4 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.139868  6739 leader_election.cc:304] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 4 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314; no voters: 
I20250901 14:18:30.141268  6739 leader_election.cc:290] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 4 election: Requested vote from peers 
I20250901 14:18:30.143550  6759 raft_consensus.cc:2802] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 4 FOLLOWER]: Leader election won for term 4
I20250901 14:18:30.147393  6759 raft_consensus.cc:695] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 4 LEADER]: Becoming Leader. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Running, Role: LEADER
I20250901 14:18:30.148519  6759 consensus_queue.cc:237] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 3, Committed index: 3, Last appended: 3.3, Last appended by leader: 3, Current term: 4, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.152640  6739 ts_tablet_manager.cc:1428] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.025s	user 0.011s	sys 0.014s
I20250901 14:18:30.153827  6739 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:30.215188  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:37489
I20250901 14:18:30.215298  6807 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:37489 every 8 connection(s)
I20250901 14:18:30.219393  6739 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap replayed 1/1 log segments. Stats: ops{read=3 overwritten=0 applied=3 ignored=1} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:30.220446  6739 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap complete.
I20250901 14:18:30.221160  6739 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.068s	user 0.056s	sys 0.008s
I20250901 14:18:30.220991  6808 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:30.225014  6808 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:30.225939  6808 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:30.226841  6739 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.227535  6739 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:30.228122  6739 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 3, Last appended: 2.3, Last appended by leader: 3, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.229808  6739 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.005s	user 0.004s	sys 0.001s
I20250901 14:18:30.230401  6739 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap starting.
I20250901 14:18:30.230329  5638 ts_manager.cc:194] Re-registered known tserver with Master: 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489)
I20250901 14:18:30.232548  5638 catalog_manager.cc:5582] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 reported cstate change: term changed from 3 to 4. New cstate: current_term: 4 leader_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:30.241487  5638 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:58036
I20250901 14:18:30.244434  6808 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:30.245028  6739 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap replayed 1/1 log segments. Stats: ops{read=3 overwritten=0 applied=3 ignored=0} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:30.248014  6739 tablet_bootstrap.cc:492] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Bootstrap complete.
I20250901 14:18:30.248597  6739 ts_tablet_manager.cc:1397] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent bootstrapping tablet: real 0.018s	user 0.014s	sys 0.000s
I20250901 14:18:30.250614  6739 raft_consensus.cc:357] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.251111  6739 raft_consensus.cc:738] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Initialized, Role: FOLLOWER
I20250901 14:18:30.251677  6739 consensus_queue.cc:260] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 3, Last appended: 3.3, Last appended by leader: 3, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.252256  6739 raft_consensus.cc:397] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:30.252523  6739 raft_consensus.cc:491] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:30.252810  6739 raft_consensus.cc:3058] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Advancing to term 4
I20250901 14:18:30.258394  6739 raft_consensus.cc:513] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 4 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.259032  6739 leader_election.cc:304] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 4 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314; no voters: 
I20250901 14:18:30.259585  6739 leader_election.cc:290] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 4 election: Requested vote from peers 
I20250901 14:18:30.259680  6759 raft_consensus.cc:2802] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 4 FOLLOWER]: Leader election won for term 4
I20250901 14:18:30.260149  6759 raft_consensus.cc:695] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 4 LEADER]: Becoming Leader. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Running, Role: LEADER
I20250901 14:18:30.260860  6759 consensus_queue.cc:237] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 3, Committed index: 3, Last appended: 3.3, Last appended by leader: 3, Current term: 4, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.261195  6739 ts_tablet_manager.cc:1428] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Time spent starting tablet: real 0.012s	user 0.010s	sys 0.000s
I20250901 14:18:30.263913  5023 tablet.cc:1620] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314: MemRowSet was empty: no flush needed.
I20250901 14:18:30.272650  5636 catalog_manager.cc:5582] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 reported cstate change: term changed from 3 to 4. New cstate: current_term: 4 leader_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:30.276167  5023 tablet.cc:1620] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2: MemRowSet was empty: no flush needed.
I20250901 14:18:30.286386  5023 tablet.cc:1620] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: MemRowSet was empty: no flush needed.
I20250901 14:18:30.295583  5023 client-test.cc:1911] Doing a scan while restarting a tserver and waiting for it to come up...
I20250901 14:18:30.321103  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:30.325026  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:30.325479  5023 client-test.cc:907] Restarting TS at 127.4.231.195:33159
I20250901 14:18:30.325888  5023 tablet_server.cc:178] TabletServer@127.4.231.195:33159 shutting down...
I20250901 14:18:30.348860  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:30.349457  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:30.349992  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 2 LEADER]: Raft consensus shutting down.
I20250901 14:18:30.350628  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 2 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:30.370862  5023 tablet_server.cc:195] TabletServer@127.4.231.195:33159 shutdown complete.
I20250901 14:18:30.384258  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:30.390672  6818 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:30.390482  6817 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:30.391695  6820 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:30.393553  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:30.395794  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:30.396021  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:30.396184  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736310396167 us; error 0 us; skew 500 ppm
I20250901 14:18:30.396667  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:30.399309  5023 webserver.cc:480] Webserver started at http://127.4.231.195:36863/ using document root <none> and password file <none>
I20250901 14:18:30.399870  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:30.400090  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:30.403847  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.003s	user 0.002s	sys 0.002s
I20250901 14:18:30.417804  6827 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:30.418592  5023 fs_manager.cc:730] Time spent opening block manager: real 0.013s	user 0.002s	sys 0.001s
I20250901 14:18:30.418908  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
uuid: "8218002e4fa24f2993fae33eeaf4b0bf"
format_stamp: "Formatted at 2025-09-01 14:18:24 on dist-test-slave-9gf0"
I20250901 14:18:30.419195  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root/data
Total live blocks: 6
Total live bytes: 23162
Total live bytes (after alignment): 40960
Total number of LBM containers: 7 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:30.437383  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:30.438347  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:30.452253  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (1 total tablets, 1 live tablets)
I20250901 14:18:30.452557  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.008s	user 0.001s	sys 0.000s
I20250901 14:18:30.458562  5023 ts_tablet_manager.cc:610] Registered 1 tablets
I20250901 14:18:30.458833  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.006s	user 0.005s	sys 0.000s
I20250901 14:18:30.458922  6836 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap starting.
I20250901 14:18:30.479060  6809 maintenance_manager.cc:419] P 0eb26b9ecbd843d9bfacf0d2bd7b3314: Scheduling UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf): 6397 bytes on disk
I20250901 14:18:30.481297  6762 raft_consensus.cc:491] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Starting pre-election (detected failure of leader 8218002e4fa24f2993fae33eeaf4b0bf)
I20250901 14:18:30.481155  6735 maintenance_manager.cc:643] P 0eb26b9ecbd843d9bfacf0d2bd7b3314: UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf) complete. Timing: real 0.001s	user 0.000s	sys 0.000s Metrics: {"cfile_init":1,"lbm_read_time_us":348,"lbm_reads_lt_1ms":4}
I20250901 14:18:30.481868  6762 raft_consensus.cc:513] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.489854  6762 leader_election.cc:290] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 3 pre-election: Requested pre-vote from peers 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665), 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
W20250901 14:18:30.492442  6732 proxy.cc:239] Call had error, refreshing address and retrying: Network error: Client connection negotiation failed: client connection to 127.4.231.195:33159: connect: Connection refused (error 111) [suppressed 44 similar messages]
W20250901 14:18:30.497283  6732 leader_election.cc:336] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 3 pre-election: RPC error from VoteRequest() call to peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Network error: Client connection negotiation failed: client connection to 127.4.231.195:33159: connect: Connection refused (error 111)
I20250901 14:18:30.507117  6474 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "8f63f95c6bf9404797c6321a821dfbcf" candidate_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" candidate_term: 3 candidate_status { last_received { term: 2 index: 3 } } ignore_live_leader: false dest_uuid: "6677188f0c07429b85c6c5693f84aba2" is_pre_election: true
I20250901 14:18:30.507951  6474 raft_consensus.cc:2466] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 2 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 0eb26b9ecbd843d9bfacf0d2bd7b3314 in term 2.
I20250901 14:18:30.509284  6733 leader_election.cc:304] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 3 pre-election: Election decided. Result: candidate won. Election summary: received 3 responses out of 3 voters: 2 yes votes; 1 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314, 6677188f0c07429b85c6c5693f84aba2; no voters: 8218002e4fa24f2993fae33eeaf4b0bf
I20250901 14:18:30.510268  6762 raft_consensus.cc:2802] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Leader pre-election won for term 3
I20250901 14:18:30.510738  6762 raft_consensus.cc:491] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Starting leader election (detected failure of leader 8218002e4fa24f2993fae33eeaf4b0bf)
I20250901 14:18:30.511261  6762 raft_consensus.cc:3058] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 2 FOLLOWER]: Advancing to term 3
I20250901 14:18:30.515038  6855 raft_consensus.cc:491] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 2 FOLLOWER]: Starting pre-election (detected failure of leader 8218002e4fa24f2993fae33eeaf4b0bf)
I20250901 14:18:30.515552  6855 raft_consensus.cc:513] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 2 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.520088  6762 raft_consensus.cc:513] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.523921  6474 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "8f63f95c6bf9404797c6321a821dfbcf" candidate_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" candidate_term: 3 candidate_status { last_received { term: 2 index: 3 } } ignore_live_leader: false dest_uuid: "6677188f0c07429b85c6c5693f84aba2"
I20250901 14:18:30.524768  6474 raft_consensus.cc:3058] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 2 FOLLOWER]: Advancing to term 3
I20250901 14:18:30.534505  6474 raft_consensus.cc:2466] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 3 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 0eb26b9ecbd843d9bfacf0d2bd7b3314 in term 3.
I20250901 14:18:30.536126  6733 leader_election.cc:304] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 3 election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314, 6677188f0c07429b85c6c5693f84aba2; no voters: 
I20250901 14:18:30.537185  6759 raft_consensus.cc:2802] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Leader election won for term 3
I20250901 14:18:30.537808  6759 raft_consensus.cc:695] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 LEADER]: Becoming Leader. State: Replica: 0eb26b9ecbd843d9bfacf0d2bd7b3314, State: Running, Role: LEADER
I20250901 14:18:30.544060  6836 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap replayed 1/1 log segments. Stats: ops{read=3 overwritten=0 applied=3 ignored=1} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:30.544720  6759 consensus_queue.cc:237] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 3, Committed index: 3, Last appended: 2.3, Last appended by leader: 3, Current term: 3, Majority size: 2, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.546471  6855 leader_election.cc:290] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [CANDIDATE]: Term 3 pre-election: Requested pre-vote from peers 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159), 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193:37489)
I20250901 14:18:30.550348  6762 leader_election.cc:290] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 3 election: Requested vote from peers 6677188f0c07429b85c6c5693f84aba2 (127.4.231.194:45665), 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
I20250901 14:18:30.545403  6836 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap complete.
I20250901 14:18:30.551376  6836 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent bootstrapping tablet: real 0.093s	user 0.071s	sys 0.012s
I20250901 14:18:30.554119  6836 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 2 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.554841  6836 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 2 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8218002e4fa24f2993fae33eeaf4b0bf, State: Initialized, Role: FOLLOWER
I20250901 14:18:30.556339  6836 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 3, Last appended: 2.3, Last appended by leader: 3, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.566097  6836 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent starting tablet: real 0.014s	user 0.006s	sys 0.008s
I20250901 14:18:30.566108  6783 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "8f63f95c6bf9404797c6321a821dfbcf" candidate_uuid: "6677188f0c07429b85c6c5693f84aba2" candidate_term: 3 candidate_status { last_received { term: 2 index: 3 } } ignore_live_leader: false dest_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" is_pre_election: true
W20250901 14:18:30.566319  6732 leader_election.cc:336] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [CANDIDATE]: Term 3 election: RPC error from VoteRequest() call to peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Network error: Client connection negotiation failed: client connection to 127.4.231.195:33159: connect: Connection refused (error 111)
I20250901 14:18:30.553607  5638 catalog_manager.cc:5582] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 reported cstate change: term changed from 2 to 3, leader changed from 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195) to 0eb26b9ecbd843d9bfacf0d2bd7b3314 (127.4.231.193). New cstate: current_term: 3 leader_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" committed_config { opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } health_report { overall_health: HEALTHY } } }
W20250901 14:18:30.571296  6429 leader_election.cc:336] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [CANDIDATE]: Term 3 pre-election: RPC error from VoteRequest() call to peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Network error: Client connection negotiation failed: client connection to 127.4.231.195:33159: connect: Connection refused (error 111)
I20250901 14:18:30.571811  6429 leader_election.cc:304] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [CANDIDATE]: Term 3 pre-election: Election decided. Result: candidate lost. Election summary: received 3 responses out of 3 voters: 1 yes votes; 2 no votes. yes voters: 6677188f0c07429b85c6c5693f84aba2; no voters: 0eb26b9ecbd843d9bfacf0d2bd7b3314, 8218002e4fa24f2993fae33eeaf4b0bf
I20250901 14:18:30.572592  6855 raft_consensus.cc:2747] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 3 FOLLOWER]: Leader pre-election lost for term 3. Reason: could not achieve majority
I20250901 14:18:30.591758  6474 raft_consensus.cc:1273] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 3 FOLLOWER]: Refusing update from remote peer 0eb26b9ecbd843d9bfacf0d2bd7b3314: Log matching property violated. Preceding OpId in replica: term: 2 index: 3. Preceding OpId from leader: term: 3 index: 4. (index mismatch)
W20250901 14:18:30.593039  6732 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Couldn't send request to peer 8218002e4fa24f2993fae33eeaf4b0bf. Status: Network error: Client connection negotiation failed: client connection to 127.4.231.195:33159: connect: Connection refused (error 111). This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:30.593086  6762 consensus_queue.cc:1035] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [LEADER]: Connected to new peer: Peer: permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 4, Last known committed idx: 3, Time since last communication: 0.001s
I20250901 14:18:30.612840  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.195:33159
I20250901 14:18:30.612924  6907 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.195:33159 every 8 connection(s)
I20250901 14:18:30.616163  5023 client-test.cc:911] Waiting for TS 127.4.231.195:33159 to finish bootstrapping
I20250901 14:18:30.616883  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:30.617542  6908 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:30.617992  6908 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:30.618592  6909 maintenance_manager.cc:419] P 8218002e4fa24f2993fae33eeaf4b0bf: Scheduling UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf): 6397 bytes on disk
I20250901 14:18:30.619035  6908 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:30.622341  6832 maintenance_manager.cc:643] P 8218002e4fa24f2993fae33eeaf4b0bf: UndoDeltaBlockGCOp(8f63f95c6bf9404797c6321a821dfbcf) complete. Timing: real 0.001s	user 0.001s	sys 0.000s Metrics: {"cfile_init":1,"lbm_read_time_us":401,"lbm_reads_lt_1ms":4}
I20250901 14:18:30.623854  5636 ts_manager.cc:194] Re-registered known tserver with Master: 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
I20250901 14:18:30.626950  5636 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:58060
I20250901 14:18:30.630599  6858 tablet_service.cc:3073] Scan: Not found: Scanner 3d00a2572a2d4315bb75c712b6229eaa not found (it may have expired): call sequence id=1, remote={username='slave'} at 127.0.0.1:56430
W20250901 14:18:30.631677  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  LIMIT 500 } elsewhere.
I20250901 14:18:30.637557  6882 raft_consensus.cc:3058] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 2 FOLLOWER]: Advancing to term 3
I20250901 14:18:30.640743  6908 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:30.675366  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:30.677196  5023 client-test.cc:1918] Doing a scan while restarting a tserver...
I20250901 14:18:30.728577  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:30.732789  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:30.733234  5023 client-test.cc:907] Restarting TS at 127.4.231.195:33159
I20250901 14:18:30.733561  5023 tablet_server.cc:178] TabletServer@127.4.231.195:33159 shutting down...
I20250901 14:18:30.759620  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:30.760324  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:30.760994  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 3 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:30.761396  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 3 FOLLOWER]: Raft consensus is shut down!
W20250901 14:18:30.762822  6732 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Couldn't send request to peer 8218002e4fa24f2993fae33eeaf4b0bf. Status: Remote error: Service unavailable: service kudu.consensus.ConsensusService not registered on TabletServer. This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:30.772441  5023 tablet_server.cc:195] TabletServer@127.4.231.195:33159 shutdown complete.
I20250901 14:18:30.787236  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:30.793000  6915 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:30.793159  6914 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:30.794488  6917 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:30.795703  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:30.796456  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:30.796728  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:30.796924  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736310796909 us; error 0 us; skew 500 ppm
I20250901 14:18:30.797624  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:30.801983  5023 webserver.cc:480] Webserver started at http://127.4.231.195:36863/ using document root <none> and password file <none>
I20250901 14:18:30.802456  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:30.802614  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:30.807204  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.004s	user 0.004s	sys 0.000s
I20250901 14:18:30.820755  6924 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:30.821723  5023 fs_manager.cc:730] Time spent opening block manager: real 0.013s	user 0.003s	sys 0.000s
I20250901 14:18:30.822012  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
uuid: "8218002e4fa24f2993fae33eeaf4b0bf"
format_stamp: "Formatted at 2025-09-01 14:18:24 on dist-test-slave-9gf0"
I20250901 14:18:30.822322  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root/data
Total live blocks: 6
Total live bytes: 23162
Total live bytes (after alignment): 40960
Total number of LBM containers: 7 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:30.840340  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:30.841248  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:30.858023  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (1 total tablets, 1 live tablets)
I20250901 14:18:30.858280  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.012s	user 0.000s	sys 0.001s
I20250901 14:18:30.863075  6933 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap starting.
I20250901 14:18:30.862682  5023 ts_tablet_manager.cc:610] Registered 1 tablets
I20250901 14:18:30.864199  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.006s	user 0.004s	sys 0.001s
I20250901 14:18:30.945434  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.195:33159
I20250901 14:18:30.945495  6995 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.195:33159 every 8 connection(s)
I20250901 14:18:30.951258  6996 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:30.951762  6996 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:30.952693  6996 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:30.957868  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:30.958103  5638 ts_manager.cc:194] Re-registered known tserver with Master: 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
I20250901 14:18:30.960879  6933 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap replayed 1/1 log segments. Stats: ops{read=4 overwritten=0 applied=4 ignored=1} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:30.960980  5638 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:58082
I20250901 14:18:30.961894  6933 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap complete.
I20250901 14:18:30.962620  6933 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent bootstrapping tablet: real 0.100s	user 0.078s	sys 0.019s
I20250901 14:18:30.964777  6933 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 3 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.965299  6933 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 3 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8218002e4fa24f2993fae33eeaf4b0bf, State: Initialized, Role: FOLLOWER
I20250901 14:18:30.965956  6933 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 4, Last appended: 3.4, Last appended by leader: 4, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:30.968773  6933 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent starting tablet: real 0.006s	user 0.004s	sys 0.000s
I20250901 14:18:30.969625  6996 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:30.971609  6951 tablet_service.cc:3073] Scan: Not found: Scanner 717c5c2e40124d62ba3e1f5ffa4248d3 not found (it may have expired): call sequence id=1, remote={username='slave'} at 127.0.0.1:56512
W20250901 14:18:30.972872  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  LIMIT 500 } elsewhere.
I20250901 14:18:31.011334  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:31.013732  5023 client-test.cc:1928] Doing a scan while killing a tserver...
I20250901 14:18:31.032707  5023 client-test.cc:1834] Setting up scanner.
I20250901 14:18:31.037814  5023 client-test.cc:1842] Calling callback.
I20250901 14:18:31.038246  5023 client-test.cc:916] Killing TS 8218002e4fa24f2993fae33eeaf4b0bf at 127.4.231.195:33159
I20250901 14:18:31.038570  5023 tablet_server.cc:178] TabletServer@127.4.231.195:33159 shutting down...
I20250901 14:18:31.058969  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:31.059981  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:31.060627  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 3 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:31.061218  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 3 FOLLOWER]: Raft consensus is shut down!
W20250901 14:18:31.071933  6732 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Couldn't send request to peer 8218002e4fa24f2993fae33eeaf4b0bf. Status: Network error: Client connection negotiation failed: client connection to 127.4.231.195:33159: connect: Connection refused (error 111). This is attempt 1: this message will repeat every 5th retry.
I20250901 14:18:31.082701  5023 tablet_server.cc:195] TabletServer@127.4.231.195:33159 shutdown complete.
I20250901 14:18:31.095933  5023 client-test.cc:1851] Checking that we can still read the next batch.
I20250901 14:18:31.097702  5023 meta_cache.cc:1510] marking tablet server 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159) as failed
W20250901 14:18:31.097954  5023 meta_cache.cc:302] tablet 8f63f95c6bf9404797c6321a821dfbcf: replica 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159) has failed: Network error: TS failed: Client connection negotiation failed: client connection to 127.4.231.195:33159: connect: Connection refused (error 111)
W20250901 14:18:31.098161  5023 client.cc:2239] Attempting to retry Scanner { table: TestScanFaultTolerance, tablet: 8f63f95c6bf9404797c6321a821dfbcf, projection: (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
), scan_spec:  LIMIT 500 } elsewhere.
I20250901 14:18:31.132249  5023 client-test.cc:1860] Verifying results from scan.
I20250901 14:18:31.134658  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:31.142802  7000 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:31.143177  7001 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:31.144728  7003 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:31.145552  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:31.146301  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:31.146497  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:31.146658  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736311146644 us; error 0 us; skew 500 ppm
I20250901 14:18:31.147125  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:31.149600  5023 webserver.cc:480] Webserver started at http://127.4.231.195:36863/ using document root <none> and password file <none>
I20250901 14:18:31.150091  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:31.150241  5023 fs_manager.cc:365] Using existing metadata directory in first data directory
I20250901 14:18:31.153942  5023 fs_manager.cc:714] Time spent opening directory manager: real 0.003s	user 0.003s	sys 0.000s
I20250901 14:18:31.169456  7010 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:31.170398  5023 fs_manager.cc:730] Time spent opening block manager: real 0.015s	user 0.003s	sys 0.000s
I20250901 14:18:31.170710  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
uuid: "8218002e4fa24f2993fae33eeaf4b0bf"
format_stamp: "Formatted at 2025-09-01 14:18:24 on dist-test-slave-9gf0"
I20250901 14:18:31.170986  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/minicluster-data/ts-2-root/data
Total live blocks: 6
Total live bytes: 23162
Total live bytes (after alignment): 40960
Total number of LBM containers: 7 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:31.215020  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:31.219091  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:31.235946  7020 ts_tablet_manager.cc:542] Loading tablet metadata (0/1 complete)
I20250901 14:18:31.248476  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (1 total tablets, 1 live tablets)
I20250901 14:18:31.248769  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.014s	user 0.002s	sys 0.000s
I20250901 14:18:31.249111  5023 ts_tablet_manager.cc:594] Registering tablets (0/1 complete)
I20250901 14:18:31.254769  5023 ts_tablet_manager.cc:610] Registered 1 tablets
I20250901 14:18:31.255070  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.006s	user 0.005s	sys 0.000s
I20250901 14:18:31.255168  7020 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap starting.
W20250901 14:18:31.315300  6732 consensus_peers.cc:489] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 -> Peer 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159): Couldn't send request to peer 8218002e4fa24f2993fae33eeaf4b0bf. Status: Network error: Client connection negotiation failed: client connection to 127.4.231.195:33159: connect: Connection refused (error 111). This is attempt 6: this message will repeat every 5th retry.
I20250901 14:18:31.351130  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.195:33159
I20250901 14:18:31.351269  7082 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.195:33159 every 8 connection(s)
I20250901 14:18:31.365094  7020 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap replayed 1/1 log segments. Stats: ops{read=4 overwritten=0 applied=4 ignored=1} inserts{seen=0 ignored=0} mutations{seen=0 ignored=0} orphaned_commits=0. Pending: 0 replicates
I20250901 14:18:31.366619  7020 tablet_bootstrap.cc:492] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Bootstrap complete.
I20250901 14:18:31.367516  7020 ts_tablet_manager.cc:1397] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent bootstrapping tablet: real 0.113s	user 0.079s	sys 0.019s
I20250901 14:18:31.371162  7083 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34121
I20250901 14:18:31.371608  7083 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:31.370916  7020 raft_consensus.cc:357] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 3 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:31.372229  7020 raft_consensus.cc:738] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 3 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8218002e4fa24f2993fae33eeaf4b0bf, State: Initialized, Role: FOLLOWER
I20250901 14:18:31.372470  7083 heartbeater.cc:507] Master 127.4.231.254:34121 requested a full tablet report, sending...
I20250901 14:18:31.373071  7020 consensus_queue.cc:260] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 4, Last appended: 3.4, Last appended by leader: 4, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6677188f0c07429b85c6c5693f84aba2" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 45665 } } peers { permanent_uuid: "8218002e4fa24f2993fae33eeaf4b0bf" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 33159 } } peers { permanent_uuid: "0eb26b9ecbd843d9bfacf0d2bd7b3314" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37489 } }
I20250901 14:18:31.376039  7020 ts_tablet_manager.cc:1428] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf: Time spent starting tablet: real 0.008s	user 0.005s	sys 0.004s
I20250901 14:18:31.376693  5639 ts_manager.cc:194] Re-registered known tserver with Master: 8218002e4fa24f2993fae33eeaf4b0bf (127.4.231.195:33159)
I20250901 14:18:31.380913  5639 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:58094
I20250901 14:18:31.384159  7083 heartbeater.cc:499] Master 127.4.231.254:34121 was elected leader, sending a full tablet report...
I20250901 14:18:31.385313  5023 tablet_server.cc:178] TabletServer@127.4.231.193:37489 shutting down...
I20250901 14:18:31.422529  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:31.423257  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:31.424211  5023 raft_consensus.cc:2241] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 4 LEADER]: Raft consensus shutting down.
I20250901 14:18:31.425063  5023 raft_consensus.cc:2270] T b6da5a53e9d44dc9aa1db6a6cb0a6209 P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 4 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:31.427268  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:31.427843  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 LEADER]: Raft consensus shutting down.
I20250901 14:18:31.428610  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 3 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:31.431375  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:31.431820  5023 raft_consensus.cc:2241] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 4 LEADER]: Raft consensus shutting down.
I20250901 14:18:31.432247  5023 raft_consensus.cc:2270] T 1783e1d6604d4105ae855ba92e83a53a P 0eb26b9ecbd843d9bfacf0d2bd7b3314 [term 4 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:31.444867  5023 tablet_server.cc:195] TabletServer@127.4.231.193:37489 shutdown complete.
I20250901 14:18:31.462668  5023 tablet_server.cc:178] TabletServer@127.4.231.194:45665 shutting down...
I20250901 14:18:31.488168  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:31.488754  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:31.489284  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 3 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:31.489822  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 6677188f0c07429b85c6c5693f84aba2 [term 3 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:31.511412  5023 tablet_server.cc:195] TabletServer@127.4.231.194:45665 shutdown complete.
I20250901 14:18:31.527645  5023 tablet_server.cc:178] TabletServer@127.4.231.195:33159 shutting down...
I20250901 14:18:31.548888  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:31.549679  5023 tablet_replica.cc:331] stopping tablet replica
I20250901 14:18:31.550241  5023 raft_consensus.cc:2241] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 3 FOLLOWER]: Raft consensus shutting down.
I20250901 14:18:31.550635  5023 raft_consensus.cc:2270] T 8f63f95c6bf9404797c6321a821dfbcf P 8218002e4fa24f2993fae33eeaf4b0bf [term 3 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:31.569739  5023 tablet_server.cc:195] TabletServer@127.4.231.195:33159 shutdown complete.
I20250901 14:18:31.583331  5023 master.cc:561] Master@127.4.231.254:34121 shutting down...
I20250901 14:18:31.607331  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:31.607939  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:31.608239  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 47c635c1c1cb42fc837ae985f119c2ad: stopping tablet replica
W20250901 14:18:31.627225  5023 rolling_log.cc:182] Unable to compress old log file: Not found: Unable to open input file to compress: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestScanFaultTolerance.1756736290962133-5023-0/client-test.dist-test-slave-9gf0.slave.diagnostics.20250901-141823.0.5023: No such file or directory (error 2)
I20250901 14:18:31.629001  5023 master.cc:583] Master@127.4.231.254:34121 shutdown complete.
[       OK ] ClientTest.TestScanFaultTolerance (8174 ms)
[ RUN      ] ClientTest.TestEqualRangeBounds
I20250901 14:18:31.662875  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:42131
I20250901 14:18:31.663909  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:31.669301  7087 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:31.670377  7088 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:31.671177  7090 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:31.673130  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:31.675523  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:31.675709  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:31.675825  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736311675815 us; error 0 us; skew 500 ppm
I20250901 14:18:31.676294  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:31.678655  5023 webserver.cc:480] Webserver started at http://127.4.231.254:38577/ using document root <none> and password file <none>
I20250901 14:18:31.679095  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:31.679250  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:31.679458  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:31.680472  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestEqualRangeBounds.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "b0daa9ee831b41e2a5c5e41b64ba9f8d"
format_stamp: "Formatted at 2025-09-01 14:18:31 on dist-test-slave-9gf0"
I20250901 14:18:31.684724  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.006s	sys 0.000s
I20250901 14:18:31.687987  7095 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:31.688743  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.000s	sys 0.002s
I20250901 14:18:31.688990  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestEqualRangeBounds.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "b0daa9ee831b41e2a5c5e41b64ba9f8d"
format_stamp: "Formatted at 2025-09-01 14:18:31 on dist-test-slave-9gf0"
I20250901 14:18:31.689232  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestEqualRangeBounds.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestEqualRangeBounds.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestEqualRangeBounds.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:31.703951  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:31.705096  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:31.749469  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:42131
I20250901 14:18:31.749616  7156 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:42131 every 8 connection(s)
I20250901 14:18:31.753329  7157 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:31.763823  7157 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d: Bootstrap starting.
I20250901 14:18:31.768330  7157 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:31.772534  7157 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d: No bootstrap required, opened a new log
I20250901 14:18:31.774680  7157 raft_consensus.cc:357] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b0daa9ee831b41e2a5c5e41b64ba9f8d" member_type: VOTER }
I20250901 14:18:31.775080  7157 raft_consensus.cc:383] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:31.775336  7157 raft_consensus.cc:738] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: b0daa9ee831b41e2a5c5e41b64ba9f8d, State: Initialized, Role: FOLLOWER
I20250901 14:18:31.775897  7157 consensus_queue.cc:260] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b0daa9ee831b41e2a5c5e41b64ba9f8d" member_type: VOTER }
I20250901 14:18:31.776330  7157 raft_consensus.cc:397] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:31.776548  7157 raft_consensus.cc:491] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:31.776808  7157 raft_consensus.cc:3058] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:31.781453  7157 raft_consensus.cc:513] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b0daa9ee831b41e2a5c5e41b64ba9f8d" member_type: VOTER }
I20250901 14:18:31.782047  7157 leader_election.cc:304] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: b0daa9ee831b41e2a5c5e41b64ba9f8d; no voters: 
I20250901 14:18:31.783327  7157 leader_election.cc:290] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:31.783749  7160 raft_consensus.cc:2802] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:31.785233  7160 raft_consensus.cc:695] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 1 LEADER]: Becoming Leader. State: Replica: b0daa9ee831b41e2a5c5e41b64ba9f8d, State: Running, Role: LEADER
I20250901 14:18:31.785933  7160 consensus_queue.cc:237] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b0daa9ee831b41e2a5c5e41b64ba9f8d" member_type: VOTER }
I20250901 14:18:31.786463  7157 sys_catalog.cc:564] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:31.788547  7161 sys_catalog.cc:455] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "b0daa9ee831b41e2a5c5e41b64ba9f8d" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b0daa9ee831b41e2a5c5e41b64ba9f8d" member_type: VOTER } }
I20250901 14:18:31.788699  7162 sys_catalog.cc:455] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [sys.catalog]: SysCatalogTable state changed. Reason: New leader b0daa9ee831b41e2a5c5e41b64ba9f8d. Latest consensus state: current_term: 1 leader_uuid: "b0daa9ee831b41e2a5c5e41b64ba9f8d" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "b0daa9ee831b41e2a5c5e41b64ba9f8d" member_type: VOTER } }
I20250901 14:18:31.789197  7161 sys_catalog.cc:458] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:31.789345  7162 sys_catalog.cc:458] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:31.793277  7165 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:31.798346  7165 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:31.801860  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:31.807281  7165 catalog_manager.cc:1349] Generated new cluster ID: 428d0978be2c423bb40a8dae364417a8
I20250901 14:18:31.807562  7165 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:31.821481  7165 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:31.822728  7165 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:31.841243  7165 catalog_manager.cc:5955] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d: Generated new TSK 0
I20250901 14:18:31.841861  7165 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:31.868990  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:31.875135  7178 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:31.876497  7179 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:31.878364  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:31.878711  7181 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:31.879591  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:31.879809  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:31.879962  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736311879945 us; error 0 us; skew 500 ppm
I20250901 14:18:31.880457  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:31.882699  5023 webserver.cc:480] Webserver started at http://127.4.231.193:44979/ using document root <none> and password file <none>
I20250901 14:18:31.883147  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:31.883314  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:31.883555  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:31.884636  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestEqualRangeBounds.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "bcd59d0a77f24a02a709e5792435c82e"
format_stamp: "Formatted at 2025-09-01 14:18:31 on dist-test-slave-9gf0"
I20250901 14:18:31.889083  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.006s	sys 0.000s
I20250901 14:18:31.892241  7186 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:31.892956  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:31.893215  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestEqualRangeBounds.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "bcd59d0a77f24a02a709e5792435c82e"
format_stamp: "Formatted at 2025-09-01 14:18:31 on dist-test-slave-9gf0"
I20250901 14:18:31.893496  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestEqualRangeBounds.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestEqualRangeBounds.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestEqualRangeBounds.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:31.916679  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:31.917820  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:31.922542  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:31.922883  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:31.923231  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:31.923466  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:31.973934  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:45985
I20250901 14:18:31.974012  7256 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:45985 every 8 connection(s)
I20250901 14:18:31.978391  7257 heartbeater.cc:344] Connected to a master server at 127.4.231.254:42131
I20250901 14:18:31.978760  7257 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:31.979491  7257 heartbeater.cc:507] Master 127.4.231.254:42131 requested a full tablet report, sending...
I20250901 14:18:31.981354  7112 ts_manager.cc:194] Registered new tserver with Master: bcd59d0a77f24a02a709e5792435c82e (127.4.231.193:45985)
I20250901 14:18:31.981765  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004751948s
I20250901 14:18:31.983139  7112 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:58460
I20250901 14:18:31.995545  7257 heartbeater.cc:499] Master 127.4.231.254:42131 was elected leader, sending a full tablet report...
I20250901 14:18:32.003250  7111 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:58482:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:32.038777  7222 tablet_service.cc:1468] Processing CreateTablet for tablet f75a88008caf47c691bde8b584880e04 (DEFAULT_TABLE table=client-testtb [id=b95bff8ced524c84a2683e4f83cb8040]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:32.038993  7221 tablet_service.cc:1468] Processing CreateTablet for tablet 065683ff7c6b48d4a5e124e0843c584f (DEFAULT_TABLE table=client-testtb [id=b95bff8ced524c84a2683e4f83cb8040]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:32.040297  7222 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet f75a88008caf47c691bde8b584880e04. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:32.040908  7221 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 065683ff7c6b48d4a5e124e0843c584f. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:32.054432  7267 tablet_bootstrap.cc:492] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e: Bootstrap starting.
I20250901 14:18:32.058805  7267 tablet_bootstrap.cc:654] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:32.063264  7267 tablet_bootstrap.cc:492] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e: No bootstrap required, opened a new log
I20250901 14:18:32.063652  7267 ts_tablet_manager.cc:1397] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e: Time spent bootstrapping tablet: real 0.010s	user 0.007s	sys 0.000s
I20250901 14:18:32.065635  7267 raft_consensus.cc:357] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bcd59d0a77f24a02a709e5792435c82e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45985 } }
I20250901 14:18:32.066061  7267 raft_consensus.cc:383] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:32.066275  7267 raft_consensus.cc:738] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: bcd59d0a77f24a02a709e5792435c82e, State: Initialized, Role: FOLLOWER
I20250901 14:18:32.066766  7267 consensus_queue.cc:260] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bcd59d0a77f24a02a709e5792435c82e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45985 } }
I20250901 14:18:32.067222  7267 raft_consensus.cc:397] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:32.067454  7267 raft_consensus.cc:491] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:32.067704  7267 raft_consensus.cc:3058] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:32.073045  7267 raft_consensus.cc:513] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bcd59d0a77f24a02a709e5792435c82e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45985 } }
I20250901 14:18:32.073640  7267 leader_election.cc:304] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: bcd59d0a77f24a02a709e5792435c82e; no voters: 
I20250901 14:18:32.074831  7267 leader_election.cc:290] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:32.075250  7269 raft_consensus.cc:2802] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:32.077402  7267 ts_tablet_manager.cc:1428] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e: Time spent starting tablet: real 0.014s	user 0.014s	sys 0.000s
I20250901 14:18:32.077553  7269 raft_consensus.cc:695] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 1 LEADER]: Becoming Leader. State: Replica: bcd59d0a77f24a02a709e5792435c82e, State: Running, Role: LEADER
I20250901 14:18:32.078329  7267 tablet_bootstrap.cc:492] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e: Bootstrap starting.
I20250901 14:18:32.078346  7269 consensus_queue.cc:237] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bcd59d0a77f24a02a709e5792435c82e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45985 } }
I20250901 14:18:32.084388  7267 tablet_bootstrap.cc:654] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:32.085516  7111 catalog_manager.cc:5582] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e reported cstate change: term changed from 0 to 1, leader changed from <none> to bcd59d0a77f24a02a709e5792435c82e (127.4.231.193). New cstate: current_term: 1 leader_uuid: "bcd59d0a77f24a02a709e5792435c82e" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bcd59d0a77f24a02a709e5792435c82e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45985 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:32.089326  7267 tablet_bootstrap.cc:492] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e: No bootstrap required, opened a new log
I20250901 14:18:32.089776  7267 ts_tablet_manager.cc:1397] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e: Time spent bootstrapping tablet: real 0.012s	user 0.007s	sys 0.004s
I20250901 14:18:32.091818  7267 raft_consensus.cc:357] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bcd59d0a77f24a02a709e5792435c82e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45985 } }
I20250901 14:18:32.092351  7267 raft_consensus.cc:383] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:32.092576  7267 raft_consensus.cc:738] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: bcd59d0a77f24a02a709e5792435c82e, State: Initialized, Role: FOLLOWER
I20250901 14:18:32.093111  7267 consensus_queue.cc:260] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bcd59d0a77f24a02a709e5792435c82e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45985 } }
I20250901 14:18:32.093744  7267 raft_consensus.cc:397] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:32.094002  7267 raft_consensus.cc:491] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:32.094216  7267 raft_consensus.cc:3058] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:32.099211  7267 raft_consensus.cc:513] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bcd59d0a77f24a02a709e5792435c82e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45985 } }
I20250901 14:18:32.099720  7267 leader_election.cc:304] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: bcd59d0a77f24a02a709e5792435c82e; no voters: 
I20250901 14:18:32.100172  7267 leader_election.cc:290] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:32.100329  7269 raft_consensus.cc:2802] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:32.100849  7269 raft_consensus.cc:695] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 1 LEADER]: Becoming Leader. State: Replica: bcd59d0a77f24a02a709e5792435c82e, State: Running, Role: LEADER
I20250901 14:18:32.101636  7267 ts_tablet_manager.cc:1428] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e: Time spent starting tablet: real 0.012s	user 0.008s	sys 0.004s
I20250901 14:18:32.101506  7269 consensus_queue.cc:237] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bcd59d0a77f24a02a709e5792435c82e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45985 } }
I20250901 14:18:32.106629  7111 catalog_manager.cc:5582] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e reported cstate change: term changed from 0 to 1, leader changed from <none> to bcd59d0a77f24a02a709e5792435c82e (127.4.231.193). New cstate: current_term: 1 leader_uuid: "bcd59d0a77f24a02a709e5792435c82e" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bcd59d0a77f24a02a709e5792435c82e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45985 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:32.121263  7111 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:58482:
name: "TestEqualRangeBounds"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "value"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\010\001\n\000\000\000\t\001\n\000\000\000""\010\001\n\000\000\000\t\001\n\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:32.125233  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:32.143023  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:32.143774  5023 tablet_replica.cc:331] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e: stopping tablet replica
I20250901 14:18:32.144465  5023 raft_consensus.cc:2241] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:32.144881  5023 raft_consensus.cc:2270] T f75a88008caf47c691bde8b584880e04 P bcd59d0a77f24a02a709e5792435c82e [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:32.146981  5023 tablet_replica.cc:331] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e: stopping tablet replica
I20250901 14:18:32.147410  5023 raft_consensus.cc:2241] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:32.147805  5023 raft_consensus.cc:2270] T 065683ff7c6b48d4a5e124e0843c584f P bcd59d0a77f24a02a709e5792435c82e [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:32.167347  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:32.177589  5023 master.cc:561] Master@127.4.231.254:42131 shutting down...
I20250901 14:18:32.195016  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:32.195516  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:32.195919  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P b0daa9ee831b41e2a5c5e41b64ba9f8d: stopping tablet replica
I20250901 14:18:32.215137  5023 master.cc:583] Master@127.4.231.254:42131 shutdown complete.
[       OK ] ClientTest.TestEqualRangeBounds (576 ms)
[ RUN      ] ClientTest.TestMetaCacheLookupNoLeaders
I20250901 14:18:32.239075  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:44767
I20250901 14:18:32.240049  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:32.244807  7277 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:32.244796  7276 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:32.246068  7279 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:32.247228  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:32.248363  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:32.248569  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:32.248723  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736312248706 us; error 0 us; skew 500 ppm
I20250901 14:18:32.249203  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:32.251438  5023 webserver.cc:480] Webserver started at http://127.4.231.254:36797/ using document root <none> and password file <none>
I20250901 14:18:32.251905  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:32.252079  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:32.252321  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:32.253327  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestMetaCacheLookupNoLeaders.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "2656026afa744b2aaa4bbd19d445501e"
format_stamp: "Formatted at 2025-09-01 14:18:32 on dist-test-slave-9gf0"
I20250901 14:18:32.257473  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.001s	sys 0.002s
I20250901 14:18:32.260604  7284 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:32.261374  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:32.261725  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestMetaCacheLookupNoLeaders.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "2656026afa744b2aaa4bbd19d445501e"
format_stamp: "Formatted at 2025-09-01 14:18:32 on dist-test-slave-9gf0"
I20250901 14:18:32.262009  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestMetaCacheLookupNoLeaders.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestMetaCacheLookupNoLeaders.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestMetaCacheLookupNoLeaders.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:32.275218  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:32.276222  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:32.315992  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:44767
I20250901 14:18:32.316097  7345 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:44767 every 8 connection(s)
I20250901 14:18:32.319561  7346 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:32.329610  7346 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e: Bootstrap starting.
I20250901 14:18:32.333757  7346 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:32.337587  7346 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e: No bootstrap required, opened a new log
I20250901 14:18:32.339481  7346 raft_consensus.cc:357] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2656026afa744b2aaa4bbd19d445501e" member_type: VOTER }
I20250901 14:18:32.339852  7346 raft_consensus.cc:383] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:32.340070  7346 raft_consensus.cc:738] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 2656026afa744b2aaa4bbd19d445501e, State: Initialized, Role: FOLLOWER
I20250901 14:18:32.340624  7346 consensus_queue.cc:260] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2656026afa744b2aaa4bbd19d445501e" member_type: VOTER }
I20250901 14:18:32.341037  7346 raft_consensus.cc:397] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:32.341254  7346 raft_consensus.cc:491] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:32.341512  7346 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:32.345854  7346 raft_consensus.cc:513] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2656026afa744b2aaa4bbd19d445501e" member_type: VOTER }
I20250901 14:18:32.346354  7346 leader_election.cc:304] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 2656026afa744b2aaa4bbd19d445501e; no voters: 
I20250901 14:18:32.347700  7346 leader_election.cc:290] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:32.347982  7349 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:32.349400  7349 raft_consensus.cc:695] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 1 LEADER]: Becoming Leader. State: Replica: 2656026afa744b2aaa4bbd19d445501e, State: Running, Role: LEADER
I20250901 14:18:32.350055  7349 consensus_queue.cc:237] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2656026afa744b2aaa4bbd19d445501e" member_type: VOTER }
I20250901 14:18:32.350730  7346 sys_catalog.cc:564] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:32.352838  7351 sys_catalog.cc:455] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [sys.catalog]: SysCatalogTable state changed. Reason: New leader 2656026afa744b2aaa4bbd19d445501e. Latest consensus state: current_term: 1 leader_uuid: "2656026afa744b2aaa4bbd19d445501e" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2656026afa744b2aaa4bbd19d445501e" member_type: VOTER } }
I20250901 14:18:32.352928  7350 sys_catalog.cc:455] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "2656026afa744b2aaa4bbd19d445501e" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2656026afa744b2aaa4bbd19d445501e" member_type: VOTER } }
I20250901 14:18:32.353596  7350 sys_catalog.cc:458] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:32.353612  7351 sys_catalog.cc:458] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:32.356047  7354 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:32.360780  7354 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:32.366076  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:32.369951  7354 catalog_manager.cc:1349] Generated new cluster ID: 1b6d89c69f9845aeab9017e951e07f44
I20250901 14:18:32.370236  7354 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:32.389854  7354 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:32.391105  7354 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:32.404017  7354 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e: Generated new TSK 0
I20250901 14:18:32.404618  7354 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:32.433347  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:32.438815  7367 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:32.440028  7368 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:32.441594  7370 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:32.441941  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:32.443099  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:32.443272  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:32.443396  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736312443386 us; error 0 us; skew 500 ppm
I20250901 14:18:32.443822  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:32.445945  5023 webserver.cc:480] Webserver started at http://127.4.231.193:40245/ using document root <none> and password file <none>
I20250901 14:18:32.446349  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:32.446508  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:32.446713  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:32.447681  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestMetaCacheLookupNoLeaders.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "bd405360fce7497d856cbfcdc4ae3a2c"
format_stamp: "Formatted at 2025-09-01 14:18:32 on dist-test-slave-9gf0"
I20250901 14:18:32.451835  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:32.454947  7375 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:32.455588  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.001s	sys 0.000s
I20250901 14:18:32.455847  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestMetaCacheLookupNoLeaders.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "bd405360fce7497d856cbfcdc4ae3a2c"
format_stamp: "Formatted at 2025-09-01 14:18:32 on dist-test-slave-9gf0"
I20250901 14:18:32.456110  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestMetaCacheLookupNoLeaders.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestMetaCacheLookupNoLeaders.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestMetaCacheLookupNoLeaders.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:32.479425  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:32.480420  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:32.485080  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:32.485396  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:32.485718  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:32.485931  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:32.534765  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:35747
I20250901 14:18:32.534868  7445 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:35747 every 8 connection(s)
I20250901 14:18:32.539191  7446 heartbeater.cc:344] Connected to a master server at 127.4.231.254:44767
I20250901 14:18:32.539585  7446 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:32.540287  7446 heartbeater.cc:507] Master 127.4.231.254:44767 requested a full tablet report, sending...
I20250901 14:18:32.542219  7301 ts_manager.cc:194] Registered new tserver with Master: bd405360fce7497d856cbfcdc4ae3a2c (127.4.231.193:35747)
I20250901 14:18:32.542635  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004847007s
I20250901 14:18:32.543874  7301 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:41700
I20250901 14:18:32.556274  7446 heartbeater.cc:499] Master 127.4.231.254:44767 was elected leader, sending a full tablet report...
I20250901 14:18:32.564345  7301 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:41718:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:32.595491  7411 tablet_service.cc:1468] Processing CreateTablet for tablet ef598f6a11d646e182c69e33ee5dc279 (DEFAULT_TABLE table=client-testtb [id=6631a28e18174f58a225284346685313]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:32.595815  7410 tablet_service.cc:1468] Processing CreateTablet for tablet 9860911aa8fb450cb1e33a309503c8b8 (DEFAULT_TABLE table=client-testtb [id=6631a28e18174f58a225284346685313]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:32.596796  7411 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet ef598f6a11d646e182c69e33ee5dc279. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:32.597311  7410 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 9860911aa8fb450cb1e33a309503c8b8. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:32.610293  7456 tablet_bootstrap.cc:492] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c: Bootstrap starting.
I20250901 14:18:32.615535  7456 tablet_bootstrap.cc:654] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:32.619498  7456 tablet_bootstrap.cc:492] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c: No bootstrap required, opened a new log
I20250901 14:18:32.619870  7456 ts_tablet_manager.cc:1397] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c: Time spent bootstrapping tablet: real 0.010s	user 0.003s	sys 0.005s
I20250901 14:18:32.621682  7456 raft_consensus.cc:357] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35747 } }
I20250901 14:18:32.622138  7456 raft_consensus.cc:383] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:32.622422  7456 raft_consensus.cc:738] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: bd405360fce7497d856cbfcdc4ae3a2c, State: Initialized, Role: FOLLOWER
I20250901 14:18:32.623028  7456 consensus_queue.cc:260] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35747 } }
I20250901 14:18:32.623569  7456 raft_consensus.cc:397] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:32.623762  7456 raft_consensus.cc:491] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:32.623980  7456 raft_consensus.cc:3058] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:32.628998  7456 raft_consensus.cc:513] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35747 } }
I20250901 14:18:32.629505  7456 leader_election.cc:304] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: bd405360fce7497d856cbfcdc4ae3a2c; no voters: 
I20250901 14:18:32.630586  7456 leader_election.cc:290] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:32.630880  7458 raft_consensus.cc:2802] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:32.632994  7456 ts_tablet_manager.cc:1428] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c: Time spent starting tablet: real 0.013s	user 0.008s	sys 0.005s
I20250901 14:18:32.633615  7458 raft_consensus.cc:695] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 1 LEADER]: Becoming Leader. State: Replica: bd405360fce7497d856cbfcdc4ae3a2c, State: Running, Role: LEADER
I20250901 14:18:32.633904  7456 tablet_bootstrap.cc:492] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c: Bootstrap starting.
I20250901 14:18:32.634263  7458 consensus_queue.cc:237] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35747 } }
I20250901 14:18:32.639379  7456 tablet_bootstrap.cc:654] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:32.640689  7301 catalog_manager.cc:5582] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c reported cstate change: term changed from 0 to 1, leader changed from <none> to bd405360fce7497d856cbfcdc4ae3a2c (127.4.231.193). New cstate: current_term: 1 leader_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35747 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:32.644135  7456 tablet_bootstrap.cc:492] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c: No bootstrap required, opened a new log
I20250901 14:18:32.644575  7456 ts_tablet_manager.cc:1397] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c: Time spent bootstrapping tablet: real 0.011s	user 0.005s	sys 0.003s
I20250901 14:18:32.646854  7456 raft_consensus.cc:357] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35747 } }
I20250901 14:18:32.647248  7456 raft_consensus.cc:383] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:32.647500  7456 raft_consensus.cc:738] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: bd405360fce7497d856cbfcdc4ae3a2c, State: Initialized, Role: FOLLOWER
I20250901 14:18:32.648123  7456 consensus_queue.cc:260] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35747 } }
I20250901 14:18:32.648763  7456 raft_consensus.cc:397] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:32.649044  7456 raft_consensus.cc:491] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:32.649371  7456 raft_consensus.cc:3058] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:32.655537  7456 raft_consensus.cc:513] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35747 } }
I20250901 14:18:32.656066  7456 leader_election.cc:304] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: bd405360fce7497d856cbfcdc4ae3a2c; no voters: 
I20250901 14:18:32.656502  7456 leader_election.cc:290] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:32.656656  7458 raft_consensus.cc:2802] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:32.657159  7458 raft_consensus.cc:695] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 1 LEADER]: Becoming Leader. State: Replica: bd405360fce7497d856cbfcdc4ae3a2c, State: Running, Role: LEADER
I20250901 14:18:32.658002  7456 ts_tablet_manager.cc:1428] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c: Time spent starting tablet: real 0.013s	user 0.007s	sys 0.007s
I20250901 14:18:32.657889  7458 consensus_queue.cc:237] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35747 } }
I20250901 14:18:32.663597  7301 catalog_manager.cc:5582] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c reported cstate change: term changed from 0 to 1, leader changed from <none> to bd405360fce7497d856cbfcdc4ae3a2c (127.4.231.193). New cstate: current_term: 1 leader_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd405360fce7497d856cbfcdc4ae3a2c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35747 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:32.687285  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:32.705715  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:32.706409  5023 tablet_replica.cc:331] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c: stopping tablet replica
I20250901 14:18:32.706984  5023 raft_consensus.cc:2241] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:32.707414  5023 raft_consensus.cc:2270] T 9860911aa8fb450cb1e33a309503c8b8 P bd405360fce7497d856cbfcdc4ae3a2c [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:32.709483  5023 tablet_replica.cc:331] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c: stopping tablet replica
I20250901 14:18:32.709983  5023 raft_consensus.cc:2241] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:32.710345  5023 raft_consensus.cc:2270] T ef598f6a11d646e182c69e33ee5dc279 P bd405360fce7497d856cbfcdc4ae3a2c [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:32.730031  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:32.740227  5023 master.cc:561] Master@127.4.231.254:44767 shutting down...
I20250901 14:18:32.755957  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:32.756458  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:32.756883  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 2656026afa744b2aaa4bbd19d445501e: stopping tablet replica
I20250901 14:18:32.776252  5023 master.cc:583] Master@127.4.231.254:44767 shutdown complete.
[       OK ] ClientTest.TestMetaCacheLookupNoLeaders (557 ms)
[ RUN      ] ClientTest.TestInsertIgnore
I20250901 14:18:32.796402  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:37779
I20250901 14:18:32.797323  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:32.801668  7465 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:32.802310  7466 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:32.803194  7468 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:32.804477  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:32.805423  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:32.805675  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:32.805801  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736312805790 us; error 0 us; skew 500 ppm
I20250901 14:18:32.806224  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:32.808316  5023 webserver.cc:480] Webserver started at http://127.4.231.254:38821/ using document root <none> and password file <none>
I20250901 14:18:32.808719  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:32.808868  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:32.809067  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:32.810086  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertIgnore.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "24d49aa93a134f9dad9a749223074ae7"
format_stamp: "Formatted at 2025-09-01 14:18:32 on dist-test-slave-9gf0"
I20250901 14:18:32.814154  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.004s	sys 0.000s
I20250901 14:18:32.817288  7473 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:32.818099  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:32.818370  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertIgnore.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "24d49aa93a134f9dad9a749223074ae7"
format_stamp: "Formatted at 2025-09-01 14:18:32 on dist-test-slave-9gf0"
I20250901 14:18:32.818626  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertIgnore.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertIgnore.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertIgnore.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:32.836768  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:32.837792  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:32.877846  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:37779
I20250901 14:18:32.877923  7534 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:37779 every 8 connection(s)
I20250901 14:18:32.881453  7535 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:32.891816  7535 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7: Bootstrap starting.
I20250901 14:18:32.896065  7535 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:32.899718  7535 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7: No bootstrap required, opened a new log
I20250901 14:18:32.901599  7535 raft_consensus.cc:357] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "24d49aa93a134f9dad9a749223074ae7" member_type: VOTER }
I20250901 14:18:32.901986  7535 raft_consensus.cc:383] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:32.902163  7535 raft_consensus.cc:738] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 24d49aa93a134f9dad9a749223074ae7, State: Initialized, Role: FOLLOWER
I20250901 14:18:32.902695  7535 consensus_queue.cc:260] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "24d49aa93a134f9dad9a749223074ae7" member_type: VOTER }
I20250901 14:18:32.903124  7535 raft_consensus.cc:397] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:32.903307  7535 raft_consensus.cc:491] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:32.903515  7535 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:32.907883  7535 raft_consensus.cc:513] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "24d49aa93a134f9dad9a749223074ae7" member_type: VOTER }
I20250901 14:18:32.908390  7535 leader_election.cc:304] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 24d49aa93a134f9dad9a749223074ae7; no voters: 
I20250901 14:18:32.909410  7535 leader_election.cc:290] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:32.909799  7538 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:32.911105  7538 raft_consensus.cc:695] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 1 LEADER]: Becoming Leader. State: Replica: 24d49aa93a134f9dad9a749223074ae7, State: Running, Role: LEADER
I20250901 14:18:32.911772  7538 consensus_queue.cc:237] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "24d49aa93a134f9dad9a749223074ae7" member_type: VOTER }
I20250901 14:18:32.912349  7535 sys_catalog.cc:564] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:32.914659  7540 sys_catalog.cc:455] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [sys.catalog]: SysCatalogTable state changed. Reason: New leader 24d49aa93a134f9dad9a749223074ae7. Latest consensus state: current_term: 1 leader_uuid: "24d49aa93a134f9dad9a749223074ae7" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "24d49aa93a134f9dad9a749223074ae7" member_type: VOTER } }
I20250901 14:18:32.914588  7539 sys_catalog.cc:455] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "24d49aa93a134f9dad9a749223074ae7" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "24d49aa93a134f9dad9a749223074ae7" member_type: VOTER } }
I20250901 14:18:32.915299  7540 sys_catalog.cc:458] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:32.915494  7539 sys_catalog.cc:458] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:32.917851  7543 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:32.922618  7543 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:32.930819  7543 catalog_manager.cc:1349] Generated new cluster ID: 19a4a6166da74667bdc8a0570b4a2107
I20250901 14:18:32.931175  7543 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:32.931560  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:32.958307  7543 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:32.959532  7543 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:32.976167  7543 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7: Generated new TSK 0
I20250901 14:18:32.976750  7543 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:32.997750  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:33.003310  7556 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:33.005247  7557 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:33.005506  7559 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:33.005916  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:33.006654  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:33.006841  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:33.006963  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736313006951 us; error 0 us; skew 500 ppm
I20250901 14:18:33.007390  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:33.009596  5023 webserver.cc:480] Webserver started at http://127.4.231.193:45235/ using document root <none> and password file <none>
I20250901 14:18:33.010002  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:33.010151  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:33.010350  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:33.011452  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertIgnore.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "e390fdd2ab274f45bb4d31705af8991a"
format_stamp: "Formatted at 2025-09-01 14:18:33 on dist-test-slave-9gf0"
I20250901 14:18:33.015520  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:33.018589  7564 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:33.019253  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:33.019533  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertIgnore.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "e390fdd2ab274f45bb4d31705af8991a"
format_stamp: "Formatted at 2025-09-01 14:18:33 on dist-test-slave-9gf0"
I20250901 14:18:33.019796  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertIgnore.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertIgnore.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertIgnore.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:33.044768  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:33.045789  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:33.050412  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:33.050719  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:33.051012  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:33.051226  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:33.101022  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:37911
I20250901 14:18:33.101102  7634 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:37911 every 8 connection(s)
I20250901 14:18:33.106040  7635 heartbeater.cc:344] Connected to a master server at 127.4.231.254:37779
I20250901 14:18:33.106398  7635 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:33.107146  7635 heartbeater.cc:507] Master 127.4.231.254:37779 requested a full tablet report, sending...
I20250901 14:18:33.109117  7490 ts_manager.cc:194] Registered new tserver with Master: e390fdd2ab274f45bb4d31705af8991a (127.4.231.193:37911)
I20250901 14:18:33.109227  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.005048679s
I20250901 14:18:33.110838  7490 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:39968
I20250901 14:18:33.123471  7635 heartbeater.cc:499] Master 127.4.231.254:37779 was elected leader, sending a full tablet report...
I20250901 14:18:33.131278  7489 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:39976:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:33.163096  7600 tablet_service.cc:1468] Processing CreateTablet for tablet 073b1bd3f2f0493e8c107049a9b3780b (DEFAULT_TABLE table=client-testtb [id=6b5135c2b46040828caf309800e70108]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:33.163205  7599 tablet_service.cc:1468] Processing CreateTablet for tablet 3563647c3ca743738b509cef1762dbb9 (DEFAULT_TABLE table=client-testtb [id=6b5135c2b46040828caf309800e70108]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:33.164588  7600 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 073b1bd3f2f0493e8c107049a9b3780b. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:33.165413  7599 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 3563647c3ca743738b509cef1762dbb9. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:33.181823  7645 tablet_bootstrap.cc:492] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a: Bootstrap starting.
I20250901 14:18:33.185689  7645 tablet_bootstrap.cc:654] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:33.189647  7645 tablet_bootstrap.cc:492] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a: No bootstrap required, opened a new log
I20250901 14:18:33.190008  7645 ts_tablet_manager.cc:1397] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a: Time spent bootstrapping tablet: real 0.008s	user 0.008s	sys 0.000s
I20250901 14:18:33.192020  7645 raft_consensus.cc:357] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e390fdd2ab274f45bb4d31705af8991a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37911 } }
I20250901 14:18:33.192488  7645 raft_consensus.cc:383] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:33.192695  7645 raft_consensus.cc:738] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: e390fdd2ab274f45bb4d31705af8991a, State: Initialized, Role: FOLLOWER
I20250901 14:18:33.193161  7645 consensus_queue.cc:260] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e390fdd2ab274f45bb4d31705af8991a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37911 } }
I20250901 14:18:33.193631  7645 raft_consensus.cc:397] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:33.193858  7645 raft_consensus.cc:491] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:33.194095  7645 raft_consensus.cc:3058] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:33.198966  7645 raft_consensus.cc:513] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e390fdd2ab274f45bb4d31705af8991a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37911 } }
I20250901 14:18:33.199481  7645 leader_election.cc:304] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: e390fdd2ab274f45bb4d31705af8991a; no voters: 
I20250901 14:18:33.200619  7645 leader_election.cc:290] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:33.200927  7647 raft_consensus.cc:2802] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:33.202862  7645 ts_tablet_manager.cc:1428] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a: Time spent starting tablet: real 0.013s	user 0.011s	sys 0.001s
I20250901 14:18:33.203033  7647 raft_consensus.cc:695] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 1 LEADER]: Becoming Leader. State: Replica: e390fdd2ab274f45bb4d31705af8991a, State: Running, Role: LEADER
I20250901 14:18:33.203660  7645 tablet_bootstrap.cc:492] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a: Bootstrap starting.
I20250901 14:18:33.203706  7647 consensus_queue.cc:237] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e390fdd2ab274f45bb4d31705af8991a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37911 } }
I20250901 14:18:33.209514  7645 tablet_bootstrap.cc:654] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:33.214335  7645 tablet_bootstrap.cc:492] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a: No bootstrap required, opened a new log
I20250901 14:18:33.214717  7645 ts_tablet_manager.cc:1397] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a: Time spent bootstrapping tablet: real 0.011s	user 0.008s	sys 0.002s
I20250901 14:18:33.214394  7490 catalog_manager.cc:5582] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a reported cstate change: term changed from 0 to 1, leader changed from <none> to e390fdd2ab274f45bb4d31705af8991a (127.4.231.193). New cstate: current_term: 1 leader_uuid: "e390fdd2ab274f45bb4d31705af8991a" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e390fdd2ab274f45bb4d31705af8991a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37911 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:33.216778  7645 raft_consensus.cc:357] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e390fdd2ab274f45bb4d31705af8991a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37911 } }
I20250901 14:18:33.217326  7645 raft_consensus.cc:383] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:33.217672  7645 raft_consensus.cc:738] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: e390fdd2ab274f45bb4d31705af8991a, State: Initialized, Role: FOLLOWER
I20250901 14:18:33.218289  7645 consensus_queue.cc:260] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e390fdd2ab274f45bb4d31705af8991a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37911 } }
I20250901 14:18:33.218858  7645 raft_consensus.cc:397] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:33.219105  7645 raft_consensus.cc:491] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:33.219357  7645 raft_consensus.cc:3058] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:33.224834  7645 raft_consensus.cc:513] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e390fdd2ab274f45bb4d31705af8991a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37911 } }
I20250901 14:18:33.225554  7645 leader_election.cc:304] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: e390fdd2ab274f45bb4d31705af8991a; no voters: 
I20250901 14:18:33.226141  7645 leader_election.cc:290] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:33.226308  7647 raft_consensus.cc:2802] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:33.227008  7647 raft_consensus.cc:695] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 1 LEADER]: Becoming Leader. State: Replica: e390fdd2ab274f45bb4d31705af8991a, State: Running, Role: LEADER
I20250901 14:18:33.227952  7645 ts_tablet_manager.cc:1428] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a: Time spent starting tablet: real 0.013s	user 0.009s	sys 0.003s
I20250901 14:18:33.227722  7647 consensus_queue.cc:237] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e390fdd2ab274f45bb4d31705af8991a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37911 } }
I20250901 14:18:33.232879  7490 catalog_manager.cc:5582] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a reported cstate change: term changed from 0 to 1, leader changed from <none> to e390fdd2ab274f45bb4d31705af8991a (127.4.231.193). New cstate: current_term: 1 leader_uuid: "e390fdd2ab274f45bb4d31705af8991a" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e390fdd2ab274f45bb4d31705af8991a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37911 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:33.346133  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:33.364683  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:33.365360  5023 tablet_replica.cc:331] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a: stopping tablet replica
I20250901 14:18:33.365972  5023 raft_consensus.cc:2241] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:33.366427  5023 raft_consensus.cc:2270] T 073b1bd3f2f0493e8c107049a9b3780b P e390fdd2ab274f45bb4d31705af8991a [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:33.368388  5023 tablet_replica.cc:331] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a: stopping tablet replica
I20250901 14:18:33.368824  5023 raft_consensus.cc:2241] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:33.369179  5023 raft_consensus.cc:2270] T 3563647c3ca743738b509cef1762dbb9 P e390fdd2ab274f45bb4d31705af8991a [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:33.388736  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:33.399410  5023 master.cc:561] Master@127.4.231.254:37779 shutting down...
I20250901 14:18:33.415516  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:33.416026  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:33.416457  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 24d49aa93a134f9dad9a749223074ae7: stopping tablet replica
I20250901 14:18:33.435838  5023 master.cc:583] Master@127.4.231.254:37779 shutdown complete.
[       OK ] ClientTest.TestInsertIgnore (659 ms)
[ RUN      ] ClientTest.TestAsyncFlushResponseAfterSessionDropped
I20250901 14:18:33.455803  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:38529
I20250901 14:18:33.456732  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:33.461161  7656 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:33.461802  7657 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:33.462675  7659 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:33.463819  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:33.464650  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:33.464821  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:33.464962  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736313464943 us; error 0 us; skew 500 ppm
I20250901 14:18:33.465468  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:33.467722  5023 webserver.cc:480] Webserver started at http://127.4.231.254:45867/ using document root <none> and password file <none>
I20250901 14:18:33.468170  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:33.468353  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:33.468616  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:33.469727  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAsyncFlushResponseAfterSessionDropped.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "e79dc5c3aa0044dab88a782030e44d8f"
format_stamp: "Formatted at 2025-09-01 14:18:33 on dist-test-slave-9gf0"
I20250901 14:18:33.474035  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.000s
I20250901 14:18:33.477257  7664 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:33.477977  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:33.478247  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAsyncFlushResponseAfterSessionDropped.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "e79dc5c3aa0044dab88a782030e44d8f"
format_stamp: "Formatted at 2025-09-01 14:18:33 on dist-test-slave-9gf0"
I20250901 14:18:33.478554  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAsyncFlushResponseAfterSessionDropped.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAsyncFlushResponseAfterSessionDropped.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAsyncFlushResponseAfterSessionDropped.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:33.496968  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:33.498193  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:33.537889  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:38529
I20250901 14:18:33.537988  7725 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:38529 every 8 connection(s)
I20250901 14:18:33.541519  7726 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:33.551986  7726 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f: Bootstrap starting.
I20250901 14:18:33.556519  7726 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:33.560217  7726 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f: No bootstrap required, opened a new log
I20250901 14:18:33.562101  7726 raft_consensus.cc:357] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e79dc5c3aa0044dab88a782030e44d8f" member_type: VOTER }
I20250901 14:18:33.562479  7726 raft_consensus.cc:383] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:33.562654  7726 raft_consensus.cc:738] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: e79dc5c3aa0044dab88a782030e44d8f, State: Initialized, Role: FOLLOWER
I20250901 14:18:33.563135  7726 consensus_queue.cc:260] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e79dc5c3aa0044dab88a782030e44d8f" member_type: VOTER }
I20250901 14:18:33.563548  7726 raft_consensus.cc:397] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:33.563737  7726 raft_consensus.cc:491] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:33.563937  7726 raft_consensus.cc:3058] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:33.568295  7726 raft_consensus.cc:513] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e79dc5c3aa0044dab88a782030e44d8f" member_type: VOTER }
I20250901 14:18:33.568758  7726 leader_election.cc:304] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: e79dc5c3aa0044dab88a782030e44d8f; no voters: 
I20250901 14:18:33.569824  7726 leader_election.cc:290] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:33.570104  7729 raft_consensus.cc:2802] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:33.571496  7729 raft_consensus.cc:695] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 1 LEADER]: Becoming Leader. State: Replica: e79dc5c3aa0044dab88a782030e44d8f, State: Running, Role: LEADER
I20250901 14:18:33.572126  7729 consensus_queue.cc:237] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e79dc5c3aa0044dab88a782030e44d8f" member_type: VOTER }
I20250901 14:18:33.572733  7726 sys_catalog.cc:564] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:33.574874  7730 sys_catalog.cc:455] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "e79dc5c3aa0044dab88a782030e44d8f" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e79dc5c3aa0044dab88a782030e44d8f" member_type: VOTER } }
I20250901 14:18:33.574985  7731 sys_catalog.cc:455] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [sys.catalog]: SysCatalogTable state changed. Reason: New leader e79dc5c3aa0044dab88a782030e44d8f. Latest consensus state: current_term: 1 leader_uuid: "e79dc5c3aa0044dab88a782030e44d8f" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e79dc5c3aa0044dab88a782030e44d8f" member_type: VOTER } }
I20250901 14:18:33.575628  7731 sys_catalog.cc:458] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:33.575652  7730 sys_catalog.cc:458] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:33.579391  7734 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:33.583967  7734 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:33.589918  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:33.592079  7734 catalog_manager.cc:1349] Generated new cluster ID: ecc009e5b9614d45b04c1702b0eb6584
I20250901 14:18:33.592358  7734 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:33.637130  7734 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:33.638399  7734 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:33.652779  7734 catalog_manager.cc:5955] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f: Generated new TSK 0
I20250901 14:18:33.653388  7734 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:33.721128  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:33.726765  7747 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:33.727828  7748 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:33.729609  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:33.729975  7750 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:33.731003  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:33.731261  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:33.731426  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736313731410 us; error 0 us; skew 500 ppm
I20250901 14:18:33.731930  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:33.734187  5023 webserver.cc:480] Webserver started at http://127.4.231.193:33699/ using document root <none> and password file <none>
I20250901 14:18:33.734655  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:33.734828  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:33.735074  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:33.736089  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAsyncFlushResponseAfterSessionDropped.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd"
format_stamp: "Formatted at 2025-09-01 14:18:33 on dist-test-slave-9gf0"
I20250901 14:18:33.740324  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.006s	sys 0.000s
I20250901 14:18:33.743587  7755 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:33.744307  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:33.744585  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAsyncFlushResponseAfterSessionDropped.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd"
format_stamp: "Formatted at 2025-09-01 14:18:33 on dist-test-slave-9gf0"
I20250901 14:18:33.744889  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAsyncFlushResponseAfterSessionDropped.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAsyncFlushResponseAfterSessionDropped.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAsyncFlushResponseAfterSessionDropped.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:33.758378  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:33.759327  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:33.765141  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:33.765502  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:33.765787  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:33.765937  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:33.820320  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:43815
I20250901 14:18:33.820407  7825 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:43815 every 8 connection(s)
I20250901 14:18:33.824569  7826 heartbeater.cc:344] Connected to a master server at 127.4.231.254:38529
I20250901 14:18:33.824952  7826 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:33.825721  7826 heartbeater.cc:507] Master 127.4.231.254:38529 requested a full tablet report, sending...
I20250901 14:18:33.827739  7681 ts_manager.cc:194] Registered new tserver with Master: cbff68cc7dbd46c3be7e5de0a9f1fdbd (127.4.231.193:43815)
I20250901 14:18:33.828187  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004820402s
I20250901 14:18:33.829404  7681 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:59710
I20250901 14:18:33.841812  7826 heartbeater.cc:499] Master 127.4.231.254:38529 was elected leader, sending a full tablet report...
I20250901 14:18:33.849254  7680 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:59734:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:33.882807  7791 tablet_service.cc:1468] Processing CreateTablet for tablet 2948d3a73f194571a09c6539b030229a (DEFAULT_TABLE table=client-testtb [id=883b2f1ac1de4e15bd82b2ece2bc4d0a]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:33.883050  7790 tablet_service.cc:1468] Processing CreateTablet for tablet a9993e26aa1147d6a4713abb1ee57563 (DEFAULT_TABLE table=client-testtb [id=883b2f1ac1de4e15bd82b2ece2bc4d0a]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:33.884074  7791 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 2948d3a73f194571a09c6539b030229a. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:33.884709  7790 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet a9993e26aa1147d6a4713abb1ee57563. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:33.900791  7836 tablet_bootstrap.cc:492] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd: Bootstrap starting.
I20250901 14:18:33.904798  7836 tablet_bootstrap.cc:654] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:33.908857  7836 tablet_bootstrap.cc:492] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd: No bootstrap required, opened a new log
I20250901 14:18:33.909209  7836 ts_tablet_manager.cc:1397] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd: Time spent bootstrapping tablet: real 0.009s	user 0.003s	sys 0.004s
I20250901 14:18:33.910992  7836 raft_consensus.cc:357] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43815 } }
I20250901 14:18:33.911409  7836 raft_consensus.cc:383] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:33.911716  7836 raft_consensus.cc:738] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: cbff68cc7dbd46c3be7e5de0a9f1fdbd, State: Initialized, Role: FOLLOWER
I20250901 14:18:33.912264  7836 consensus_queue.cc:260] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43815 } }
I20250901 14:18:33.912698  7836 raft_consensus.cc:397] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:33.912911  7836 raft_consensus.cc:491] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:33.913157  7836 raft_consensus.cc:3058] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:33.918177  7836 raft_consensus.cc:513] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43815 } }
I20250901 14:18:33.918752  7836 leader_election.cc:304] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: cbff68cc7dbd46c3be7e5de0a9f1fdbd; no voters: 
I20250901 14:18:33.920003  7836 leader_election.cc:290] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:33.920284  7838 raft_consensus.cc:2802] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:33.921661  7838 raft_consensus.cc:695] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 1 LEADER]: Becoming Leader. State: Replica: cbff68cc7dbd46c3be7e5de0a9f1fdbd, State: Running, Role: LEADER
I20250901 14:18:33.922367  7836 ts_tablet_manager.cc:1428] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd: Time spent starting tablet: real 0.013s	user 0.013s	sys 0.000s
I20250901 14:18:33.922379  7838 consensus_queue.cc:237] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43815 } }
I20250901 14:18:33.923133  7836 tablet_bootstrap.cc:492] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd: Bootstrap starting.
I20250901 14:18:33.928967  7836 tablet_bootstrap.cc:654] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:33.929306  7681 catalog_manager.cc:5582] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd reported cstate change: term changed from 0 to 1, leader changed from <none> to cbff68cc7dbd46c3be7e5de0a9f1fdbd (127.4.231.193). New cstate: current_term: 1 leader_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43815 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:33.933789  7836 tablet_bootstrap.cc:492] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd: No bootstrap required, opened a new log
I20250901 14:18:33.934199  7836 ts_tablet_manager.cc:1397] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd: Time spent bootstrapping tablet: real 0.011s	user 0.009s	sys 0.001s
I20250901 14:18:33.936339  7836 raft_consensus.cc:357] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43815 } }
I20250901 14:18:33.936929  7836 raft_consensus.cc:383] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:33.937209  7836 raft_consensus.cc:738] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: cbff68cc7dbd46c3be7e5de0a9f1fdbd, State: Initialized, Role: FOLLOWER
I20250901 14:18:33.937887  7836 consensus_queue.cc:260] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43815 } }
I20250901 14:18:33.938328  7836 raft_consensus.cc:397] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:33.938555  7836 raft_consensus.cc:491] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:33.938817  7836 raft_consensus.cc:3058] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:33.944943  7836 raft_consensus.cc:513] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43815 } }
I20250901 14:18:33.945595  7836 leader_election.cc:304] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: cbff68cc7dbd46c3be7e5de0a9f1fdbd; no voters: 
I20250901 14:18:33.946169  7836 leader_election.cc:290] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:33.946331  7838 raft_consensus.cc:2802] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:33.947294  7838 raft_consensus.cc:695] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 1 LEADER]: Becoming Leader. State: Replica: cbff68cc7dbd46c3be7e5de0a9f1fdbd, State: Running, Role: LEADER
I20250901 14:18:33.947463  7836 ts_tablet_manager.cc:1428] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd: Time spent starting tablet: real 0.013s	user 0.010s	sys 0.002s
I20250901 14:18:33.947927  7838 consensus_queue.cc:237] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43815 } }
I20250901 14:18:33.953349  7680 catalog_manager.cc:5582] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd reported cstate change: term changed from 0 to 1, leader changed from <none> to cbff68cc7dbd46c3be7e5de0a9f1fdbd (127.4.231.193). New cstate: current_term: 1 leader_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cbff68cc7dbd46c3be7e5de0a9f1fdbd" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43815 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:33.998643  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:34.020867  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:34.021637  5023 tablet_replica.cc:331] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd: stopping tablet replica
I20250901 14:18:34.022220  5023 raft_consensus.cc:2241] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:34.022696  5023 raft_consensus.cc:2270] T 2948d3a73f194571a09c6539b030229a P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:34.024768  5023 tablet_replica.cc:331] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd: stopping tablet replica
I20250901 14:18:34.025192  5023 raft_consensus.cc:2241] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:34.025611  5023 raft_consensus.cc:2270] T a9993e26aa1147d6a4713abb1ee57563 P cbff68cc7dbd46c3be7e5de0a9f1fdbd [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:34.045473  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:34.056469  5023 master.cc:561] Master@127.4.231.254:38529 shutting down...
I20250901 14:18:34.071755  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:34.072227  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:34.072610  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P e79dc5c3aa0044dab88a782030e44d8f: stopping tablet replica
I20250901 14:18:34.092625  5023 master.cc:583] Master@127.4.231.254:38529 shutdown complete.
[       OK ] ClientTest.TestAsyncFlushResponseAfterSessionDropped (657 ms)
[ RUN      ] ClientTest.TestInsertDuplicateKeys
I20250901 14:18:34.113713  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:39427
I20250901 14:18:34.114704  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:34.119283  7849 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:34.120613  7850 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:34.120855  7852 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:34.121829  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:34.122737  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:34.122925  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:34.123080  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736314123062 us; error 0 us; skew 500 ppm
I20250901 14:18:34.123560  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:34.125856  5023 webserver.cc:480] Webserver started at http://127.4.231.254:39495/ using document root <none> and password file <none>
I20250901 14:18:34.126294  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:34.126463  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:34.126708  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:34.127732  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertDuplicateKeys.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "818f101197104bc1ab34130bf2d9b4d3"
format_stamp: "Formatted at 2025-09-01 14:18:34 on dist-test-slave-9gf0"
I20250901 14:18:34.131870  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.003s
I20250901 14:18:34.134949  7858 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:34.135648  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.001s	sys 0.000s
I20250901 14:18:34.135921  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertDuplicateKeys.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "818f101197104bc1ab34130bf2d9b4d3"
format_stamp: "Formatted at 2025-09-01 14:18:34 on dist-test-slave-9gf0"
I20250901 14:18:34.136165  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertDuplicateKeys.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertDuplicateKeys.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertDuplicateKeys.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:34.154783  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:34.155741  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:34.195839  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:39427
I20250901 14:18:34.195919  7919 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:39427 every 8 connection(s)
I20250901 14:18:34.199537  7920 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:34.209841  7920 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3: Bootstrap starting.
I20250901 14:18:34.213997  7920 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:34.217895  7920 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3: No bootstrap required, opened a new log
I20250901 14:18:34.219791  7920 raft_consensus.cc:357] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "818f101197104bc1ab34130bf2d9b4d3" member_type: VOTER }
I20250901 14:18:34.220181  7920 raft_consensus.cc:383] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:34.220420  7920 raft_consensus.cc:738] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 818f101197104bc1ab34130bf2d9b4d3, State: Initialized, Role: FOLLOWER
I20250901 14:18:34.220986  7920 consensus_queue.cc:260] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "818f101197104bc1ab34130bf2d9b4d3" member_type: VOTER }
I20250901 14:18:34.221593  7920 raft_consensus.cc:397] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:34.221930  7920 raft_consensus.cc:491] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:34.222288  7920 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:34.229514  7920 raft_consensus.cc:513] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "818f101197104bc1ab34130bf2d9b4d3" member_type: VOTER }
I20250901 14:18:34.230141  7920 leader_election.cc:304] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 818f101197104bc1ab34130bf2d9b4d3; no voters: 
I20250901 14:18:34.231282  7920 leader_election.cc:290] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:34.231587  7923 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:34.232940  7923 raft_consensus.cc:695] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 1 LEADER]: Becoming Leader. State: Replica: 818f101197104bc1ab34130bf2d9b4d3, State: Running, Role: LEADER
I20250901 14:18:34.233502  7923 consensus_queue.cc:237] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "818f101197104bc1ab34130bf2d9b4d3" member_type: VOTER }
I20250901 14:18:34.234189  7920 sys_catalog.cc:564] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:34.235769  7924 sys_catalog.cc:455] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "818f101197104bc1ab34130bf2d9b4d3" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "818f101197104bc1ab34130bf2d9b4d3" member_type: VOTER } }
I20250901 14:18:34.236217  7924 sys_catalog.cc:458] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:34.237946  7925 sys_catalog.cc:455] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [sys.catalog]: SysCatalogTable state changed. Reason: New leader 818f101197104bc1ab34130bf2d9b4d3. Latest consensus state: current_term: 1 leader_uuid: "818f101197104bc1ab34130bf2d9b4d3" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "818f101197104bc1ab34130bf2d9b4d3" member_type: VOTER } }
I20250901 14:18:34.238543  7925 sys_catalog.cc:458] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:34.241901  7927 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:34.247571  7927 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:34.248502  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:34.255597  7927 catalog_manager.cc:1349] Generated new cluster ID: 67161cf0233b44ab8ae696493a4000e9
I20250901 14:18:34.255885  7927 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:34.268939  7927 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:34.270226  7927 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:34.288314  7927 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3: Generated new TSK 0
I20250901 14:18:34.288924  7927 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:34.314980  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:34.320657  7941 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:34.321496  7942 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:34.323516  7944 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:34.324138  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:34.324916  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:34.325093  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:34.325212  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736314325201 us; error 0 us; skew 500 ppm
I20250901 14:18:34.325719  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:34.327808  5023 webserver.cc:480] Webserver started at http://127.4.231.193:35881/ using document root <none> and password file <none>
I20250901 14:18:34.328215  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:34.328368  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:34.328579  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:34.329638  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertDuplicateKeys.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "2330d94672e048338dae9a27055743fc"
format_stamp: "Formatted at 2025-09-01 14:18:34 on dist-test-slave-9gf0"
I20250901 14:18:34.333745  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.000s
I20250901 14:18:34.336872  7949 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:34.337689  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:34.337949  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertDuplicateKeys.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "2330d94672e048338dae9a27055743fc"
format_stamp: "Formatted at 2025-09-01 14:18:34 on dist-test-slave-9gf0"
I20250901 14:18:34.338234  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertDuplicateKeys.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertDuplicateKeys.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInsertDuplicateKeys.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:34.350736  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:34.351712  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:34.357072  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:34.357376  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.001s	sys 0.000s
I20250901 14:18:34.357722  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:34.357944  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:34.406450  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:35867
I20250901 14:18:34.406550  8019 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:35867 every 8 connection(s)
I20250901 14:18:34.411134  8020 heartbeater.cc:344] Connected to a master server at 127.4.231.254:39427
I20250901 14:18:34.411440  8020 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:34.412140  8020 heartbeater.cc:507] Master 127.4.231.254:39427 requested a full tablet report, sending...
I20250901 14:18:34.413962  7875 ts_manager.cc:194] Registered new tserver with Master: 2330d94672e048338dae9a27055743fc (127.4.231.193:35867)
I20250901 14:18:34.414355  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004830545s
I20250901 14:18:34.416136  7875 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:39398
I20250901 14:18:34.428814  8020 heartbeater.cc:499] Master 127.4.231.254:39427 was elected leader, sending a full tablet report...
I20250901 14:18:34.436185  7874 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:39416:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:34.471639  7980 tablet_service.cc:1468] Processing CreateTablet for tablet 230ab400346d469fa143a39342f16a20 (DEFAULT_TABLE table=client-testtb [id=c3e2b869fd4e49bf878708fa59b78bb4]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:34.472007  7979 tablet_service.cc:1468] Processing CreateTablet for tablet 1bd974672da94d97b6b8fa251f5a2198 (DEFAULT_TABLE table=client-testtb [id=c3e2b869fd4e49bf878708fa59b78bb4]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:34.473006  7980 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 230ab400346d469fa143a39342f16a20. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:34.473678  7979 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 1bd974672da94d97b6b8fa251f5a2198. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:34.490254  8030 tablet_bootstrap.cc:492] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc: Bootstrap starting.
I20250901 14:18:34.494158  8030 tablet_bootstrap.cc:654] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:34.498745  8030 tablet_bootstrap.cc:492] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc: No bootstrap required, opened a new log
I20250901 14:18:34.499243  8030 ts_tablet_manager.cc:1397] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc: Time spent bootstrapping tablet: real 0.009s	user 0.005s	sys 0.004s
I20250901 14:18:34.501022  8030 raft_consensus.cc:357] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.501397  8030 raft_consensus.cc:383] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:34.501632  8030 raft_consensus.cc:738] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 2330d94672e048338dae9a27055743fc, State: Initialized, Role: FOLLOWER
I20250901 14:18:34.502113  8030 consensus_queue.cc:260] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.502542  8030 raft_consensus.cc:397] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:34.502728  8030 raft_consensus.cc:491] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:34.502933  8030 raft_consensus.cc:3058] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:34.508140  8030 raft_consensus.cc:513] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.508647  8030 leader_election.cc:304] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 2330d94672e048338dae9a27055743fc; no voters: 
I20250901 14:18:34.509851  8030 leader_election.cc:290] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:34.510092  8032 raft_consensus.cc:2802] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:34.512233  8032 raft_consensus.cc:695] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 1 LEADER]: Becoming Leader. State: Replica: 2330d94672e048338dae9a27055743fc, State: Running, Role: LEADER
I20250901 14:18:34.512976  8030 ts_tablet_manager.cc:1428] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc: Time spent starting tablet: real 0.013s	user 0.009s	sys 0.003s
I20250901 14:18:34.512866  8032 consensus_queue.cc:237] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.513893  8030 tablet_bootstrap.cc:492] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc: Bootstrap starting.
I20250901 14:18:34.518208  7874 catalog_manager.cc:5582] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc reported cstate change: term changed from 0 to 1, leader changed from <none> to 2330d94672e048338dae9a27055743fc (127.4.231.193). New cstate: current_term: 1 leader_uuid: "2330d94672e048338dae9a27055743fc" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:34.519641  8030 tablet_bootstrap.cc:654] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:34.524569  8030 tablet_bootstrap.cc:492] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc: No bootstrap required, opened a new log
I20250901 14:18:34.525027  8030 ts_tablet_manager.cc:1397] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc: Time spent bootstrapping tablet: real 0.011s	user 0.009s	sys 0.000s
I20250901 14:18:34.527133  8030 raft_consensus.cc:357] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.527532  8030 raft_consensus.cc:383] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:34.527709  8030 raft_consensus.cc:738] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 2330d94672e048338dae9a27055743fc, State: Initialized, Role: FOLLOWER
I20250901 14:18:34.528185  8030 consensus_queue.cc:260] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.528615  8030 raft_consensus.cc:397] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:34.528815  8030 raft_consensus.cc:491] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:34.529013  8030 raft_consensus.cc:3058] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:34.534037  8030 raft_consensus.cc:513] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.534562  8030 leader_election.cc:304] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 2330d94672e048338dae9a27055743fc; no voters: 
I20250901 14:18:34.534998  8030 leader_election.cc:290] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:34.535197  8032 raft_consensus.cc:2802] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:34.535710  8032 raft_consensus.cc:695] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 1 LEADER]: Becoming Leader. State: Replica: 2330d94672e048338dae9a27055743fc, State: Running, Role: LEADER
I20250901 14:18:34.536448  8030 ts_tablet_manager.cc:1428] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc: Time spent starting tablet: real 0.011s	user 0.007s	sys 0.004s
I20250901 14:18:34.536338  8032 consensus_queue.cc:237] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.541596  7874 catalog_manager.cc:5582] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc reported cstate change: term changed from 0 to 1, leader changed from <none> to 2330d94672e048338dae9a27055743fc (127.4.231.193). New cstate: current_term: 1 leader_uuid: "2330d94672e048338dae9a27055743fc" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:34.557168  7875 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:39416:
name: "client-testtb2"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:34.575309  7980 tablet_service.cc:1468] Processing CreateTablet for tablet 904b7e2e9e61479a97bfe8c17f9e7594 (DEFAULT_TABLE table=client-testtb2 [id=5f3d1a55197b487ebb61f73c8184334c]), partition=RANGE (key) PARTITION UNBOUNDED
I20250901 14:18:34.576295  7980 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 904b7e2e9e61479a97bfe8c17f9e7594. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:34.588120  8030 tablet_bootstrap.cc:492] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc: Bootstrap starting.
I20250901 14:18:34.592869  8030 tablet_bootstrap.cc:654] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:34.597309  8030 tablet_bootstrap.cc:492] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc: No bootstrap required, opened a new log
I20250901 14:18:34.597730  8030 ts_tablet_manager.cc:1397] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc: Time spent bootstrapping tablet: real 0.010s	user 0.005s	sys 0.004s
I20250901 14:18:34.599699  8030 raft_consensus.cc:357] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.600136  8030 raft_consensus.cc:383] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:34.600353  8030 raft_consensus.cc:738] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 2330d94672e048338dae9a27055743fc, State: Initialized, Role: FOLLOWER
I20250901 14:18:34.600872  8030 consensus_queue.cc:260] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.601320  8030 raft_consensus.cc:397] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:34.601573  8030 raft_consensus.cc:491] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:34.601819  8030 raft_consensus.cc:3058] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:34.606513  8030 raft_consensus.cc:513] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.607040  8030 leader_election.cc:304] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 2330d94672e048338dae9a27055743fc; no voters: 
I20250901 14:18:34.607591  8030 leader_election.cc:290] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:34.607767  8032 raft_consensus.cc:2802] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:34.608242  8032 raft_consensus.cc:695] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 1 LEADER]: Becoming Leader. State: Replica: 2330d94672e048338dae9a27055743fc, State: Running, Role: LEADER
I20250901 14:18:34.608924  8032 consensus_queue.cc:237] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } }
I20250901 14:18:34.609475  8030 ts_tablet_manager.cc:1428] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc: Time spent starting tablet: real 0.012s	user 0.008s	sys 0.000s
I20250901 14:18:34.614640  7875 catalog_manager.cc:5582] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc reported cstate change: term changed from 0 to 1, leader changed from <none> to 2330d94672e048338dae9a27055743fc (127.4.231.193). New cstate: current_term: 1 leader_uuid: "2330d94672e048338dae9a27055743fc" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "2330d94672e048338dae9a27055743fc" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35867 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:34.661628  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:34.683722  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:34.684469  5023 tablet_replica.cc:331] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc: stopping tablet replica
I20250901 14:18:34.685027  5023 raft_consensus.cc:2241] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:34.685497  5023 raft_consensus.cc:2270] T 904b7e2e9e61479a97bfe8c17f9e7594 P 2330d94672e048338dae9a27055743fc [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:34.687753  5023 tablet_replica.cc:331] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc: stopping tablet replica
I20250901 14:18:34.688230  5023 raft_consensus.cc:2241] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:34.688621  5023 raft_consensus.cc:2270] T 1bd974672da94d97b6b8fa251f5a2198 P 2330d94672e048338dae9a27055743fc [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:34.690696  5023 tablet_replica.cc:331] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc: stopping tablet replica
I20250901 14:18:34.691123  5023 raft_consensus.cc:2241] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:34.691532  5023 raft_consensus.cc:2270] T 230ab400346d469fa143a39342f16a20 P 2330d94672e048338dae9a27055743fc [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:34.712800  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:34.724150  5023 master.cc:561] Master@127.4.231.254:39427 shutting down...
I20250901 14:18:34.742584  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:34.743173  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:34.743564  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 818f101197104bc1ab34130bf2d9b4d3: stopping tablet replica
I20250901 14:18:34.762197  5023 master.cc:583] Master@127.4.231.254:39427 shutdown complete.
[       OK ] ClientTest.TestInsertDuplicateKeys (670 ms)
[ RUN      ] ClientTest.TestSetSessionMutationBufferMaxNum
I20250901 14:18:34.784373  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:44555
I20250901 14:18:34.785447  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:34.790259  8045 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:34.790526  8046 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:34.791549  8048 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:34.792372  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:34.793438  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:34.793669  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:34.793817  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736314793798 us; error 0 us; skew 500 ppm
I20250901 14:18:34.794305  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:34.796484  5023 webserver.cc:480] Webserver started at http://127.4.231.254:37893/ using document root <none> and password file <none>
I20250901 14:18:34.796922  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:34.797093  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:34.797330  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:34.798374  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestSetSessionMutationBufferMaxNum.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "6aa020a7b7ec4a05a6d7196f36683265"
format_stamp: "Formatted at 2025-09-01 14:18:34 on dist-test-slave-9gf0"
I20250901 14:18:34.802500  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.004s	sys 0.001s
I20250901 14:18:34.805552  8053 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:34.806216  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:34.806499  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestSetSessionMutationBufferMaxNum.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "6aa020a7b7ec4a05a6d7196f36683265"
format_stamp: "Formatted at 2025-09-01 14:18:34 on dist-test-slave-9gf0"
I20250901 14:18:34.806751  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestSetSessionMutationBufferMaxNum.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestSetSessionMutationBufferMaxNum.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestSetSessionMutationBufferMaxNum.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:34.826333  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:34.827306  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:34.866641  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:44555
I20250901 14:18:34.866732  8114 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:44555 every 8 connection(s)
I20250901 14:18:34.870282  8115 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:34.880808  8115 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265: Bootstrap starting.
I20250901 14:18:34.884889  8115 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:34.888667  8115 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265: No bootstrap required, opened a new log
I20250901 14:18:34.890548  8115 raft_consensus.cc:357] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6aa020a7b7ec4a05a6d7196f36683265" member_type: VOTER }
I20250901 14:18:34.890918  8115 raft_consensus.cc:383] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:34.891101  8115 raft_consensus.cc:738] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 6aa020a7b7ec4a05a6d7196f36683265, State: Initialized, Role: FOLLOWER
I20250901 14:18:34.891539  8115 consensus_queue.cc:260] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6aa020a7b7ec4a05a6d7196f36683265" member_type: VOTER }
I20250901 14:18:34.891940  8115 raft_consensus.cc:397] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:34.892123  8115 raft_consensus.cc:491] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:34.892323  8115 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:34.896978  8115 raft_consensus.cc:513] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6aa020a7b7ec4a05a6d7196f36683265" member_type: VOTER }
I20250901 14:18:34.897483  8115 leader_election.cc:304] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 6aa020a7b7ec4a05a6d7196f36683265; no voters: 
I20250901 14:18:34.898619  8115 leader_election.cc:290] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:34.898950  8118 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:34.900344  8118 raft_consensus.cc:695] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 1 LEADER]: Becoming Leader. State: Replica: 6aa020a7b7ec4a05a6d7196f36683265, State: Running, Role: LEADER
I20250901 14:18:34.901085  8118 consensus_queue.cc:237] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6aa020a7b7ec4a05a6d7196f36683265" member_type: VOTER }
I20250901 14:18:34.901830  8115 sys_catalog.cc:564] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:34.903985  8119 sys_catalog.cc:455] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "6aa020a7b7ec4a05a6d7196f36683265" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6aa020a7b7ec4a05a6d7196f36683265" member_type: VOTER } }
I20250901 14:18:34.904052  8120 sys_catalog.cc:455] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [sys.catalog]: SysCatalogTable state changed. Reason: New leader 6aa020a7b7ec4a05a6d7196f36683265. Latest consensus state: current_term: 1 leader_uuid: "6aa020a7b7ec4a05a6d7196f36683265" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "6aa020a7b7ec4a05a6d7196f36683265" member_type: VOTER } }
I20250901 14:18:34.904699  8119 sys_catalog.cc:458] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:34.904788  8120 sys_catalog.cc:458] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:34.908046  8124 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:34.912470  8124 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:34.918262  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:34.920365  8124 catalog_manager.cc:1349] Generated new cluster ID: 74285eb712f043e1a58b0fa5dedf45da
I20250901 14:18:34.920651  8124 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:34.937844  8124 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:34.939116  8124 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:34.954643  8124 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265: Generated new TSK 0
I20250901 14:18:34.955273  8124 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:34.985792  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:34.991318  8136 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:34.992642  8137 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:34.994930  8139 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:34.995069  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:34.996016  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:34.996194  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:34.996313  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736314996303 us; error 0 us; skew 500 ppm
I20250901 14:18:34.996783  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:34.998967  5023 webserver.cc:480] Webserver started at http://127.4.231.193:35259/ using document root <none> and password file <none>
I20250901 14:18:34.999369  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:34.999521  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:34.999727  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:35.000751  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestSetSessionMutationBufferMaxNum.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "381dbe2f496f4649b0b43e131ace72c7"
format_stamp: "Formatted at 2025-09-01 14:18:34 on dist-test-slave-9gf0"
I20250901 14:18:35.005236  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.002s	sys 0.004s
I20250901 14:18:35.008428  8144 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:35.009140  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:35.009377  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestSetSessionMutationBufferMaxNum.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "381dbe2f496f4649b0b43e131ace72c7"
format_stamp: "Formatted at 2025-09-01 14:18:34 on dist-test-slave-9gf0"
I20250901 14:18:35.009693  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestSetSessionMutationBufferMaxNum.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestSetSessionMutationBufferMaxNum.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestSetSessionMutationBufferMaxNum.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:35.020797  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:35.021814  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:35.026398  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:35.026777  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.001s	sys 0.000s
I20250901 14:18:35.027138  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:35.027391  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:35.076165  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:45565
I20250901 14:18:35.076264  8214 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:45565 every 8 connection(s)
I20250901 14:18:35.080736  8215 heartbeater.cc:344] Connected to a master server at 127.4.231.254:44555
I20250901 14:18:35.081136  8215 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:35.081918  8215 heartbeater.cc:507] Master 127.4.231.254:44555 requested a full tablet report, sending...
I20250901 14:18:35.083774  8070 ts_manager.cc:194] Registered new tserver with Master: 381dbe2f496f4649b0b43e131ace72c7 (127.4.231.193:45565)
I20250901 14:18:35.084231  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004934178s
I20250901 14:18:35.085454  8070 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:53734
I20250901 14:18:35.098155  8215 heartbeater.cc:499] Master 127.4.231.254:44555 was elected leader, sending a full tablet report...
I20250901 14:18:35.105152  8069 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:53746:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000"
  indirect_data: ""
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:35.136019  8180 tablet_service.cc:1468] Processing CreateTablet for tablet e4cfab4c3c084bf688d7c8da90c7bafa (DEFAULT_TABLE table=client-testtb [id=d86cde04345f4fbe998185d4efcaff50]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:35.136325  8179 tablet_service.cc:1468] Processing CreateTablet for tablet 88dbceeada6145cb852712935198e9bf (DEFAULT_TABLE table=client-testtb [id=d86cde04345f4fbe998185d4efcaff50]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:35.137310  8180 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet e4cfab4c3c084bf688d7c8da90c7bafa. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:35.137872  8179 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 88dbceeada6145cb852712935198e9bf. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:35.150003  8225 tablet_bootstrap.cc:492] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7: Bootstrap starting.
I20250901 14:18:35.154788  8225 tablet_bootstrap.cc:654] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:35.158607  8225 tablet_bootstrap.cc:492] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7: No bootstrap required, opened a new log
I20250901 14:18:35.158957  8225 ts_tablet_manager.cc:1397] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7: Time spent bootstrapping tablet: real 0.009s	user 0.004s	sys 0.003s
I20250901 14:18:35.160758  8225 raft_consensus.cc:357] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "381dbe2f496f4649b0b43e131ace72c7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45565 } }
I20250901 14:18:35.161135  8225 raft_consensus.cc:383] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:35.161346  8225 raft_consensus.cc:738] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 381dbe2f496f4649b0b43e131ace72c7, State: Initialized, Role: FOLLOWER
I20250901 14:18:35.161880  8225 consensus_queue.cc:260] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "381dbe2f496f4649b0b43e131ace72c7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45565 } }
I20250901 14:18:35.162297  8225 raft_consensus.cc:397] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:35.162534  8225 raft_consensus.cc:491] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:35.162775  8225 raft_consensus.cc:3058] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:35.167749  8225 raft_consensus.cc:513] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "381dbe2f496f4649b0b43e131ace72c7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45565 } }
I20250901 14:18:35.168316  8225 leader_election.cc:304] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 381dbe2f496f4649b0b43e131ace72c7; no voters: 
I20250901 14:18:35.169445  8225 leader_election.cc:290] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:35.169808  8227 raft_consensus.cc:2802] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:35.171203  8227 raft_consensus.cc:695] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 1 LEADER]: Becoming Leader. State: Replica: 381dbe2f496f4649b0b43e131ace72c7, State: Running, Role: LEADER
I20250901 14:18:35.171694  8225 ts_tablet_manager.cc:1428] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7: Time spent starting tablet: real 0.012s	user 0.009s	sys 0.004s
I20250901 14:18:35.171905  8227 consensus_queue.cc:237] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "381dbe2f496f4649b0b43e131ace72c7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45565 } }
I20250901 14:18:35.172402  8225 tablet_bootstrap.cc:492] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7: Bootstrap starting.
I20250901 14:18:35.178542  8225 tablet_bootstrap.cc:654] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:35.178591  8070 catalog_manager.cc:5582] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 reported cstate change: term changed from 0 to 1, leader changed from <none> to 381dbe2f496f4649b0b43e131ace72c7 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "381dbe2f496f4649b0b43e131ace72c7" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "381dbe2f496f4649b0b43e131ace72c7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45565 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:35.184041  8225 tablet_bootstrap.cc:492] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7: No bootstrap required, opened a new log
I20250901 14:18:35.184423  8225 ts_tablet_manager.cc:1397] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7: Time spent bootstrapping tablet: real 0.012s	user 0.010s	sys 0.000s
I20250901 14:18:35.186587  8225 raft_consensus.cc:357] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "381dbe2f496f4649b0b43e131ace72c7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45565 } }
I20250901 14:18:35.187117  8225 raft_consensus.cc:383] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:35.187366  8225 raft_consensus.cc:738] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 381dbe2f496f4649b0b43e131ace72c7, State: Initialized, Role: FOLLOWER
I20250901 14:18:35.187950  8225 consensus_queue.cc:260] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "381dbe2f496f4649b0b43e131ace72c7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45565 } }
I20250901 14:18:35.188374  8225 raft_consensus.cc:397] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:35.188587  8225 raft_consensus.cc:491] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:35.188824  8225 raft_consensus.cc:3058] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:35.194780  8225 raft_consensus.cc:513] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "381dbe2f496f4649b0b43e131ace72c7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45565 } }
I20250901 14:18:35.195300  8225 leader_election.cc:304] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 381dbe2f496f4649b0b43e131ace72c7; no voters: 
I20250901 14:18:35.195729  8225 leader_election.cc:290] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:35.195919  8227 raft_consensus.cc:2802] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:35.196365  8227 raft_consensus.cc:695] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 1 LEADER]: Becoming Leader. State: Replica: 381dbe2f496f4649b0b43e131ace72c7, State: Running, Role: LEADER
I20250901 14:18:35.197068  8227 consensus_queue.cc:237] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "381dbe2f496f4649b0b43e131ace72c7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45565 } }
I20250901 14:18:35.197510  8225 ts_tablet_manager.cc:1428] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7: Time spent starting tablet: real 0.013s	user 0.011s	sys 0.000s
I20250901 14:18:35.202790  8070 catalog_manager.cc:5582] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 reported cstate change: term changed from 0 to 1, leader changed from <none> to 381dbe2f496f4649b0b43e131ace72c7 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "381dbe2f496f4649b0b43e131ace72c7" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "381dbe2f496f4649b0b43e131ace72c7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 45565 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:35.249362  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:35.264799  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:35.265553  5023 tablet_replica.cc:331] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7: stopping tablet replica
I20250901 14:18:35.266120  5023 raft_consensus.cc:2241] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:35.266534  5023 raft_consensus.cc:2270] T 88dbceeada6145cb852712935198e9bf P 381dbe2f496f4649b0b43e131ace72c7 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:35.268714  5023 tablet_replica.cc:331] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7: stopping tablet replica
I20250901 14:18:35.269160  5023 raft_consensus.cc:2241] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:35.269629  5023 raft_consensus.cc:2270] T e4cfab4c3c084bf688d7c8da90c7bafa P 381dbe2f496f4649b0b43e131ace72c7 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:35.289322  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:35.299599  5023 master.cc:561] Master@127.4.231.254:44555 shutting down...
I20250901 14:18:35.315975  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:35.316490  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:35.316946  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 6aa020a7b7ec4a05a6d7196f36683265: stopping tablet replica
I20250901 14:18:35.326571  5023 master.cc:583] Master@127.4.231.254:44555 shutdown complete.
[       OK ] ClientTest.TestSetSessionMutationBufferMaxNum (562 ms)
[ RUN      ] ClientTest.TestAutoFlushBackgroundRandomOps
I20250901 14:18:35.346673  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:40155
I20250901 14:18:35.347676  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:35.352574  8236 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:35.354110  8239 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:35.354029  8237 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:35.354485  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:35.355619  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:35.355818  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:35.355966  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736315355949 us; error 0 us; skew 500 ppm
I20250901 14:18:35.356434  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:35.358649  5023 webserver.cc:480] Webserver started at http://127.4.231.254:46787/ using document root <none> and password file <none>
I20250901 14:18:35.359097  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:35.359272  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:35.359516  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:35.360504  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAutoFlushBackgroundRandomOps.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "ff44bfeb520d4206aff63a42d484e0a6"
format_stamp: "Formatted at 2025-09-01 14:18:35 on dist-test-slave-9gf0"
I20250901 14:18:35.364516  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:35.367614  8244 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:35.368379  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:35.368634  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAutoFlushBackgroundRandomOps.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "ff44bfeb520d4206aff63a42d484e0a6"
format_stamp: "Formatted at 2025-09-01 14:18:35 on dist-test-slave-9gf0"
I20250901 14:18:35.368870  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAutoFlushBackgroundRandomOps.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAutoFlushBackgroundRandomOps.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAutoFlushBackgroundRandomOps.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:35.384407  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:35.385353  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:35.424556  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:40155
I20250901 14:18:35.424643  8305 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:40155 every 8 connection(s)
I20250901 14:18:35.428275  8306 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:35.438437  8306 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6: Bootstrap starting.
I20250901 14:18:35.442508  8306 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:35.446264  8306 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6: No bootstrap required, opened a new log
I20250901 14:18:35.448141  8306 raft_consensus.cc:357] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ff44bfeb520d4206aff63a42d484e0a6" member_type: VOTER }
I20250901 14:18:35.448511  8306 raft_consensus.cc:383] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:35.448731  8306 raft_consensus.cc:738] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: ff44bfeb520d4206aff63a42d484e0a6, State: Initialized, Role: FOLLOWER
I20250901 14:18:35.449290  8306 consensus_queue.cc:260] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ff44bfeb520d4206aff63a42d484e0a6" member_type: VOTER }
I20250901 14:18:35.449761  8306 raft_consensus.cc:397] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:35.449975  8306 raft_consensus.cc:491] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:35.450207  8306 raft_consensus.cc:3058] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:35.454641  8306 raft_consensus.cc:513] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ff44bfeb520d4206aff63a42d484e0a6" member_type: VOTER }
I20250901 14:18:35.455169  8306 leader_election.cc:304] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: ff44bfeb520d4206aff63a42d484e0a6; no voters: 
I20250901 14:18:35.456179  8306 leader_election.cc:290] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:35.456535  8309 raft_consensus.cc:2802] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:35.457846  8309 raft_consensus.cc:695] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 1 LEADER]: Becoming Leader. State: Replica: ff44bfeb520d4206aff63a42d484e0a6, State: Running, Role: LEADER
I20250901 14:18:35.458535  8309 consensus_queue.cc:237] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ff44bfeb520d4206aff63a42d484e0a6" member_type: VOTER }
I20250901 14:18:35.459096  8306 sys_catalog.cc:564] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:35.461102  8311 sys_catalog.cc:455] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [sys.catalog]: SysCatalogTable state changed. Reason: New leader ff44bfeb520d4206aff63a42d484e0a6. Latest consensus state: current_term: 1 leader_uuid: "ff44bfeb520d4206aff63a42d484e0a6" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ff44bfeb520d4206aff63a42d484e0a6" member_type: VOTER } }
I20250901 14:18:35.461068  8310 sys_catalog.cc:455] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "ff44bfeb520d4206aff63a42d484e0a6" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ff44bfeb520d4206aff63a42d484e0a6" member_type: VOTER } }
I20250901 14:18:35.461712  8311 sys_catalog.cc:458] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:35.461908  8310 sys_catalog.cc:458] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:35.465492  8314 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:35.473026  8314 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:35.474047  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:35.482270  8314 catalog_manager.cc:1349] Generated new cluster ID: 954a9cdfc15949bc96c7e7decb29eed6
I20250901 14:18:35.482653  8314 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:35.501907  8314 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:35.503145  8314 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:35.517576  8314 catalog_manager.cc:5955] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6: Generated new TSK 0
I20250901 14:18:35.518129  8314 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:35.540838  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:35.546584  8327 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:35.547806  8328 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:35.548983  8330 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:35.550151  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:35.550873  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:35.551057  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:35.551209  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736315551193 us; error 0 us; skew 500 ppm
I20250901 14:18:35.551743  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:35.553961  5023 webserver.cc:480] Webserver started at http://127.4.231.193:44839/ using document root <none> and password file <none>
I20250901 14:18:35.554388  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:35.554569  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:35.554802  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:35.555850  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAutoFlushBackgroundRandomOps.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "40a721eb59fa46d69231fc996ad163af"
format_stamp: "Formatted at 2025-09-01 14:18:35 on dist-test-slave-9gf0"
I20250901 14:18:35.561399  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.005s	user 0.003s	sys 0.004s
I20250901 14:18:35.564498  8335 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:35.565140  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:35.565419  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAutoFlushBackgroundRandomOps.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "40a721eb59fa46d69231fc996ad163af"
format_stamp: "Formatted at 2025-09-01 14:18:35 on dist-test-slave-9gf0"
I20250901 14:18:35.565729  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAutoFlushBackgroundRandomOps.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAutoFlushBackgroundRandomOps.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAutoFlushBackgroundRandomOps.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:35.579826  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:35.580894  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:35.585389  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:35.585716  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.001s	sys 0.000s
I20250901 14:18:35.586009  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:35.586223  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:35.634332  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:38889
I20250901 14:18:35.634424  8405 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:38889 every 8 connection(s)
I20250901 14:18:35.638674  8406 heartbeater.cc:344] Connected to a master server at 127.4.231.254:40155
I20250901 14:18:35.639008  8406 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:35.639668  8406 heartbeater.cc:507] Master 127.4.231.254:40155 requested a full tablet report, sending...
I20250901 14:18:35.641448  8261 ts_manager.cc:194] Registered new tserver with Master: 40a721eb59fa46d69231fc996ad163af (127.4.231.193:38889)
I20250901 14:18:35.642127  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.0048193s
I20250901 14:18:35.643603  8261 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:58892
I20250901 14:18:35.656215  8406 heartbeater.cc:499] Master 127.4.231.254:40155 was elected leader, sending a full tablet report...
I20250901 14:18:35.662907  8260 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:58910:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:35.695654  8371 tablet_service.cc:1468] Processing CreateTablet for tablet ff4d7284147e4cdb9488e724a4bf9d81 (DEFAULT_TABLE table=client-testtb [id=ffb26ac294a8423d924597b0a2749d96]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:35.695811  8370 tablet_service.cc:1468] Processing CreateTablet for tablet 10aa1c1040634791ab24b467b4807dac (DEFAULT_TABLE table=client-testtb [id=ffb26ac294a8423d924597b0a2749d96]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:35.696710  8371 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet ff4d7284147e4cdb9488e724a4bf9d81. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:35.697237  8370 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 10aa1c1040634791ab24b467b4807dac. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:35.710353  8416 tablet_bootstrap.cc:492] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af: Bootstrap starting.
I20250901 14:18:35.715631  8416 tablet_bootstrap.cc:654] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:35.719430  8416 tablet_bootstrap.cc:492] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af: No bootstrap required, opened a new log
I20250901 14:18:35.719817  8416 ts_tablet_manager.cc:1397] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af: Time spent bootstrapping tablet: real 0.010s	user 0.005s	sys 0.003s
I20250901 14:18:35.721859  8416 raft_consensus.cc:357] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "40a721eb59fa46d69231fc996ad163af" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38889 } }
I20250901 14:18:35.722280  8416 raft_consensus.cc:383] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:35.722513  8416 raft_consensus.cc:738] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 40a721eb59fa46d69231fc996ad163af, State: Initialized, Role: FOLLOWER
I20250901 14:18:35.723019  8416 consensus_queue.cc:260] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "40a721eb59fa46d69231fc996ad163af" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38889 } }
I20250901 14:18:35.723479  8416 raft_consensus.cc:397] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:35.723702  8416 raft_consensus.cc:491] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:35.723948  8416 raft_consensus.cc:3058] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:35.729686  8416 raft_consensus.cc:513] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "40a721eb59fa46d69231fc996ad163af" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38889 } }
I20250901 14:18:35.730284  8416 leader_election.cc:304] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 40a721eb59fa46d69231fc996ad163af; no voters: 
I20250901 14:18:35.731566  8416 leader_election.cc:290] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:35.731837  8418 raft_consensus.cc:2802] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:35.733803  8416 ts_tablet_manager.cc:1428] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af: Time spent starting tablet: real 0.014s	user 0.007s	sys 0.009s
I20250901 14:18:35.733780  8418 raft_consensus.cc:695] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 1 LEADER]: Becoming Leader. State: Replica: 40a721eb59fa46d69231fc996ad163af, State: Running, Role: LEADER
I20250901 14:18:35.734699  8416 tablet_bootstrap.cc:492] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af: Bootstrap starting.
I20250901 14:18:35.734457  8418 consensus_queue.cc:237] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "40a721eb59fa46d69231fc996ad163af" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38889 } }
I20250901 14:18:35.740217  8416 tablet_bootstrap.cc:654] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:35.740345  8261 catalog_manager.cc:5582] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af reported cstate change: term changed from 0 to 1, leader changed from <none> to 40a721eb59fa46d69231fc996ad163af (127.4.231.193). New cstate: current_term: 1 leader_uuid: "40a721eb59fa46d69231fc996ad163af" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "40a721eb59fa46d69231fc996ad163af" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38889 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:35.744606  8416 tablet_bootstrap.cc:492] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af: No bootstrap required, opened a new log
I20250901 14:18:35.745036  8416 ts_tablet_manager.cc:1397] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af: Time spent bootstrapping tablet: real 0.011s	user 0.005s	sys 0.003s
I20250901 14:18:35.747220  8416 raft_consensus.cc:357] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "40a721eb59fa46d69231fc996ad163af" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38889 } }
I20250901 14:18:35.747604  8416 raft_consensus.cc:383] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:35.747831  8416 raft_consensus.cc:738] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 40a721eb59fa46d69231fc996ad163af, State: Initialized, Role: FOLLOWER
I20250901 14:18:35.748317  8416 consensus_queue.cc:260] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "40a721eb59fa46d69231fc996ad163af" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38889 } }
I20250901 14:18:35.748762  8416 raft_consensus.cc:397] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:35.748986  8416 raft_consensus.cc:491] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:35.749212  8416 raft_consensus.cc:3058] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:35.754046  8416 raft_consensus.cc:513] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "40a721eb59fa46d69231fc996ad163af" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38889 } }
I20250901 14:18:35.754616  8416 leader_election.cc:304] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 40a721eb59fa46d69231fc996ad163af; no voters: 
I20250901 14:18:35.755093  8416 leader_election.cc:290] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:35.755250  8418 raft_consensus.cc:2802] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:35.755713  8418 raft_consensus.cc:695] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 1 LEADER]: Becoming Leader. State: Replica: 40a721eb59fa46d69231fc996ad163af, State: Running, Role: LEADER
I20250901 14:18:35.756379  8418 consensus_queue.cc:237] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "40a721eb59fa46d69231fc996ad163af" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38889 } }
I20250901 14:18:35.757077  8416 ts_tablet_manager.cc:1428] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af: Time spent starting tablet: real 0.012s	user 0.012s	sys 0.000s
I20250901 14:18:35.761926  8261 catalog_manager.cc:5582] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af reported cstate change: term changed from 0 to 1, leader changed from <none> to 40a721eb59fa46d69231fc996ad163af (127.4.231.193). New cstate: current_term: 1 leader_uuid: "40a721eb59fa46d69231fc996ad163af" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "40a721eb59fa46d69231fc996ad163af" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38889 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:35.778041  5023 test_util.cc:276] Using random seed: 202433492
I20250901 14:18:37.935593  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:37.965736  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:37.966423  5023 tablet_replica.cc:331] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af: stopping tablet replica
I20250901 14:18:37.967187  5023 raft_consensus.cc:2241] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:37.967861  5023 raft_consensus.cc:2270] T ff4d7284147e4cdb9488e724a4bf9d81 P 40a721eb59fa46d69231fc996ad163af [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:37.970494  5023 tablet_replica.cc:331] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af: stopping tablet replica
I20250901 14:18:37.971297  5023 raft_consensus.cc:2241] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:37.971864  5023 raft_consensus.cc:2270] T 10aa1c1040634791ab24b467b4807dac P 40a721eb59fa46d69231fc996ad163af [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:37.999799  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:38.018848  5023 master.cc:561] Master@127.4.231.254:40155 shutting down...
I20250901 14:18:38.044901  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:38.045595  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:38.045998  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P ff44bfeb520d4206aff63a42d484e0a6: stopping tablet replica
I20250901 14:18:38.066809  5023 master.cc:583] Master@127.4.231.254:40155 shutdown complete.
[       OK ] ClientTest.TestAutoFlushBackgroundRandomOps (2748 ms)
[ RUN      ] ClientTest.TestWriteWithBadColumn
I20250901 14:18:38.097393  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:33343
I20250901 14:18:38.098851  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:38.104547  8431 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:38.105104  8432 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:38.108186  8435 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:38.108701  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:38.109691  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:38.109941  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:38.110121  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736318110104 us; error 0 us; skew 500 ppm
I20250901 14:18:38.110808  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:38.113626  5023 webserver.cc:480] Webserver started at http://127.4.231.254:39901/ using document root <none> and password file <none>
I20250901 14:18:38.114231  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:38.114459  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:38.114754  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:38.116192  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestWriteWithBadColumn.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "8203d750381e4cf98483514847f88a42"
format_stamp: "Formatted at 2025-09-01 14:18:38 on dist-test-slave-9gf0"
I20250901 14:18:38.122166  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.005s	user 0.007s	sys 0.000s
I20250901 14:18:38.126492  8439 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:38.127348  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.004s	sys 0.000s
I20250901 14:18:38.127619  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestWriteWithBadColumn.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "8203d750381e4cf98483514847f88a42"
format_stamp: "Formatted at 2025-09-01 14:18:38 on dist-test-slave-9gf0"
I20250901 14:18:38.127899  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestWriteWithBadColumn.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestWriteWithBadColumn.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestWriteWithBadColumn.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:38.138723  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:38.139698  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:38.199787  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:33343
I20250901 14:18:38.199874  8500 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:33343 every 8 connection(s)
I20250901 14:18:38.204362  8501 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:38.220346  8501 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42: Bootstrap starting.
I20250901 14:18:38.226377  8501 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:38.232434  8501 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42: No bootstrap required, opened a new log
I20250901 14:18:38.235175  8501 raft_consensus.cc:357] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8203d750381e4cf98483514847f88a42" member_type: VOTER }
I20250901 14:18:38.235697  8501 raft_consensus.cc:383] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:38.235988  8501 raft_consensus.cc:738] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8203d750381e4cf98483514847f88a42, State: Initialized, Role: FOLLOWER
I20250901 14:18:38.236646  8501 consensus_queue.cc:260] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8203d750381e4cf98483514847f88a42" member_type: VOTER }
I20250901 14:18:38.237262  8501 raft_consensus.cc:397] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:38.237582  8501 raft_consensus.cc:491] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:38.237906  8501 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:38.277415  8501 raft_consensus.cc:513] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8203d750381e4cf98483514847f88a42" member_type: VOTER }
I20250901 14:18:38.278260  8501 leader_election.cc:304] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 8203d750381e4cf98483514847f88a42; no voters: 
I20250901 14:18:38.279690  8501 leader_election.cc:290] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:38.280089  8504 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:38.282711  8504 raft_consensus.cc:695] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 1 LEADER]: Becoming Leader. State: Replica: 8203d750381e4cf98483514847f88a42, State: Running, Role: LEADER
I20250901 14:18:38.283402  8504 consensus_queue.cc:237] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8203d750381e4cf98483514847f88a42" member_type: VOTER }
I20250901 14:18:38.283844  8501 sys_catalog.cc:564] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:38.286545  8506 sys_catalog.cc:455] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [sys.catalog]: SysCatalogTable state changed. Reason: New leader 8203d750381e4cf98483514847f88a42. Latest consensus state: current_term: 1 leader_uuid: "8203d750381e4cf98483514847f88a42" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8203d750381e4cf98483514847f88a42" member_type: VOTER } }
I20250901 14:18:38.287153  8506 sys_catalog.cc:458] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:38.286571  8505 sys_catalog.cc:455] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "8203d750381e4cf98483514847f88a42" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8203d750381e4cf98483514847f88a42" member_type: VOTER } }
I20250901 14:18:38.288187  8505 sys_catalog.cc:458] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:38.290799  8509 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:38.296617  8509 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:38.301780  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:38.306169  8509 catalog_manager.cc:1349] Generated new cluster ID: 36c53e6498804eafbd24d6665562cb69
I20250901 14:18:38.306480  8509 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:38.335227  8509 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:38.336526  8509 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:38.357630  8509 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42: Generated new TSK 0
I20250901 14:18:38.358248  8509 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:38.369729  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:38.376546  8522 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:38.377504  8523 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:38.382750  8525 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:38.382865  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:38.383941  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:38.384131  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:38.384294  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736318384275 us; error 0 us; skew 500 ppm
I20250901 14:18:38.384810  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:38.387413  5023 webserver.cc:480] Webserver started at http://127.4.231.193:36505/ using document root <none> and password file <none>
I20250901 14:18:38.388008  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:38.388240  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:38.388571  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:38.389999  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestWriteWithBadColumn.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "bd953893ebad476c972c0d4f4f830037"
format_stamp: "Formatted at 2025-09-01 14:18:38 on dist-test-slave-9gf0"
I20250901 14:18:38.395639  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.005s	user 0.005s	sys 0.000s
I20250901 14:18:38.399744  8530 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:38.400569  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:38.400846  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestWriteWithBadColumn.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "bd953893ebad476c972c0d4f4f830037"
format_stamp: "Formatted at 2025-09-01 14:18:38 on dist-test-slave-9gf0"
I20250901 14:18:38.401100  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestWriteWithBadColumn.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestWriteWithBadColumn.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestWriteWithBadColumn.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:38.418776  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:38.419767  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:38.437737  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:38.438078  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:38.438387  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:38.438597  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:38.502743  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:39093
I20250901 14:18:38.502848  8600 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:39093 every 8 connection(s)
I20250901 14:18:38.508553  8601 heartbeater.cc:344] Connected to a master server at 127.4.231.254:33343
I20250901 14:18:38.508934  8601 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:38.509739  8601 heartbeater.cc:507] Master 127.4.231.254:33343 requested a full tablet report, sending...
I20250901 14:18:38.511648  8456 ts_manager.cc:194] Registered new tserver with Master: bd953893ebad476c972c0d4f4f830037 (127.4.231.193:39093)
I20250901 14:18:38.511909  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004927316s
I20250901 14:18:38.513278  8456 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:54890
I20250901 14:18:38.525888  8601 heartbeater.cc:499] Master 127.4.231.254:33343 was elected leader, sending a full tablet report...
I20250901 14:18:38.537055  8456 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:54910:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:38.574669  8566 tablet_service.cc:1468] Processing CreateTablet for tablet a528d82174d74d0ea13ba0f0d276f81c (DEFAULT_TABLE table=client-testtb [id=67bd543ec1e74d559938db8996c92be6]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:38.574865  8565 tablet_service.cc:1468] Processing CreateTablet for tablet 1cea51c4d9334e50a4a3806b1f58d283 (DEFAULT_TABLE table=client-testtb [id=67bd543ec1e74d559938db8996c92be6]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:38.576119  8565 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 1cea51c4d9334e50a4a3806b1f58d283. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:38.576822  8566 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet a528d82174d74d0ea13ba0f0d276f81c. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:38.598712  8611 tablet_bootstrap.cc:492] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037: Bootstrap starting.
I20250901 14:18:38.604490  8611 tablet_bootstrap.cc:654] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:38.610251  8611 tablet_bootstrap.cc:492] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037: No bootstrap required, opened a new log
I20250901 14:18:38.610716  8611 ts_tablet_manager.cc:1397] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037: Time spent bootstrapping tablet: real 0.012s	user 0.010s	sys 0.000s
I20250901 14:18:38.612816  8611 raft_consensus.cc:357] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd953893ebad476c972c0d4f4f830037" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 39093 } }
I20250901 14:18:38.613245  8611 raft_consensus.cc:383] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:38.613473  8611 raft_consensus.cc:738] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: bd953893ebad476c972c0d4f4f830037, State: Initialized, Role: FOLLOWER
I20250901 14:18:38.614023  8611 consensus_queue.cc:260] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd953893ebad476c972c0d4f4f830037" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 39093 } }
I20250901 14:18:38.614472  8611 raft_consensus.cc:397] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:38.614684  8611 raft_consensus.cc:491] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:38.614934  8611 raft_consensus.cc:3058] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:38.688592  8611 raft_consensus.cc:513] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd953893ebad476c972c0d4f4f830037" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 39093 } }
I20250901 14:18:38.689478  8611 leader_election.cc:304] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: bd953893ebad476c972c0d4f4f830037; no voters: 
I20250901 14:18:38.695127  8611 leader_election.cc:290] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:38.695688  8613 raft_consensus.cc:2802] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:38.700338  8613 raft_consensus.cc:695] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 1 LEADER]: Becoming Leader. State: Replica: bd953893ebad476c972c0d4f4f830037, State: Running, Role: LEADER
I20250901 14:18:38.700718  8611 ts_tablet_manager.cc:1428] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037: Time spent starting tablet: real 0.090s	user 0.011s	sys 0.009s
I20250901 14:18:38.701504  8611 tablet_bootstrap.cc:492] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037: Bootstrap starting.
I20250901 14:18:38.701122  8613 consensus_queue.cc:237] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd953893ebad476c972c0d4f4f830037" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 39093 } }
I20250901 14:18:38.708264  8611 tablet_bootstrap.cc:654] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:38.707930  8456 catalog_manager.cc:5582] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 reported cstate change: term changed from 0 to 1, leader changed from <none> to bd953893ebad476c972c0d4f4f830037 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "bd953893ebad476c972c0d4f4f830037" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd953893ebad476c972c0d4f4f830037" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 39093 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:38.717855  8611 tablet_bootstrap.cc:492] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037: No bootstrap required, opened a new log
I20250901 14:18:38.718331  8611 ts_tablet_manager.cc:1397] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037: Time spent bootstrapping tablet: real 0.017s	user 0.014s	sys 0.000s
I20250901 14:18:38.720870  8611 raft_consensus.cc:357] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd953893ebad476c972c0d4f4f830037" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 39093 } }
I20250901 14:18:38.721433  8611 raft_consensus.cc:383] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:38.721796  8611 raft_consensus.cc:738] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: bd953893ebad476c972c0d4f4f830037, State: Initialized, Role: FOLLOWER
I20250901 14:18:38.722689  8611 consensus_queue.cc:260] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd953893ebad476c972c0d4f4f830037" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 39093 } }
I20250901 14:18:38.723579  8611 raft_consensus.cc:397] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:38.723875  8611 raft_consensus.cc:491] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:38.724189  8611 raft_consensus.cc:3058] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:38.795073  8611 raft_consensus.cc:513] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd953893ebad476c972c0d4f4f830037" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 39093 } }
I20250901 14:18:38.795764  8611 leader_election.cc:304] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: bd953893ebad476c972c0d4f4f830037; no voters: 
I20250901 14:18:38.796279  8611 leader_election.cc:290] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:38.796437  8615 raft_consensus.cc:2802] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:38.797467  8615 raft_consensus.cc:695] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 1 LEADER]: Becoming Leader. State: Replica: bd953893ebad476c972c0d4f4f830037, State: Running, Role: LEADER
I20250901 14:18:38.798118  8611 ts_tablet_manager.cc:1428] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037: Time spent starting tablet: real 0.079s	user 0.013s	sys 0.003s
I20250901 14:18:38.798316  8615 consensus_queue.cc:237] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd953893ebad476c972c0d4f4f830037" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 39093 } }
I20250901 14:18:38.805585  8456 catalog_manager.cc:5582] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 reported cstate change: term changed from 0 to 1, leader changed from <none> to bd953893ebad476c972c0d4f4f830037 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "bd953893ebad476c972c0d4f4f830037" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "bd953893ebad476c972c0d4f4f830037" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 39093 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:38.825912  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:38.848512  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:38.849227  5023 tablet_replica.cc:331] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037: stopping tablet replica
I20250901 14:18:38.849884  5023 raft_consensus.cc:2241] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:38.850344  5023 raft_consensus.cc:2270] T a528d82174d74d0ea13ba0f0d276f81c P bd953893ebad476c972c0d4f4f830037 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:38.852636  5023 tablet_replica.cc:331] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037: stopping tablet replica
I20250901 14:18:38.853098  5023 raft_consensus.cc:2241] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:38.853502  5023 raft_consensus.cc:2270] T 1cea51c4d9334e50a4a3806b1f58d283 P bd953893ebad476c972c0d4f4f830037 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:38.874419  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:38.885407  5023 master.cc:561] Master@127.4.231.254:33343 shutting down...
I20250901 14:18:38.906595  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:38.907152  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:38.907445  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 8203d750381e4cf98483514847f88a42: stopping tablet replica
I20250901 14:18:38.927244  5023 master.cc:583] Master@127.4.231.254:33343 shutdown complete.
[       OK ] ClientTest.TestWriteWithBadColumn (853 ms)
[ RUN      ] ClientTest.TestCompactionOfSoftDeletedAndRecalledTable
I20250901 14:18:38.948993  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:38409
I20250901 14:18:38.950069  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:38.955272  8620 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:38.955521  8621 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:38.958748  8623 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:38.959358  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:38.960359  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:38.960567  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:38.960721  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736318960703 us; error 0 us; skew 500 ppm
I20250901 14:18:38.961215  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:38.965108  5023 webserver.cc:480] Webserver started at http://127.4.231.254:46729/ using document root <none> and password file <none>
I20250901 14:18:38.965623  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:38.965799  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:38.966048  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:38.967090  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCompactionOfSoftDeletedAndRecalledTable.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "7460618b2360411ebca2aff387330c24"
format_stamp: "Formatted at 2025-09-01 14:18:38 on dist-test-slave-9gf0"
I20250901 14:18:38.971487  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:38.974854  8628 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:38.975641  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:38.976008  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCompactionOfSoftDeletedAndRecalledTable.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "7460618b2360411ebca2aff387330c24"
format_stamp: "Formatted at 2025-09-01 14:18:38 on dist-test-slave-9gf0"
I20250901 14:18:38.976385  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCompactionOfSoftDeletedAndRecalledTable.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCompactionOfSoftDeletedAndRecalledTable.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCompactionOfSoftDeletedAndRecalledTable.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:39.021291  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:39.022655  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:39.074956  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:38409
I20250901 14:18:39.075125  8690 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:38409 every 8 connection(s)
I20250901 14:18:39.079926  8691 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:39.096342  8691 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24: Bootstrap starting.
I20250901 14:18:39.101835  8691 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:39.106796  8691 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24: No bootstrap required, opened a new log
I20250901 14:18:39.109027  8691 raft_consensus.cc:357] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7460618b2360411ebca2aff387330c24" member_type: VOTER }
I20250901 14:18:39.109405  8691 raft_consensus.cc:383] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:39.109680  8691 raft_consensus.cc:738] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 7460618b2360411ebca2aff387330c24, State: Initialized, Role: FOLLOWER
I20250901 14:18:39.110284  8691 consensus_queue.cc:260] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7460618b2360411ebca2aff387330c24" member_type: VOTER }
I20250901 14:18:39.110774  8691 raft_consensus.cc:397] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:39.110987  8691 raft_consensus.cc:491] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:39.111274  8691 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:39.201000  8691 raft_consensus.cc:513] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7460618b2360411ebca2aff387330c24" member_type: VOTER }
I20250901 14:18:39.201876  8691 leader_election.cc:304] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 7460618b2360411ebca2aff387330c24; no voters: 
I20250901 14:18:39.206034  8691 leader_election.cc:290] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:39.206482  8694 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:39.208169  8694 raft_consensus.cc:695] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 1 LEADER]: Becoming Leader. State: Replica: 7460618b2360411ebca2aff387330c24, State: Running, Role: LEADER
I20250901 14:18:39.208930  8694 consensus_queue.cc:237] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7460618b2360411ebca2aff387330c24" member_type: VOTER }
I20250901 14:18:39.209396  8691 sys_catalog.cc:564] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:39.212239  8696 sys_catalog.cc:455] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [sys.catalog]: SysCatalogTable state changed. Reason: New leader 7460618b2360411ebca2aff387330c24. Latest consensus state: current_term: 1 leader_uuid: "7460618b2360411ebca2aff387330c24" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7460618b2360411ebca2aff387330c24" member_type: VOTER } }
I20250901 14:18:39.212113  8695 sys_catalog.cc:455] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "7460618b2360411ebca2aff387330c24" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7460618b2360411ebca2aff387330c24" member_type: VOTER } }
I20250901 14:18:39.212908  8696 sys_catalog.cc:458] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:39.212985  8695 sys_catalog.cc:458] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:39.221858  8699 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:39.230064  8699 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:39.236714  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:39.241427  8699 catalog_manager.cc:1349] Generated new cluster ID: f6a59b42b6454f5a9e27443ec9798315
I20250901 14:18:39.241765  8699 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:39.272099  8699 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:39.273324  8699 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:39.291898  8699 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24: Generated new TSK 0
I20250901 14:18:39.292528  8699 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:39.303936  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:39.310328  8712 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:39.311582  8713 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:39.314291  8715 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:39.314543  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:39.315409  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:39.315618  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:39.315773  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736319315754 us; error 0 us; skew 500 ppm
I20250901 14:18:39.316260  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:39.319134  5023 webserver.cc:480] Webserver started at http://127.4.231.193:42449/ using document root <none> and password file <none>
I20250901 14:18:39.319591  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:39.319773  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:39.320021  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:39.321035  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCompactionOfSoftDeletedAndRecalledTable.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "a4abc3e032f84236a923ddee04759bce"
format_stamp: "Formatted at 2025-09-01 14:18:39 on dist-test-slave-9gf0"
I20250901 14:18:39.325589  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.004s	sys 0.000s
I20250901 14:18:39.329099  8720 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:39.330000  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:39.330353  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCompactionOfSoftDeletedAndRecalledTable.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "a4abc3e032f84236a923ddee04759bce"
format_stamp: "Formatted at 2025-09-01 14:18:39 on dist-test-slave-9gf0"
I20250901 14:18:39.330713  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCompactionOfSoftDeletedAndRecalledTable.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCompactionOfSoftDeletedAndRecalledTable.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCompactionOfSoftDeletedAndRecalledTable.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:39.350579  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:39.351871  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:39.364691  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:39.365131  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.001s	sys 0.000s
I20250901 14:18:39.365509  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:39.365800  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:39.448238  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:46485
I20250901 14:18:39.448303  8790 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:46485 every 8 connection(s)
I20250901 14:18:39.453790  8791 heartbeater.cc:344] Connected to a master server at 127.4.231.254:38409
I20250901 14:18:39.454120  8791 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:39.454792  8791 heartbeater.cc:507] Master 127.4.231.254:38409 requested a full tablet report, sending...
I20250901 14:18:39.456588  8645 ts_manager.cc:194] Registered new tserver with Master: a4abc3e032f84236a923ddee04759bce (127.4.231.193:46485)
I20250901 14:18:39.457319  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004836855s
I20250901 14:18:39.458734  8645 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:56836
I20250901 14:18:39.472431  8791 heartbeater.cc:499] Master 127.4.231.254:38409 was elected leader, sending a full tablet report...
I20250901 14:18:39.480799  8645 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:56856:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:39.530462  8756 tablet_service.cc:1468] Processing CreateTablet for tablet 49ccb1e6bc134619bd05d86dc04c391a (DEFAULT_TABLE table=client-testtb [id=d3dd5a698fd54c4490587f8f7ca9a9e9]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:39.530884  8755 tablet_service.cc:1468] Processing CreateTablet for tablet dc80b4f4e6624cdcb18f2ab9f2f51962 (DEFAULT_TABLE table=client-testtb [id=d3dd5a698fd54c4490587f8f7ca9a9e9]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:39.531510  8756 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 49ccb1e6bc134619bd05d86dc04c391a. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:39.532050  8755 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet dc80b4f4e6624cdcb18f2ab9f2f51962. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:39.546144  8801 tablet_bootstrap.cc:492] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce: Bootstrap starting.
I20250901 14:18:39.550029  8801 tablet_bootstrap.cc:654] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:39.553728  8801 tablet_bootstrap.cc:492] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce: No bootstrap required, opened a new log
I20250901 14:18:39.554101  8801 ts_tablet_manager.cc:1397] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce: Time spent bootstrapping tablet: real 0.008s	user 0.005s	sys 0.003s
I20250901 14:18:39.555826  8801 raft_consensus.cc:357] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4abc3e032f84236a923ddee04759bce" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46485 } }
I20250901 14:18:39.556231  8801 raft_consensus.cc:383] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:39.556442  8801 raft_consensus.cc:738] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: a4abc3e032f84236a923ddee04759bce, State: Initialized, Role: FOLLOWER
I20250901 14:18:39.556933  8801 consensus_queue.cc:260] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4abc3e032f84236a923ddee04759bce" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46485 } }
I20250901 14:18:39.557390  8801 raft_consensus.cc:397] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:39.557658  8801 raft_consensus.cc:491] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:39.557909  8801 raft_consensus.cc:3058] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:39.607342  8801 raft_consensus.cc:513] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4abc3e032f84236a923ddee04759bce" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46485 } }
I20250901 14:18:39.608007  8801 leader_election.cc:304] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: a4abc3e032f84236a923ddee04759bce; no voters: 
I20250901 14:18:39.609067  8801 leader_election.cc:290] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:39.609436  8803 raft_consensus.cc:2802] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:39.611828  8801 ts_tablet_manager.cc:1428] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce: Time spent starting tablet: real 0.057s	user 0.012s	sys 0.000s
I20250901 14:18:39.611716  8803 raft_consensus.cc:695] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 1 LEADER]: Becoming Leader. State: Replica: a4abc3e032f84236a923ddee04759bce, State: Running, Role: LEADER
I20250901 14:18:39.612596  8801 tablet_bootstrap.cc:492] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce: Bootstrap starting.
I20250901 14:18:39.612478  8803 consensus_queue.cc:237] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4abc3e032f84236a923ddee04759bce" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46485 } }
I20250901 14:18:39.618315  8801 tablet_bootstrap.cc:654] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:39.619716  8644 catalog_manager.cc:5582] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce reported cstate change: term changed from 0 to 1, leader changed from <none> to a4abc3e032f84236a923ddee04759bce (127.4.231.193). New cstate: current_term: 1 leader_uuid: "a4abc3e032f84236a923ddee04759bce" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4abc3e032f84236a923ddee04759bce" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46485 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:39.622666  8801 tablet_bootstrap.cc:492] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce: No bootstrap required, opened a new log
I20250901 14:18:39.623085  8801 ts_tablet_manager.cc:1397] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce: Time spent bootstrapping tablet: real 0.011s	user 0.005s	sys 0.004s
I20250901 14:18:39.625424  8801 raft_consensus.cc:357] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4abc3e032f84236a923ddee04759bce" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46485 } }
I20250901 14:18:39.626027  8801 raft_consensus.cc:383] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:39.626361  8801 raft_consensus.cc:738] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: a4abc3e032f84236a923ddee04759bce, State: Initialized, Role: FOLLOWER
I20250901 14:18:39.627031  8801 consensus_queue.cc:260] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4abc3e032f84236a923ddee04759bce" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46485 } }
I20250901 14:18:39.627521  8801 raft_consensus.cc:397] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:39.627827  8801 raft_consensus.cc:491] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:39.628154  8801 raft_consensus.cc:3058] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:39.708382  8801 raft_consensus.cc:513] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4abc3e032f84236a923ddee04759bce" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46485 } }
I20250901 14:18:39.709009  8801 leader_election.cc:304] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: a4abc3e032f84236a923ddee04759bce; no voters: 
I20250901 14:18:39.709508  8801 leader_election.cc:290] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:39.709714  8803 raft_consensus.cc:2802] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:39.710201  8803 raft_consensus.cc:695] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 1 LEADER]: Becoming Leader. State: Replica: a4abc3e032f84236a923ddee04759bce, State: Running, Role: LEADER
I20250901 14:18:39.710898  8803 consensus_queue.cc:237] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4abc3e032f84236a923ddee04759bce" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46485 } }
I20250901 14:18:39.712230  8801 ts_tablet_manager.cc:1428] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce: Time spent starting tablet: real 0.089s	user 0.011s	sys 0.004s
I20250901 14:18:39.716609  8644 catalog_manager.cc:5582] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce reported cstate change: term changed from 0 to 1, leader changed from <none> to a4abc3e032f84236a923ddee04759bce (127.4.231.193). New cstate: current_term: 1 leader_uuid: "a4abc3e032f84236a923ddee04759bce" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4abc3e032f84236a923ddee04759bce" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46485 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:39.769740  8644 catalog_manager.cc:2482] Servicing SoftDeleteTable request from {username='slave'} at 127.0.0.1:56856:
table { table_name: "client-testtb" } modify_external_catalogs: true reserve_seconds: 60000
I20250901 14:18:39.779073  8644 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:56856:
table { table_name: "client-testtb" } new_extra_configs { key: "kudu.table.disable_compaction" value: "true" }
I20250901 14:18:39.794651  8811 tablet.cc:1722] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce: Alter schema from (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
) version 0 to (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
) version 1
I20250901 14:18:39.796787  8812 tablet.cc:1722] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce: Alter schema from (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
) version 0 to (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
) version 1
I20250901 14:18:39.803877  8645 catalog_manager.cc:2869] Servicing RecallDeletedTableRpc request from {username='slave'} at 127.0.0.1:56856:
table { table_id: "d3dd5a698fd54c4490587f8f7ca9a9e9" }
I20250901 14:18:39.816036  8645 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:56856:
table { table_id: "d3dd5a698fd54c4490587f8f7ca9a9e9" } new_extra_configs { key: "kudu.table.disable_compaction" value: "false" }
I20250901 14:18:39.830107  8812 tablet.cc:1722] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce: Alter schema from (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
) version 1 to (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
) version 2
I20250901 14:18:39.830564  8811 tablet.cc:1722] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce: Alter schema from (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
) version 1 to (
    10:key INT32 NOT NULL,
    11:int_val INT32 NOT NULL,
    12:string_val STRING NULLABLE,
    13:non_null_with_default INT32 NOT NULL,
    PRIMARY KEY (key)
) version 2
I20250901 14:18:39.837011  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:39.862296  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:39.862988  5023 tablet_replica.cc:331] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce: stopping tablet replica
I20250901 14:18:39.863492  5023 raft_consensus.cc:2241] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:39.863898  5023 raft_consensus.cc:2270] T dc80b4f4e6624cdcb18f2ab9f2f51962 P a4abc3e032f84236a923ddee04759bce [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:39.913156  5023 tablet_replica.cc:331] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce: stopping tablet replica
I20250901 14:18:39.913774  5023 raft_consensus.cc:2241] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:39.914224  5023 raft_consensus.cc:2270] T 49ccb1e6bc134619bd05d86dc04c391a P a4abc3e032f84236a923ddee04759bce [term 1 FOLLOWER]: Raft consensus is shut down!
==================
WARNING: ThreadSanitizer: data race (pid=5023)
  Write of size 8 at 0x7b4400151748 by main thread:
    #0 std::__1::__vector_base<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::__destruct_at_end(kudu::MemTracker**) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:429:12 (libkudu_util.so+0x3a6c30)
    #1 std::__1::__vector_base<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libkudu_util.so+0x3a6b44)
    #2 std::__1::__vector_base<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libkudu_util.so+0x3a693b)
    #3 std::__1::vector<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libkudu_util.so+0x39ebf1)
    #4 kudu::MemTracker::~MemTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:83:1 (libkudu_util.so+0x39e7a3)
    #5 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3503:23 (libkudu_util.so+0x3a5a65)
    #6 std::__1::__shared_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3341:9 (libtablet_test_util.so+0x35465)
    #7 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3383:27 (libtablet_test_util.so+0x35409)
    #8 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #9 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #10 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #11 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #12 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #13 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #14 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #15 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #16 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #17 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #18 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #19 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #20 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #21 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #22 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #23 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #24 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #25 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #26 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #27 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #28 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #29 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #30 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #31 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #32 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #33 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #34 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #35 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #36 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #37 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #38 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #39 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #40 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #41 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #42 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #43 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #44 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #45 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #46 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous read of size 8 at 0x7b4400151748 by thread T114 (mutexes: write M920558637112824480):
    #0 std::__1::vector<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::end() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:1536:30 (libkudu_util.so+0x39fb0a)
    #1 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:284:22 (libkudu_util.so+0x39d1e6)
    #2 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #3 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #4 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #5 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #6 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #7 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #8 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #9 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #10 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #11 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #12 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #13 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #14 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #15 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #16 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #17 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #18 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #19 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #20 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Location is heap block of size 264 at 0x7b44001516c0 allocated by thread T88:
    #0 operator new(unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:64 (client-test+0x4f1827)
    #1 std::__1::__libcpp_allocate(unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:253:10 (libkudu_util.so+0x29a206)
    #2 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::allocate(unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1789:34 (libkudu_util.so+0x3a5345)
    #3 std::__1::enable_if<!(is_array<long&>::value), std::__1::shared_ptr<long&> >::type std::__1::make_shared<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler&&...) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4290:45 (libkudu_util.so+0x3a5121)
    #4 std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/make_shared.h:61:12 (libkudu_util.so+0x39ea55)
    #5 kudu::MemTracker::CreateTracker(long, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:54:34 (libkudu_util.so+0x39ca7e)
    #6 kudu::tablet::OpTracker::StartMemoryTracking(std::__1::shared_ptr<kudu::MemTracker> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:311:20 (libtablet.so+0x41dc9b)
    #7 kudu::tablet::TabletReplica::Start(kudu::consensus::ConsensusBootstrapInfo const&, std::__1::shared_ptr<kudu::tablet::Tablet>, kudu::clock::Clock*, std::__1::shared_ptr<kudu::rpc::Messenger>, scoped_refptr<kudu::rpc::ResultTracker>, scoped_refptr<kudu::log::Log>, kudu::ThreadPool*, kudu::DnsResolver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:277:19 (libtablet.so+0x3d0656)
    #8 kudu::tserver::TSTabletManager::OpenTablet(scoped_refptr<kudu::tablet::TabletReplica> const&, scoped_refptr<kudu::tserver::TransitionInProgressDeleter> const&, std::__1::atomic<int>*, std::__1::atomic<int>*, kudu::Timer*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1430:18 (libtserver.so+0x2b9b20)
    #9 kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:703:3 (libtserver.so+0x2cda91)
    #10 decltype(std::__1::forward<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11&>(fp)()) std::__1::__invoke<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11&>(kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, 
std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtserver.so+0x2cda39)
    #11 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11&>(kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtserver.so+0x2cd9c9)
    #12 std::__1::__function::__alloc_func<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11, std::__1::allocator<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtserver.so+0x2cd991)
    #13 std::__1::__function::__func<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11, std::__1::allocator<kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*)::$_11>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtserver.so+0x2ccc6d)
    #14 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #15 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #16 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #17 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #18 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #19 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #20 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #21 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #22 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #23 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #24 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  Thread T88 'tablet-open [wo' (tid=8801, finished) created by thread T188 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tserver::TSTabletManager::CreateNewTablet(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Partition const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, kudu::Schema const&, kudu::PartitionSchema const&, kudu::consensus::RaftConfigPB, std::__1::optional<kudu::TableExtraConfigPB>, std::__1::optional<std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > >, std::__1::optional<kudu::TableTypePB>, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:703:3 (libtserver.so+0x2b6d12)
    #7 kudu::tserver::TabletServiceAdminImpl::CreateTablet(kudu::tserver::CreateTabletRequestPB const*, kudu::tserver::CreateTabletResponsePB*, kudu::rpc::RpcContext*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_service.cc:1475:34 (libtserver.so+0x267199)
    #8 kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1::operator()(google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*) const /home/jenkins-slave/workspace/build_and_test_flaky/build/tsan/src/kudu/tserver/tserver_admin.service.cc:189:13 (libtserver_admin_proto.so+0x68be4)
    #9 decltype(std::__1::forward<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1&>(fp)(std::__1::forward<google::protobuf::Message const*>(fp0), std::__1::forward<google::protobuf::Message*>(fp0), std::__1::forward<kudu::rpc::RpcContext*>(fp0))) std::__1::__invoke<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1&, google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*>(kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1&, google::protobuf::Message const*&&, google::protobuf::Message*&&, kudu::rpc::RpcContext*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtserver_admin_proto.so+0x68b72)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1&, google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*>(kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1&, google::protobuf::Message const*&&, google::protobuf::Message*&&, kudu::rpc::RpcContext*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtserver_admin_proto.so+0x68aa1)
    #11 std::__1::__function::__alloc_func<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1, std::__1::allocator<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1>, void (google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*)>::operator()(google::protobuf::Message const*&&, google::protobuf::Message*&&, kudu::rpc::RpcContext*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtserver_admin_proto.so+0x68a1c)
    #12 std::__1::__function::__func<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1, std::__1::allocator<kudu::tserver::TabletServerAdminServiceIf::TabletServerAdminServiceIf(scoped_refptr<kudu::MetricEntity> const&, scoped_refptr<kudu::rpc::ResultTracker> const&)::$_1>, void (google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*)>::operator()(google::protobuf::Message const*&&, google::protobuf::Message*&&, kudu::rpc::RpcContext*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtserver_admin_proto.so+0x67cd2)
    #13 std::__1::__function::__value_func<void (google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*)>::operator()(google::protobuf::Message const*&&, google::protobuf::Message*&&, kudu::rpc::RpcContext*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libkrpc.so+0x1f3c4c)
    #14 std::__1::function<void (google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*)>::operator()(google::protobuf::Message const*, google::protobuf::Message*, kudu::rpc::RpcContext*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libkrpc.so+0x1f3086)
    #15 kudu::rpc::GeneratedServiceIf::Handle(kudu::rpc::InboundCall*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/rpc/service_if.cc:137:3 (libkrpc.so+0x1f2a2f)
    #16 kudu::rpc::ServicePool::RunThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/rpc/service_pool.cc:229:15 (libkrpc.so+0x1f5d43)
    #17 kudu::rpc::ServicePool::Init(int)::$_0::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/rpc/service_pool.cc:92:5 (libkrpc.so+0x1f7061)
    #18 decltype(std::__1::forward<kudu::rpc::ServicePool::Init(int)::$_0&>(fp)()) std::__1::__invoke<kudu::rpc::ServicePool::Init(int)::$_0&>(kudu::rpc::ServicePool::Init(int)::$_0&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkrpc.so+0x1f7019)
    #19 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::rpc::ServicePool::Init(int)::$_0&>(kudu::rpc::ServicePool::Init(int)::$_0&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkrpc.so+0x1f6fa9)
    #20 std::__1::__function::__alloc_func<kudu::rpc::ServicePool::Init(int)::$_0, std::__1::allocator<kudu::rpc::ServicePool::Init(int)::$_0>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkrpc.so+0x1f6f71)
    #21 std::__1::__function::__func<kudu::rpc::ServicePool::Init(int)::$_0, std::__1::allocator<kudu::rpc::ServicePool::Init(int)::$_0>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkrpc.so+0x1f626d)
    #22 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #23 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #24 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

SUMMARY: ThreadSanitizer: data race /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:429:12 in std::__1::__vector_base<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::__destruct_at_end(kudu::MemTracker**)
==================
==================
WARNING: ThreadSanitizer: data race (pid=5023)
  Write of size 8 at 0x7b08000a5380 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<kudu::MemTracker*>::deallocate(kudu::MemTracker**, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a6d0a)
    #6 std::__1::allocator_traits<std::__1::allocator<kudu::MemTracker*> >::deallocate(std::__1::allocator<kudu::MemTracker*>&, kudu::MemTracker**, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1533:14 (libkudu_util.so+0x3a6b89)
    #7 std::__1::__vector_base<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:466:9 (libkudu_util.so+0x3a6967)
    #8 std::__1::vector<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libkudu_util.so+0x39ebf1)
    #9 kudu::MemTracker::~MemTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:83:1 (libkudu_util.so+0x39e7a3)
    #10 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3503:23 (libkudu_util.so+0x3a5a65)
    #11 std::__1::__shared_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3341:9 (libtablet_test_util.so+0x35465)
    #12 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3383:27 (libtablet_test_util.so+0x35409)
    #13 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #14 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #15 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #16 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #17 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #18 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #19 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #20 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #21 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #22 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #23 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #24 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #25 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #26 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #27 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #28 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #29 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #30 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #31 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #32 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #33 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #34 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #35 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #36 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #37 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #38 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #39 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #40 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #41 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #42 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #43 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #44 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #45 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #46 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #47 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #48 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #49 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #50 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #51 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous read of size 8 at 0x7b08000a5380 by thread T114 (mutexes: write M920558637112824480):
    #0 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:285:5 (libkudu_util.so+0x39d222)
    #1 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #2 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #3 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #4 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #5 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #6 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #7 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #8 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #9 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #10 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #11 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #12 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #13 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #14 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #15 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #16 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #17 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #18 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #19 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

SUMMARY: ThreadSanitizer: data race /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 in operator delete(void*)
==================
==================
WARNING: ThreadSanitizer: data race (pid=5023)
  Write of size 8 at 0x7b4400150e70 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::deallocate(std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a612d)
    #6 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3515:9 (libkudu_util.so+0x3a5ad0)
    #7 std::__1::__shared_weak_count::__release_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/libcxx/src/memory.cpp (libc++.so.1+0xbde46)
    #8 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3384:9 (libtablet_test_util.so+0x35415)
    #9 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #10 kudu::MemTracker::~MemTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:83:1 (libkudu_util.so+0x39e7ab)
    #11 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3503:23 (libkudu_util.so+0x3a5a65)
    #12 std::__1::__shared_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3341:9 (libtablet_test_util.so+0x35465)
    #13 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3383:27 (libtablet_test_util.so+0x35409)
    #14 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #15 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #16 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #17 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #18 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #19 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #20 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #21 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #22 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #23 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #24 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #25 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #26 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #27 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #28 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #29 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #30 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #31 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #32 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #33 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #34 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #35 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #36 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #37 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #38 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #39 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #40 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #41 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #42 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #43 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #44 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #45 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #46 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #47 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #48 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #49 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #50 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #51 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #52 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous atomic write of size 8 at 0x7b4400150e70 by thread T114 (mutexes: write M920558637112824480):
    #0 __tsan_atomic64_fetch_add /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp:620 (client-test+0x4c53d5)
    #1 long std::__1::__cxx_atomic_fetch_add<long>(std::__1::__cxx_atomic_base_impl<long>*, long, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1042:12 (libkudu_util.so+0x2a5163)
    #2 std::__1::__atomic_base<long, true>::fetch_add(long, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1706:17 (libkudu_util.so+0x2a50a9)
    #3 kudu::HighWaterMark::IncrementBy(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:70:30 (libkudu_util.so+0x39fbc2)
    #4 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:285:27 (libkudu_util.so+0x39d231)
    #5 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #6 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #7 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #8 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #13 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #14 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #15 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #16 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #17 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #18 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #19 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #20 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #21 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #22 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #23 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

SUMMARY: ThreadSanitizer: data race /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 in operator delete(void*)
==================
==================
WARNING: ThreadSanitizer: data race (pid=5023)
  Write of size 8 at 0x7b4400150e78 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::deallocate(std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a612d)
    #6 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3515:9 (libkudu_util.so+0x3a5ad0)
    #7 std::__1::__shared_weak_count::__release_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/libcxx/src/memory.cpp (libc++.so.1+0xbde46)
    #8 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3384:9 (libtablet_test_util.so+0x35415)
    #9 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #10 kudu::MemTracker::~MemTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:83:1 (libkudu_util.so+0x39e7ab)
    #11 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3503:23 (libkudu_util.so+0x3a5a65)
    #12 std::__1::__shared_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3341:9 (libtablet_test_util.so+0x35465)
    #13 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3383:27 (libtablet_test_util.so+0x35409)
    #14 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #15 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #16 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #17 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #18 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #19 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #20 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #21 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #22 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #23 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #24 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #25 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #26 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #27 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #28 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #29 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #30 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #31 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #32 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #33 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #34 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #35 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #36 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #37 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #38 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #39 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #40 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #41 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #42 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #43 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #44 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #45 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #46 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #47 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #48 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #49 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #50 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #51 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #52 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous atomic write of size 8 at 0x7b4400150e78 by thread T114 (mutexes: write M920558637112824480):
    #0 __tsan_atomic64_compare_exchange_val /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp:850 (client-test+0x4cc4a8)
    #1 bool std::__1::__cxx_atomic_compare_exchange_weak<long>(std::__1::__cxx_atomic_base_impl<long>*, long*, long, std::__1::memory_order, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1031:12 (liblog.so+0x8bce4)
    #2 std::__1::__atomic_base<long, false>::compare_exchange_weak(long&, long, std::__1::memory_order, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1622:17 (liblog.so+0x8bb09)
    #3 void kudu::AtomicStoreMax<long>(std::__1::atomic<long>&, long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/atomic-utils.h:33:13 (liblog.so+0x7ea60)
    #4 kudu::HighWaterMark::UpdateMax(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:80:5 (libkudu_util.so+0x3a4d64)
    #5 kudu::HighWaterMark::IncrementBy(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:70:5 (libkudu_util.so+0x39fbcd)
    #6 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:285:27 (libkudu_util.so+0x39d231)
    #7 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #8 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #9 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #10 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #11 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #12 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #13 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #14 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #15 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #16 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #17 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #18 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #19 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #21 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #22 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #23 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #24 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #25 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

SUMMARY: ThreadSanitizer: data race /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 in operator delete(void*)
==================
==================
WARNING: ThreadSanitizer: data race (pid=5023)
  Write of size 8 at 0x7b4400151730 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::deallocate(std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a612d)
    #6 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3515:9 (libkudu_util.so+0x3a5ad0)
    #7 std::__1::__shared_weak_count::__release_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/libcxx/src/memory.cpp (libc++.so.1+0xbde46)
    #8 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3384:9 (libtablet_test_util.so+0x35415)
    #9 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #10 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #11 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #12 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #13 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #14 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #15 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #16 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #17 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #18 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #19 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #20 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #21 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #22 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #23 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #24 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #25 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #26 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #27 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #28 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #29 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #30 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #31 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #32 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #33 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #34 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #35 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #36 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #37 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #38 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #39 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #40 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #41 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #42 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #43 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #44 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #45 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #46 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #47 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous atomic write of size 8 at 0x7b4400151730 by thread T114 (mutexes: write M920558637112824480):
    #0 __tsan_atomic64_fetch_add /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp:620 (client-test+0x4c53d5)
    #1 long std::__1::__cxx_atomic_fetch_add<long>(std::__1::__cxx_atomic_base_impl<long>*, long, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1042:12 (libkudu_util.so+0x2a5163)
    #2 std::__1::__atomic_base<long, true>::fetch_add(long, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1706:17 (libkudu_util.so+0x2a50a9)
    #3 kudu::HighWaterMark::IncrementBy(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:70:30 (libkudu_util.so+0x39fbc2)
    #4 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:285:27 (libkudu_util.so+0x39d231)
    #5 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #6 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #7 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #8 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #13 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #14 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #15 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #16 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #17 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #18 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #19 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #20 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #21 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #22 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #23 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

SUMMARY: ThreadSanitizer: data race /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 in operator delete(void*)
==================
==================
WARNING: ThreadSanitizer: data race (pid=5023)
  Write of size 8 at 0x7b4400151738 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::deallocate(std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a612d)
    #6 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3515:9 (libkudu_util.so+0x3a5ad0)
    #7 std::__1::__shared_weak_count::__release_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/libcxx/src/memory.cpp (libc++.so.1+0xbde46)
    #8 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3384:9 (libtablet_test_util.so+0x35415)
    #9 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #10 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #11 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #12 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #13 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #14 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #15 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #16 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #17 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #18 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #19 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #20 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #21 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #22 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #23 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #24 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #25 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #26 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #27 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #28 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #29 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #30 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #31 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #32 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #33 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #34 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #35 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #36 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #37 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #38 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #39 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #40 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #41 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #42 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #43 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #44 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #45 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #46 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #47 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous atomic write of size 8 at 0x7b4400151738 by thread T114 (mutexes: write M920558637112824480):
    #0 __tsan_atomic64_compare_exchange_val /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp:850 (client-test+0x4cc4a8)
    #1 bool std::__1::__cxx_atomic_compare_exchange_weak<long>(std::__1::__cxx_atomic_base_impl<long>*, long*, long, std::__1::memory_order, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1031:12 (liblog.so+0x8bce4)
    #2 std::__1::__atomic_base<long, false>::compare_exchange_weak(long&, long, std::__1::memory_order, std::__1::memory_order) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/atomic:1622:17 (liblog.so+0x8bb09)
    #3 void kudu::AtomicStoreMax<long>(std::__1::atomic<long>&, long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/atomic-utils.h:33:13 (liblog.so+0x7ea60)
    #4 kudu::HighWaterMark::UpdateMax(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:80:5 (libkudu_util.so+0x3a4d64)
    #5 kudu::HighWaterMark::IncrementBy(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/high_water_mark.h:70:5 (libkudu_util.so+0x39fbcd)
    #6 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:285:27 (libkudu_util.so+0x39d231)
    #7 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #8 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #9 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #10 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #11 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #12 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #13 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #14 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #15 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #16 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #17 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #18 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #19 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #21 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #22 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #23 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #24 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #25 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

SUMMARY: ThreadSanitizer: data race /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 in operator delete(void*)
==================
==================
WARNING: ThreadSanitizer: data race (pid=5023)
  Write of size 8 at 0x7b4400151740 by main thread:
    #0 operator delete(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 (client-test+0x4f20f9)
    #1 std::__1::_DeallocateCaller::__do_call(void*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:334:12 (client-test+0x610399)
    #2 std::__1::_DeallocateCaller::__do_deallocate_handle_size(void*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:292:12 (client-test+0x610339)
    #3 std::__1::_DeallocateCaller::__do_deallocate_handle_size_align(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:268:14 (libkudu_util.so+0x29a9a2)
    #4 std::__1::__libcpp_deallocate(void*, unsigned long, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/new:340:3 (libkudu_util.so+0x29a939)
    #5 std::__1::allocator<std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> > >::deallocate(std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >*, unsigned long) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1798:10 (libkudu_util.so+0x3a612d)
    #6 std::__1::__shared_ptr_emplace<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler, std::__1::allocator<std::__1::shared_ptr<kudu::MemTracker> enable_make_shared<kudu::MemTracker>::make_shared<long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&>(long&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::shared_ptr<kudu::MemTracker>&)::make_shared_enabler> >::__on_zero_shared_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3515:9 (libkudu_util.so+0x3a5ad0)
    #7 std::__1::__shared_weak_count::__release_weak() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/libcxx/src/memory.cpp (libc++.so.1+0xbde46)
    #8 std::__1::__shared_weak_count::__release_shared() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:3384:9 (libtablet_test_util.so+0x35415)
    #9 std::__1::shared_ptr<kudu::MemTracker>::~shared_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:4098:19 (libtablet_test_util.so+0x31bf8)
    #10 kudu::tablet::OpTracker::~OpTracker() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:142:1 (libtablet.so+0x41c814)
    #11 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:202:1 (libtablet.so+0x3cf5f5)
    #12 kudu::tablet::TabletReplica::~TabletReplica() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:197:33 (libtablet.so+0x3cf809)
    #13 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::DeleteInternal(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:153:44 (client-test+0x687b27)
    #14 kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica>::Destruct(kudu::tablet::TabletReplica const*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:116:5 (client-test+0x687ae9)
    #15 kudu::RefCountedThreadSafe<kudu::tablet::TabletReplica, kudu::DefaultRefCountedThreadSafeTraits<kudu::tablet::TabletReplica> >::Release() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:144:7 (client-test+0x687aa9)
    #16 scoped_refptr<kudu::tablet::TabletReplica>::~scoped_refptr() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/ref_counted.h:266:13 (client-test+0x5f264a)
    #17 std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >::destroy(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1811:92 (libmaster.so+0x3bc5d9)
    #18 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::integral_constant<bool, true>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1703:21 (libmaster.so+0x3bc5b0)
    #19 void std::__1::allocator_traits<std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::destroy<scoped_refptr<kudu::tablet::TabletReplica> >(std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> >&, scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:1544:14 (libmaster.so+0x3bc570)
    #20 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::__destruct_at_end(scoped_refptr<kudu::tablet::TabletReplica>*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:428:9 (libtserver.so+0x2b1026)
    #21 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::clear() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:371:29 (libtserver.so+0x2b0f44)
    #22 std::__1::__vector_base<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~__vector_base() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:465:9 (libtserver.so+0x2b0cfb)
    #23 std::__1::vector<scoped_refptr<kudu::tablet::TabletReplica>, std::__1::allocator<scoped_refptr<kudu::tablet::TabletReplica> > >::~vector() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:557:5 (libtserver.so+0x2893e1)
    #24 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1572:1 (libtserver.so+0x2b434e)
    #25 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #26 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #27 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #28 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #29 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #30 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #31 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #32 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #33 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #34 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #35 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #36 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #37 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #38 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #39 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #40 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #41 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #42 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #43 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #44 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #45 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #46 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #47 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Previous read of size 8 at 0x7b4400151740 by thread T114 (mutexes: write M920558637112824480):
    #0 std::__1::vector<kudu::MemTracker*, std::__1::allocator<kudu::MemTracker*> >::begin() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/vector:1520:30 (libkudu_util.so+0x39fac9)
    #1 kudu::MemTracker::Release(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/mem_tracker.cc:284:22 (libkudu_util.so+0x39d1da)
    #2 kudu::tablet::OpTracker::Release(kudu::tablet::OpDriver*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:239:19 (libtablet.so+0x41d1e0)
    #3 kudu::tablet::OpDriver::Finalize() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:606:16 (libtablet.so+0x416242)
    #4 kudu::tablet::OpDriver::ApplyTask() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:577:5 (libtablet.so+0x4159d3)
    #5 kudu::tablet::OpDriver::ApplyAsync()::$_2::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:65 (libtablet.so+0x419be1)
    #6 decltype(std::__1::forward<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(fp)()) std::__1::__invoke<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x419b99)
    #7 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::ApplyAsync()::$_2&>(kudu::tablet::OpDriver::ApplyAsync()::$_2&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x419b29)
    #8 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x419af1)
    #9 std::__1::__function::__func<kudu::tablet::OpDriver::ApplyAsync()::$_2, std::__1::allocator<kudu::tablet::OpDriver::ApplyAsync()::$_2>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x418d1d)
    #10 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #11 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #12 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #13 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #14 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #15 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #16 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #17 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #18 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #19 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #20 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

  As if synchronized via sleep:
    #0 nanosleep /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:363 (client-test+0x497362)
    #1 base::SleepForNanoseconds(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/gutil/sysinfo.cc:96:10 (libgutil.so+0xb5eba)
    #2 kudu::SleepFor(kudu::MonoDelta const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/monotime.cc:264:3 (libkudu_util.so+0x3d65a6)
    #3 kudu::tablet::OpTracker::WaitForAllToFinish(kudu::MonoDelta const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:298:5 (libtablet.so+0x41dac0)
    #4 kudu::tablet::OpTracker::WaitForAllToFinish() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_tracker.cc:260:3 (libtablet.so+0x41d49e)
    #5 kudu::tablet::TabletReplica::Stop() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:350:17 (libtablet.so+0x3d1307)
    #6 kudu::tablet::TabletReplica::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet_replica.cc:380:3 (libtablet.so+0x3d194e)
    #7 kudu::tserver::TSTabletManager::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/ts_tablet_manager.cc:1545:14 (libtserver.so+0x2b4247)
    #8 kudu::tserver::TabletServer::ShutdownImpl() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.cc:189:22 (libtserver.so+0x255fbe)
    #9 kudu::tserver::TabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/tablet_server.h:67:5 (libtserver.so+0x25b789)
    #10 kudu::tserver::MiniTabletServer::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tserver/mini_tablet_server.cc:124:14 (libtserver.so+0x313532)
    #11 kudu::cluster::InternalMiniCluster::ShutdownNodes(kudu::cluster::ClusterNodes) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:242:22 (libmini_cluster.so+0xd7b27)
    #12 kudu::cluster::MiniCluster::Shutdown() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/mini_cluster.h:84:5 (client-test+0x5fba14)
    #13 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:98:3 (libmini_cluster.so+0xd6523)
    #14 kudu::cluster::InternalMiniCluster::~InternalMiniCluster() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/mini-cluster/internal_mini_cluster.cc:97:45 (libmini_cluster.so+0xd65b9)
    #15 std::__1::default_delete<kudu::cluster::InternalMiniCluster>::operator()(kudu::cluster::InternalMiniCluster*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2262:5 (client-test+0x6073d7)
    #16 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::reset(kudu::cluster::InternalMiniCluster*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2517:7 (client-test+0x60733d)
    #17 std::__1::unique_ptr<kudu::cluster::InternalMiniCluster, std::__1::default_delete<kudu::cluster::InternalMiniCluster> >::~unique_ptr() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/memory:2471:19 (client-test+0x6072cb)
    #18 kudu::client::ClientTest::~ClientTest() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:240:7 (client-test+0x603f06)
    #19 kudu::client::ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test::~ClientTest_TestCompactionOfSoftDeletedAndRecalledTable_Test() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:5596:1 (client-test+0x603279)
    #20 testing::Test::DeleteSelf_() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/include/gtest/gtest.h:318:24 (libgtest.so.1.12.1+0x65467)
    #21 void testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x64e2f)
    #22 void testing::internal::HandleExceptionsInMethodIfSupported<testing::Test, void>(testing::Test*, void (testing::Test::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x64e2f)
    #23 testing::TestInfo::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2859:5 (libgtest.so.1.12.1+0x43dc2)
    #24 testing::TestSuite::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:3012:30 (libgtest.so.1.12.1+0x44d24)
    #25 testing::internal::UnitTestImpl::RunAllTests() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5870:44 (libgtest.so.1.12.1+0x59814)
    #26 bool testing::internal::HandleSehExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2599:10 (libgtest.so.1.12.1+0x65cef)
    #27 bool testing::internal::HandleExceptionsInMethodIfSupported<testing::internal::UnitTestImpl, bool>(testing::internal::UnitTestImpl*, bool (testing::internal::UnitTestImpl::*)(), char const*) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:2635:14 (libgtest.so.1.12.1+0x65cef)
    #28 testing::UnitTest::Run() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/googletest-release-1.12.1/googletest/src/gtest.cc:5444:10 (libgtest.so.1.12.1+0x58dcc)
    #29 RUN_ALL_TESTS() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/gtest/gtest.h:2293:73 (client-test+0x6b312b)
    #30 main /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/test_main.cc:115:10 (client-test+0x6b1d6c)

  Mutex M920558637112824480 is already destroyed.

  Thread T114 'apply [worker]-' (tid=8811, running) created by thread T163 at:
    #0 pthread_create /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp:966 (client-test+0x475e35)
    #1 kudu::Thread::StartThread(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, unsigned long, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:643:15 (libkudu_util.so+0x4422c8)
    #2 kudu::Thread::Create(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >, std::__1::function<void ()>, scoped_refptr<kudu::Thread>*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.h:147:12 (libmaster.so+0x2d4af9)
    #3 kudu::ThreadPool::CreateThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:814:10 (libkudu_util.so+0x45e94b)
    #4 kudu::ThreadPool::DoSubmit(std::__1::function<void ()>, kudu::ThreadPoolToken*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:599:21 (libkudu_util.so+0x45cbef)
    #5 kudu::ThreadPool::Submit(std::__1::function<void ()>) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:491:10 (libkudu_util.so+0x45eeb2)
    #6 kudu::tablet::OpDriver::ApplyAsync() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:524:23 (libtablet.so+0x414d70)
    #7 kudu::tablet::OpDriver::ReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:474:5 (libtablet.so+0x415309)
    #8 kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/ops/op_driver.cc:186:21 (libtablet.so+0x41759a)
    #9 decltype(std::__1::forward<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&>(fp)(std::__1::forward<kudu::Status const&>(fp0))) std::__1::__invoke<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libtablet.so+0x417528)
    #10 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&>(kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0&, kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libtablet.so+0x4174a8)
    #11 std::__1::__function::__alloc_func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libtablet.so+0x417463)
    #12 std::__1::__function::__func<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0, std::__1::allocator<kudu::tablet::OpDriver::Init(std::__1::unique_ptr<kudu::tablet::Op, std::__1::default_delete<kudu::tablet::Op> >, kudu::consensus::DriverType)::$_0>, void (kudu::Status const&)>::operator()(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libtablet.so+0x41667c)
    #13 std::__1::__function::__value_func<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libmaster.so+0x3ea2a6)
    #14 std::__1::function<void (kudu::Status const&)>::operator()(kudu::Status const&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libmaster.so+0x3e4af8)
    #15 kudu::consensus::ConsensusRound::NotifyReplicationFinished(kudu::Status const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:3315:3 (libconsensus.so+0x1aa280)
    #16 kudu::consensus::PendingRounds::AdvanceCommittedIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/pending_rounds.cc:195:12 (libconsensus.so+0x181963)
    #17 kudu::consensus::RaftConsensus::NotifyCommitIndex(long) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/raft_consensus.cc:914:3 (libconsensus.so+0x19b254)
    #18 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x138604)
    #19 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&>(fp)(std::__1::forward<kudu::consensus::PeerMessageQueueObserver*>(fp0))) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1385b3)
    #20 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)&, kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x138528)
    #21 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x1384e3)
    #22 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*), std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const::'lambda'(kudu::consensus::PeerMessageQueueObserver*)>, void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x1377cc)
    #23 std::__1::__function::__value_func<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*&&) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (libconsensus.so+0x161466)
    #24 std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)>::operator()(kudu::consensus::PeerMessageQueueObserver*) const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (libconsensus.so+0x14942b)
    #25 kudu::consensus::PeerMessageQueue::NotifyObserversTask(std::__1::function<void (kudu::consensus::PeerMessageQueueObserver*)> const&) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1564:5 (libconsensus.so+0x133a8d)
    #26 kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/consensus/consensus_queue.cc:1493:3 (libconsensus.so+0x137465)
    #27 decltype(std::__1::forward<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(fp)()) std::__1::__invoke<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libconsensus.so+0x1373f9)
    #28 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&>(kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libconsensus.so+0x137389)
    #29 std::__1::__function::__alloc_func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libconsensus.so+0x137351)
    #30 std::__1::__function::__func<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3, std::__1::allocator<kudu::consensus::PeerMessageQueue::NotifyObserversOfCommitIndexChange(long)::$_3>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libconsensus.so+0x13664d)
    #31 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #32 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #33 kudu::ThreadPool::DispatchThread() /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:733:7 (libkudu_util.so+0x45f714)
    #34 kudu::ThreadPool::CreateThread()::$_1::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/threadpool.cc:815:48 (libkudu_util.so+0x461a61)
    #35 decltype(std::__1::forward<kudu::ThreadPool::CreateThread()::$_1&>(fp)()) std::__1::__invoke<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/type_traits:3905:1 (libkudu_util.so+0x461a19)
    #36 void std::__1::__invoke_void_return_wrapper<void>::__call<kudu::ThreadPool::CreateThread()::$_1&>(kudu::ThreadPool::CreateThread()::$_1&) /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/__functional_base:348:9 (libkudu_util.so+0x4619a9)
    #37 std::__1::__function::__alloc_func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1557:16 (libkudu_util.so+0x461971)
    #38 std::__1::__function::__func<kudu::ThreadPool::CreateThread()::$_1, std::__1::allocator<kudu::ThreadPool::CreateThread()::$_1>, void ()>::operator()() /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1731:12 (libkudu_util.so+0x460c6d)
    #39 std::__1::__function::__value_func<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:1884:16 (client-test+0x6b33c4)
    #40 std::__1::function<void ()>::operator()() const /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/installed/tsan/include/c++/v1/functional:2556:12 (client-test+0x6b3209)
    #41 kudu::Thread::SuperviseThread(void*) /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/util/thread.cc:694:3 (libkudu_util.so+0x442a1a)

SUMMARY: ThreadSanitizer: data race /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/llvm-11.0.0.src/projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp:126 in operator delete(void*)
==================
I20250901 14:18:40.318013  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:40.328388  5023 master.cc:561] Master@127.4.231.254:38409 shutting down...
I20250901 14:18:40.344010  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:40.344527  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:40.344815  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 7460618b2360411ebca2aff387330c24: stopping tablet replica
I20250901 14:18:40.362334  5023 master.cc:583] Master@127.4.231.254:38409 shutdown complete.
[       OK ] ClientTest.TestCompactionOfSoftDeletedAndRecalledTable (1435 ms)
[ RUN      ] ClientTest.TestRandomWriteOperation
I20250901 14:18:40.384812  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:43385
I20250901 14:18:40.385895  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:40.390667  8813 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:40.390810  8814 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:40.392247  8816 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:40.393203  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:40.393945  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:40.394129  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:40.394273  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736320394257 us; error 0 us; skew 500 ppm
I20250901 14:18:40.394754  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:40.396910  5023 webserver.cc:480] Webserver started at http://127.4.231.254:33051/ using document root <none> and password file <none>
I20250901 14:18:40.397341  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:40.397521  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:40.397815  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:40.398804  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomWriteOperation.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "e9e304157fa14519bb3ac423dc5a0284"
format_stamp: "Formatted at 2025-09-01 14:18:40 on dist-test-slave-9gf0"
I20250901 14:18:40.403014  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:40.406168  8821 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:40.406944  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:40.407179  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomWriteOperation.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "e9e304157fa14519bb3ac423dc5a0284"
format_stamp: "Formatted at 2025-09-01 14:18:40 on dist-test-slave-9gf0"
I20250901 14:18:40.407426  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomWriteOperation.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomWriteOperation.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomWriteOperation.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:40.424809  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:40.425837  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:40.466135  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:43385
I20250901 14:18:40.466213  8882 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:43385 every 8 connection(s)
I20250901 14:18:40.469777  8883 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:40.479887  8883 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284: Bootstrap starting.
I20250901 14:18:40.483938  8883 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:40.487746  8883 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284: No bootstrap required, opened a new log
I20250901 14:18:40.489720  8883 raft_consensus.cc:357] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e9e304157fa14519bb3ac423dc5a0284" member_type: VOTER }
I20250901 14:18:40.490092  8883 raft_consensus.cc:383] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:40.490330  8883 raft_consensus.cc:738] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: e9e304157fa14519bb3ac423dc5a0284, State: Initialized, Role: FOLLOWER
I20250901 14:18:40.490932  8883 consensus_queue.cc:260] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e9e304157fa14519bb3ac423dc5a0284" member_type: VOTER }
I20250901 14:18:40.491374  8883 raft_consensus.cc:397] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:40.491591  8883 raft_consensus.cc:491] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:40.491863  8883 raft_consensus.cc:3058] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:40.496404  8883 raft_consensus.cc:513] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e9e304157fa14519bb3ac423dc5a0284" member_type: VOTER }
I20250901 14:18:40.496939  8883 leader_election.cc:304] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: e9e304157fa14519bb3ac423dc5a0284; no voters: 
I20250901 14:18:40.498102  8883 leader_election.cc:290] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:40.498399  8886 raft_consensus.cc:2802] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:40.499753  8886 raft_consensus.cc:695] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 1 LEADER]: Becoming Leader. State: Replica: e9e304157fa14519bb3ac423dc5a0284, State: Running, Role: LEADER
I20250901 14:18:40.500383  8886 consensus_queue.cc:237] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e9e304157fa14519bb3ac423dc5a0284" member_type: VOTER }
I20250901 14:18:40.500984  8883 sys_catalog.cc:564] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:40.505604  8888 sys_catalog.cc:455] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [sys.catalog]: SysCatalogTable state changed. Reason: New leader e9e304157fa14519bb3ac423dc5a0284. Latest consensus state: current_term: 1 leader_uuid: "e9e304157fa14519bb3ac423dc5a0284" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e9e304157fa14519bb3ac423dc5a0284" member_type: VOTER } }
I20250901 14:18:40.506573  8888 sys_catalog.cc:458] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:40.511029  8887 sys_catalog.cc:455] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "e9e304157fa14519bb3ac423dc5a0284" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "e9e304157fa14519bb3ac423dc5a0284" member_type: VOTER } }
I20250901 14:18:40.512387  8887 sys_catalog.cc:458] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:40.513104  8894 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:40.517558  8894 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:40.520877  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:40.525918  8894 catalog_manager.cc:1349] Generated new cluster ID: 8ac5e49137f34a0991e1e12a981f409e
I20250901 14:18:40.526225  8894 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:40.541419  8894 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:40.542685  8894 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:40.564021  8894 catalog_manager.cc:5955] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284: Generated new TSK 0
I20250901 14:18:40.564599  8894 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:40.587539  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:40.592846  8904 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:40.593981  8905 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:40.595321  8907 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:40.596145  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:40.596827  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:40.597002  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:40.597157  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736320597143 us; error 0 us; skew 500 ppm
I20250901 14:18:40.597645  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:40.599747  5023 webserver.cc:480] Webserver started at http://127.4.231.193:41403/ using document root <none> and password file <none>
I20250901 14:18:40.600164  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:40.600313  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:40.600524  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:40.601519  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomWriteOperation.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "1d18bafb099e478680f64718ea2d764e"
format_stamp: "Formatted at 2025-09-01 14:18:40 on dist-test-slave-9gf0"
I20250901 14:18:40.605741  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.000s
I20250901 14:18:40.608747  8912 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:40.609609  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:40.609867  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomWriteOperation.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "1d18bafb099e478680f64718ea2d764e"
format_stamp: "Formatted at 2025-09-01 14:18:40 on dist-test-slave-9gf0"
I20250901 14:18:40.610124  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomWriteOperation.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomWriteOperation.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestRandomWriteOperation.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:40.623546  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:40.624537  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:40.629113  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:40.629581  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.001s	sys 0.000s
I20250901 14:18:40.629981  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:40.630240  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:40.676545  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:42067
I20250901 14:18:40.676633  8982 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:42067 every 8 connection(s)
I20250901 14:18:40.681136  8983 heartbeater.cc:344] Connected to a master server at 127.4.231.254:43385
I20250901 14:18:40.681478  8983 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:40.682364  8983 heartbeater.cc:507] Master 127.4.231.254:43385 requested a full tablet report, sending...
I20250901 14:18:40.684057  8838 ts_manager.cc:194] Registered new tserver with Master: 1d18bafb099e478680f64718ea2d764e (127.4.231.193:42067)
I20250901 14:18:40.684470  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004780878s
I20250901 14:18:40.685665  8838 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:37630
I20250901 14:18:40.698083  8983 heartbeater.cc:499] Master 127.4.231.254:43385 was elected leader, sending a full tablet report...
I20250901 14:18:40.703686  8837 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:37660:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:40.734232  8948 tablet_service.cc:1468] Processing CreateTablet for tablet 28a6dbaad5cb4d0d83abdcb3fba6fa01 (DEFAULT_TABLE table=client-testtb [id=447561aa3a7d48f8bcc98b5e770c1808]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:40.734436  8947 tablet_service.cc:1468] Processing CreateTablet for tablet 713d90bced3a4f86a9762b35d6d3801f (DEFAULT_TABLE table=client-testtb [id=447561aa3a7d48f8bcc98b5e770c1808]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:40.735242  8948 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 28a6dbaad5cb4d0d83abdcb3fba6fa01. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:40.735723  8947 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 713d90bced3a4f86a9762b35d6d3801f. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:40.748427  8993 tablet_bootstrap.cc:492] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e: Bootstrap starting.
I20250901 14:18:40.752470  8993 tablet_bootstrap.cc:654] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:40.756636  8993 tablet_bootstrap.cc:492] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e: No bootstrap required, opened a new log
I20250901 14:18:40.757009  8993 ts_tablet_manager.cc:1397] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e: Time spent bootstrapping tablet: real 0.009s	user 0.007s	sys 0.000s
I20250901 14:18:40.758759  8993 raft_consensus.cc:357] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d18bafb099e478680f64718ea2d764e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 42067 } }
I20250901 14:18:40.759132  8993 raft_consensus.cc:383] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:40.759372  8993 raft_consensus.cc:738] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 1d18bafb099e478680f64718ea2d764e, State: Initialized, Role: FOLLOWER
I20250901 14:18:40.759907  8993 consensus_queue.cc:260] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d18bafb099e478680f64718ea2d764e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 42067 } }
I20250901 14:18:40.760510  8993 raft_consensus.cc:397] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:40.760813  8993 raft_consensus.cc:491] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:40.761133  8993 raft_consensus.cc:3058] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:40.765715  8993 raft_consensus.cc:513] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d18bafb099e478680f64718ea2d764e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 42067 } }
I20250901 14:18:40.766261  8993 leader_election.cc:304] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 1d18bafb099e478680f64718ea2d764e; no voters: 
I20250901 14:18:40.767696  8993 leader_election.cc:290] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:40.768011  8995 raft_consensus.cc:2802] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:40.770355  8995 raft_consensus.cc:695] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 1 LEADER]: Becoming Leader. State: Replica: 1d18bafb099e478680f64718ea2d764e, State: Running, Role: LEADER
I20250901 14:18:40.770536  8993 ts_tablet_manager.cc:1428] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e: Time spent starting tablet: real 0.013s	user 0.006s	sys 0.008s
I20250901 14:18:40.771422  8993 tablet_bootstrap.cc:492] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e: Bootstrap starting.
I20250901 14:18:40.771104  8995 consensus_queue.cc:237] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d18bafb099e478680f64718ea2d764e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 42067 } }
I20250901 14:18:40.776755  8993 tablet_bootstrap.cc:654] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:40.777333  8838 catalog_manager.cc:5582] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e reported cstate change: term changed from 0 to 1, leader changed from <none> to 1d18bafb099e478680f64718ea2d764e (127.4.231.193). New cstate: current_term: 1 leader_uuid: "1d18bafb099e478680f64718ea2d764e" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d18bafb099e478680f64718ea2d764e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 42067 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:40.781113  8993 tablet_bootstrap.cc:492] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e: No bootstrap required, opened a new log
I20250901 14:18:40.781579  8993 ts_tablet_manager.cc:1397] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e: Time spent bootstrapping tablet: real 0.010s	user 0.007s	sys 0.003s
I20250901 14:18:40.783849  8993 raft_consensus.cc:357] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d18bafb099e478680f64718ea2d764e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 42067 } }
I20250901 14:18:40.784327  8993 raft_consensus.cc:383] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:40.784628  8993 raft_consensus.cc:738] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 1d18bafb099e478680f64718ea2d764e, State: Initialized, Role: FOLLOWER
I20250901 14:18:40.785233  8993 consensus_queue.cc:260] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d18bafb099e478680f64718ea2d764e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 42067 } }
I20250901 14:18:40.785809  8993 raft_consensus.cc:397] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:40.786023  8993 raft_consensus.cc:491] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:40.786267  8993 raft_consensus.cc:3058] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:40.792326  8993 raft_consensus.cc:513] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d18bafb099e478680f64718ea2d764e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 42067 } }
I20250901 14:18:40.792876  8993 leader_election.cc:304] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 1d18bafb099e478680f64718ea2d764e; no voters: 
I20250901 14:18:40.793376  8993 leader_election.cc:290] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:40.793552  8995 raft_consensus.cc:2802] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:40.794062  8995 raft_consensus.cc:695] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 1 LEADER]: Becoming Leader. State: Replica: 1d18bafb099e478680f64718ea2d764e, State: Running, Role: LEADER
I20250901 14:18:40.794694  8995 consensus_queue.cc:237] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d18bafb099e478680f64718ea2d764e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 42067 } }
I20250901 14:18:40.794863  8993 ts_tablet_manager.cc:1428] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e: Time spent starting tablet: real 0.013s	user 0.008s	sys 0.003s
I20250901 14:18:40.800118  8838 catalog_manager.cc:5582] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e reported cstate change: term changed from 0 to 1, leader changed from <none> to 1d18bafb099e478680f64718ea2d764e (127.4.231.193). New cstate: current_term: 1 leader_uuid: "1d18bafb099e478680f64718ea2d764e" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d18bafb099e478680f64718ea2d764e" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 42067 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:40.913196  5023 client-test.cc:5979] Randomized mutations testing.
I20250901 14:18:40.913403  5023 test_util.cc:276] Using random seed: 207568876
I20250901 14:18:40.913600  5023 client-test.cc:5984] Correctness test 0
I20250901 14:18:41.133719  5023 client-test.cc:5987] ...complete
I20250901 14:18:41.146958  5023 client-test.cc:5984] Correctness test 50
I20250901 14:18:41.220741  5023 client-test.cc:5987] ...complete
I20250901 14:18:41.230723  5023 client-test.cc:5984] Correctness test 100
I20250901 14:18:41.347337  5023 client-test.cc:5987] ...complete
I20250901 14:18:41.356640  5023 client-test.cc:5984] Correctness test 150
I20250901 14:18:41.440232  5023 client-test.cc:5987] ...complete
I20250901 14:18:41.450448  5023 client-test.cc:5984] Correctness test 200
I20250901 14:18:41.541978  5023 client-test.cc:5987] ...complete
I20250901 14:18:41.551630  5023 client-test.cc:5984] Correctness test 250
I20250901 14:18:41.633024  5023 client-test.cc:5987] ...complete
I20250901 14:18:41.643360  5023 client-test.cc:5984] Correctness test 300
I20250901 14:18:41.716375  5023 client-test.cc:5987] ...complete
I20250901 14:18:41.726686  5023 client-test.cc:5984] Correctness test 350
I20250901 14:18:41.801504  5023 client-test.cc:5987] ...complete
I20250901 14:18:41.812137  5023 client-test.cc:5984] Correctness test 400
I20250901 14:18:41.889910  5023 client-test.cc:5987] ...complete
I20250901 14:18:41.899711  5023 client-test.cc:5984] Correctness test 450
I20250901 14:18:42.002959  5023 client-test.cc:5987] ...complete
I20250901 14:18:42.013152  5023 client-test.cc:5984] Correctness test 500
I20250901 14:18:42.092787  5023 client-test.cc:5987] ...complete
I20250901 14:18:42.102098  5023 client-test.cc:5984] Correctness test 550
I20250901 14:18:42.187911  5023 client-test.cc:5987] ...complete
I20250901 14:18:42.197067  5023 client-test.cc:5984] Correctness test 600
I20250901 14:18:42.283179  5023 client-test.cc:5987] ...complete
I20250901 14:18:42.292649  5023 client-test.cc:5984] Correctness test 650
I20250901 14:18:42.382114  5023 client-test.cc:5987] ...complete
I20250901 14:18:42.392345  5023 client-test.cc:5984] Correctness test 700
I20250901 14:18:42.480985  5023 client-test.cc:5987] ...complete
I20250901 14:18:42.491312  5023 client-test.cc:5984] Correctness test 750
I20250901 14:18:42.580948  5023 client-test.cc:5987] ...complete
I20250901 14:18:42.591845  5023 client-test.cc:5984] Correctness test 800
I20250901 14:18:42.681185  5023 client-test.cc:5987] ...complete
I20250901 14:18:42.690827  5023 client-test.cc:5984] Correctness test 850
I20250901 14:18:42.781194  5023 client-test.cc:5987] ...complete
I20250901 14:18:42.790831  5023 client-test.cc:5984] Correctness test 900
I20250901 14:18:42.928470  5023 client-test.cc:5987] ...complete
I20250901 14:18:42.938261  5023 client-test.cc:5984] Correctness test 950
I20250901 14:18:43.036615  5023 client-test.cc:5987] ...complete
I20250901 14:18:43.047493  5023 client-test.cc:5984] Correctness test 1000
I20250901 14:18:43.142740  5023 client-test.cc:5987] ...complete
I20250901 14:18:43.231279  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:43.259902  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:43.260679  5023 tablet_replica.cc:331] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e: stopping tablet replica
I20250901 14:18:43.261471  5023 raft_consensus.cc:2241] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:43.262187  5023 raft_consensus.cc:2270] T 713d90bced3a4f86a9762b35d6d3801f P 1d18bafb099e478680f64718ea2d764e [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:43.266197  5023 tablet_replica.cc:331] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e: stopping tablet replica
I20250901 14:18:43.266742  5023 raft_consensus.cc:2241] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:43.267227  5023 raft_consensus.cc:2270] T 28a6dbaad5cb4d0d83abdcb3fba6fa01 P 1d18bafb099e478680f64718ea2d764e [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:43.281802  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:43.298321  5023 master.cc:561] Master@127.4.231.254:43385 shutting down...
I20250901 14:18:43.322758  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:43.323345  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:43.323662  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P e9e304157fa14519bb3ac423dc5a0284: stopping tablet replica
I20250901 14:18:43.342947  5023 master.cc:583] Master@127.4.231.254:43385 shutdown complete.
[       OK ] ClientTest.TestRandomWriteOperation (2992 ms)
[ RUN      ] ClientTest.TestCreateTable_TableNames
I20250901 14:18:43.381734  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:34027
I20250901 14:18:43.382791  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:43.387924  9007 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:43.387949  9008 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:43.391383  9010 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:43.393013  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:43.393942  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:43.394126  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:43.394241  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736323394231 us; error 0 us; skew 500 ppm
I20250901 14:18:43.394675  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:43.396971  5023 webserver.cc:480] Webserver started at http://127.4.231.254:33379/ using document root <none> and password file <none>
I20250901 14:18:43.397385  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:43.397583  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:43.397809  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:43.398849  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCreateTable_TableNames.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "7812177517fc420899d8a0167b6e728c"
format_stamp: "Formatted at 2025-09-01 14:18:43 on dist-test-slave-9gf0"
I20250901 14:18:43.403275  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.001s	sys 0.004s
I20250901 14:18:43.406782  9015 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:43.407506  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:43.407804  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCreateTable_TableNames.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "7812177517fc420899d8a0167b6e728c"
format_stamp: "Formatted at 2025-09-01 14:18:43 on dist-test-slave-9gf0"
I20250901 14:18:43.408071  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCreateTable_TableNames.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCreateTable_TableNames.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCreateTable_TableNames.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:43.419206  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:43.420398  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:43.464527  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:34027
I20250901 14:18:43.464639  9076 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:34027 every 8 connection(s)
I20250901 14:18:43.468385  9077 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:43.479786  9077 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c: Bootstrap starting.
I20250901 14:18:43.484457  9077 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:43.489095  9077 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c: No bootstrap required, opened a new log
I20250901 14:18:43.491262  9077 raft_consensus.cc:357] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7812177517fc420899d8a0167b6e728c" member_type: VOTER }
I20250901 14:18:43.491858  9077 raft_consensus.cc:383] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:43.492134  9077 raft_consensus.cc:738] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 7812177517fc420899d8a0167b6e728c, State: Initialized, Role: FOLLOWER
I20250901 14:18:43.492766  9077 consensus_queue.cc:260] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7812177517fc420899d8a0167b6e728c" member_type: VOTER }
I20250901 14:18:43.493270  9077 raft_consensus.cc:397] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:43.493505  9077 raft_consensus.cc:491] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:43.493798  9077 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:43.498855  9077 raft_consensus.cc:513] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7812177517fc420899d8a0167b6e728c" member_type: VOTER }
I20250901 14:18:43.499420  9077 leader_election.cc:304] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 7812177517fc420899d8a0167b6e728c; no voters: 
I20250901 14:18:43.500538  9077 leader_election.cc:290] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:43.500860  9080 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:43.502139  9080 raft_consensus.cc:695] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 1 LEADER]: Becoming Leader. State: Replica: 7812177517fc420899d8a0167b6e728c, State: Running, Role: LEADER
I20250901 14:18:43.502806  9080 consensus_queue.cc:237] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7812177517fc420899d8a0167b6e728c" member_type: VOTER }
I20250901 14:18:43.503683  9077 sys_catalog.cc:564] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:43.505813  9082 sys_catalog.cc:455] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [sys.catalog]: SysCatalogTable state changed. Reason: New leader 7812177517fc420899d8a0167b6e728c. Latest consensus state: current_term: 1 leader_uuid: "7812177517fc420899d8a0167b6e728c" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7812177517fc420899d8a0167b6e728c" member_type: VOTER } }
I20250901 14:18:43.505887  9081 sys_catalog.cc:455] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "7812177517fc420899d8a0167b6e728c" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7812177517fc420899d8a0167b6e728c" member_type: VOTER } }
I20250901 14:18:43.506644  9082 sys_catalog.cc:458] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:43.507500  9081 sys_catalog.cc:458] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:43.509766  9087 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:43.514369  9087 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:43.519666  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:43.522727  9087 catalog_manager.cc:1349] Generated new cluster ID: e3bdd90bcf6a4217afde87d3c86a4686
I20250901 14:18:43.522981  9087 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:43.545600  9087 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:43.546852  9087 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:43.561619  9087 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c: Generated new TSK 0
I20250901 14:18:43.562204  9087 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:43.586298  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:43.592967  9099 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:43.594112  9100 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:43.597856  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:43.599277  9102 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:43.600149  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:43.600363  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:43.600524  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736323600505 us; error 0 us; skew 500 ppm
I20250901 14:18:43.601042  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:43.603330  5023 webserver.cc:480] Webserver started at http://127.4.231.193:44863/ using document root <none> and password file <none>
I20250901 14:18:43.603809  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:43.603991  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:43.604238  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:43.605398  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCreateTable_TableNames.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "083ad2dc8ee4472d81eb10f73de36cb6"
format_stamp: "Formatted at 2025-09-01 14:18:43 on dist-test-slave-9gf0"
I20250901 14:18:43.609856  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.000s
I20250901 14:18:43.613164  9107 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:43.613991  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:43.614284  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCreateTable_TableNames.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "083ad2dc8ee4472d81eb10f73de36cb6"
format_stamp: "Formatted at 2025-09-01 14:18:43 on dist-test-slave-9gf0"
I20250901 14:18:43.614591  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCreateTable_TableNames.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCreateTable_TableNames.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestCreateTable_TableNames.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:43.639585  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:43.640765  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:43.645370  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:43.645880  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.001s	user 0.001s	sys 0.000s
I20250901 14:18:43.646349  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:43.646703  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:43.698872  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:44239
I20250901 14:18:43.698951  9177 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:44239 every 8 connection(s)
I20250901 14:18:43.703644  9178 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34027
I20250901 14:18:43.704016  9178 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:43.704708  9178 heartbeater.cc:507] Master 127.4.231.254:34027 requested a full tablet report, sending...
I20250901 14:18:43.706511  9032 ts_manager.cc:194] Registered new tserver with Master: 083ad2dc8ee4472d81eb10f73de36cb6 (127.4.231.193:44239)
I20250901 14:18:43.707166  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.00483605s
I20250901 14:18:43.708235  9032 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:45478
I20250901 14:18:43.720903  9178 heartbeater.cc:499] Master 127.4.231.254:34027 was elected leader, sending a full tablet report...
I20250901 14:18:43.728578  9032 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:45490:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:43.760488  9143 tablet_service.cc:1468] Processing CreateTablet for tablet 974ae42cf1fc4c6fad92838c93c73a10 (DEFAULT_TABLE table=client-testtb [id=e462eed336bc43de9d7078566a36ee64]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:43.760878  9142 tablet_service.cc:1468] Processing CreateTablet for tablet 8ab899cf6dbb4c2ab9033c0a841738a7 (DEFAULT_TABLE table=client-testtb [id=e462eed336bc43de9d7078566a36ee64]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:43.762060  9143 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 974ae42cf1fc4c6fad92838c93c73a10. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:43.762840  9142 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 8ab899cf6dbb4c2ab9033c0a841738a7. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:43.780082  9188 tablet_bootstrap.cc:492] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6: Bootstrap starting.
I20250901 14:18:43.784631  9188 tablet_bootstrap.cc:654] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:43.789103  9188 tablet_bootstrap.cc:492] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6: No bootstrap required, opened a new log
I20250901 14:18:43.789505  9188 ts_tablet_manager.cc:1397] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6: Time spent bootstrapping tablet: real 0.010s	user 0.000s	sys 0.008s
I20250901 14:18:43.791936  9188 raft_consensus.cc:357] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44239 } }
I20250901 14:18:43.792415  9188 raft_consensus.cc:383] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:43.792639  9188 raft_consensus.cc:738] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 083ad2dc8ee4472d81eb10f73de36cb6, State: Initialized, Role: FOLLOWER
I20250901 14:18:43.793169  9188 consensus_queue.cc:260] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44239 } }
I20250901 14:18:43.793771  9188 raft_consensus.cc:397] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:43.793989  9188 raft_consensus.cc:491] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:43.794239  9188 raft_consensus.cc:3058] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:43.799250  9188 raft_consensus.cc:513] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44239 } }
I20250901 14:18:43.799811  9188 leader_election.cc:304] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 083ad2dc8ee4472d81eb10f73de36cb6; no voters: 
I20250901 14:18:43.801031  9188 leader_election.cc:290] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:43.801412  9190 raft_consensus.cc:2802] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:43.803290  9188 ts_tablet_manager.cc:1428] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6: Time spent starting tablet: real 0.014s	user 0.010s	sys 0.002s
I20250901 14:18:43.803540  9190 raft_consensus.cc:695] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 1 LEADER]: Becoming Leader. State: Replica: 083ad2dc8ee4472d81eb10f73de36cb6, State: Running, Role: LEADER
I20250901 14:18:43.804059  9188 tablet_bootstrap.cc:492] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6: Bootstrap starting.
I20250901 14:18:43.804194  9190 consensus_queue.cc:237] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44239 } }
I20250901 14:18:43.810178  9188 tablet_bootstrap.cc:654] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:43.811757  9031 catalog_manager.cc:5582] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 reported cstate change: term changed from 0 to 1, leader changed from <none> to 083ad2dc8ee4472d81eb10f73de36cb6 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44239 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:43.815222  9188 tablet_bootstrap.cc:492] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6: No bootstrap required, opened a new log
I20250901 14:18:43.815608  9188 ts_tablet_manager.cc:1397] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6: Time spent bootstrapping tablet: real 0.012s	user 0.008s	sys 0.001s
I20250901 14:18:43.818173  9188 raft_consensus.cc:357] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44239 } }
I20250901 14:18:43.818707  9188 raft_consensus.cc:383] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:43.819041  9188 raft_consensus.cc:738] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 083ad2dc8ee4472d81eb10f73de36cb6, State: Initialized, Role: FOLLOWER
I20250901 14:18:43.819636  9188 consensus_queue.cc:260] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44239 } }
I20250901 14:18:43.820225  9188 raft_consensus.cc:397] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:43.820504  9188 raft_consensus.cc:491] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:43.820835  9188 raft_consensus.cc:3058] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:43.826694  9188 raft_consensus.cc:513] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44239 } }
I20250901 14:18:43.827174  9188 leader_election.cc:304] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 083ad2dc8ee4472d81eb10f73de36cb6; no voters: 
I20250901 14:18:43.827569  9188 leader_election.cc:290] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:43.827739  9190 raft_consensus.cc:2802] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:43.828228  9190 raft_consensus.cc:695] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 1 LEADER]: Becoming Leader. State: Replica: 083ad2dc8ee4472d81eb10f73de36cb6, State: Running, Role: LEADER
I20250901 14:18:43.828959  9188 ts_tablet_manager.cc:1428] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6: Time spent starting tablet: real 0.013s	user 0.005s	sys 0.006s
I20250901 14:18:43.828898  9190 consensus_queue.cc:237] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44239 } }
I20250901 14:18:43.833873  9031 catalog_manager.cc:5582] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 reported cstate change: term changed from 0 to 1, leader changed from <none> to 083ad2dc8ee4472d81eb10f73de36cb6 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "083ad2dc8ee4472d81eb10f73de36cb6" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44239 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:43.848064  9032 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:45490:
name: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
split_rows_range_bounds {
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:43.851855  9032 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:45490:
name: "foo\000bar"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
split_rows_range_bounds {
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
[libprotobuf ERROR /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/protobuf-3.21.9/src/google/protobuf/wire_format_lite.cc:618] String field 'kudu.master.CreateTableRequestPB.name' contains invalid UTF-8 data when serializing a protocol buffer. Use the 'bytes' type if you intend to send raw bytes. 
[libprotobuf ERROR /home/jenkins-slave/workspace/build_and_test_flaky/thirdparty/src/protobuf-3.21.9/src/google/protobuf/wire_format_lite.cc:618] String field 'kudu.master.CreateTableRequestPB.name' contains invalid UTF-8 data when parsing a protocol buffer. Use the 'bytes' type if you intend to send raw bytes. 
I20250901 14:18:43.855540  9032 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:45490:
name: "foo\360(\214\274"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
split_rows_range_bounds {
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:43.859289  9031 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:45490:
name: "\344\275\240\345\245\275"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
split_rows_range_bounds {
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:43.862895  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:43.882048  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:43.882761  5023 tablet_replica.cc:331] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6: stopping tablet replica
I20250901 14:18:43.883416  5023 raft_consensus.cc:2241] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:43.883839  5023 raft_consensus.cc:2270] T 8ab899cf6dbb4c2ab9033c0a841738a7 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:43.885970  5023 tablet_replica.cc:331] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6: stopping tablet replica
I20250901 14:18:43.886385  5023 raft_consensus.cc:2241] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:43.886763  5023 raft_consensus.cc:2270] T 974ae42cf1fc4c6fad92838c93c73a10 P 083ad2dc8ee4472d81eb10f73de36cb6 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:43.906957  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:43.917795  5023 master.cc:561] Master@127.4.231.254:34027 shutting down...
I20250901 14:18:43.934538  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:43.935020  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:43.935416  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 7812177517fc420899d8a0167b6e728c: stopping tablet replica
I20250901 14:18:43.945037  5023 master.cc:583] Master@127.4.231.254:34027 shutdown complete.
[       OK ] ClientTest.TestCreateTable_TableNames (585 ms)
[ RUN      ] ClientTest.TestAlterTableWithValidComment
I20250901 14:18:43.967197  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:35569
I20250901 14:18:43.968231  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:43.973124  9197 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:43.973341  9198 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:43.974871  9200 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:43.975643  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:43.976579  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:43.976915  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:43.977087  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736323977066 us; error 0 us; skew 500 ppm
I20250901 14:18:43.977687  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:43.979811  5023 webserver.cc:480] Webserver started at http://127.4.231.254:40135/ using document root <none> and password file <none>
I20250901 14:18:43.980242  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:43.980429  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:43.980672  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:43.981760  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAlterTableWithValidComment.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "a067862c30c1425dafcfd0250bcc2981"
format_stamp: "Formatted at 2025-09-01 14:18:43 on dist-test-slave-9gf0"
I20250901 14:18:43.985877  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.000s
I20250901 14:18:43.989151  9205 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:43.989892  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:43.990149  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAlterTableWithValidComment.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "a067862c30c1425dafcfd0250bcc2981"
format_stamp: "Formatted at 2025-09-01 14:18:43 on dist-test-slave-9gf0"
I20250901 14:18:43.990396  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAlterTableWithValidComment.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAlterTableWithValidComment.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAlterTableWithValidComment.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:44.044103  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:44.045194  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:44.085469  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:35569
I20250901 14:18:44.085590  9266 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:35569 every 8 connection(s)
I20250901 14:18:44.089231  9267 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:44.100162  9267 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: Bootstrap starting.
I20250901 14:18:44.104324  9267 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:44.108175  9267 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: No bootstrap required, opened a new log
I20250901 14:18:44.110119  9267 raft_consensus.cc:357] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a067862c30c1425dafcfd0250bcc2981" member_type: VOTER }
I20250901 14:18:44.110498  9267 raft_consensus.cc:383] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:44.110670  9267 raft_consensus.cc:738] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: a067862c30c1425dafcfd0250bcc2981, State: Initialized, Role: FOLLOWER
I20250901 14:18:44.111188  9267 consensus_queue.cc:260] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a067862c30c1425dafcfd0250bcc2981" member_type: VOTER }
I20250901 14:18:44.111627  9267 raft_consensus.cc:397] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:44.111821  9267 raft_consensus.cc:491] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:44.112025  9267 raft_consensus.cc:3058] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:44.116513  9267 raft_consensus.cc:513] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a067862c30c1425dafcfd0250bcc2981" member_type: VOTER }
I20250901 14:18:44.116991  9267 leader_election.cc:304] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: a067862c30c1425dafcfd0250bcc2981; no voters: 
I20250901 14:18:44.118119  9267 leader_election.cc:290] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:44.118428  9270 raft_consensus.cc:2802] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:44.119839  9270 raft_consensus.cc:695] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 1 LEADER]: Becoming Leader. State: Replica: a067862c30c1425dafcfd0250bcc2981, State: Running, Role: LEADER
I20250901 14:18:44.120519  9270 consensus_queue.cc:237] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a067862c30c1425dafcfd0250bcc2981" member_type: VOTER }
I20250901 14:18:44.121104  9267 sys_catalog.cc:564] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:44.123230  9272 sys_catalog.cc:455] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [sys.catalog]: SysCatalogTable state changed. Reason: New leader a067862c30c1425dafcfd0250bcc2981. Latest consensus state: current_term: 1 leader_uuid: "a067862c30c1425dafcfd0250bcc2981" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a067862c30c1425dafcfd0250bcc2981" member_type: VOTER } }
I20250901 14:18:44.123203  9271 sys_catalog.cc:455] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "a067862c30c1425dafcfd0250bcc2981" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a067862c30c1425dafcfd0250bcc2981" member_type: VOTER } }
I20250901 14:18:44.123867  9272 sys_catalog.cc:458] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:44.124056  9271 sys_catalog.cc:458] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:44.135094  9275 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:44.142360  9275 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:44.143388  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:44.151266  9275 catalog_manager.cc:1349] Generated new cluster ID: 89e59bb553d24607b54528b461e84292
I20250901 14:18:44.151517  9275 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:44.169152  9275 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:44.170490  9275 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:44.185413  9275 catalog_manager.cc:5955] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: Generated new TSK 0
I20250901 14:18:44.185994  9275 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:44.210511  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:44.216055  9288 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:44.217350  9289 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:44.218416  9291 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:44.219337  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:44.220098  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:44.220285  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:44.220438  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736324220421 us; error 0 us; skew 500 ppm
I20250901 14:18:44.220952  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:44.226802  5023 webserver.cc:480] Webserver started at http://127.4.231.193:43581/ using document root <none> and password file <none>
I20250901 14:18:44.227468  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:44.227741  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:44.228129  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:44.229805  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAlterTableWithValidComment.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "8ae10c0b018145c39df233bc5bfe8773"
format_stamp: "Formatted at 2025-09-01 14:18:44 on dist-test-slave-9gf0"
I20250901 14:18:44.234411  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.001s	sys 0.003s
I20250901 14:18:44.237622  9296 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:44.238337  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:44.238632  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAlterTableWithValidComment.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "8ae10c0b018145c39df233bc5bfe8773"
format_stamp: "Formatted at 2025-09-01 14:18:44 on dist-test-slave-9gf0"
I20250901 14:18:44.238932  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAlterTableWithValidComment.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAlterTableWithValidComment.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestAlterTableWithValidComment.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:44.250703  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:44.251766  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:44.257666  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:44.257956  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:44.258191  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:44.258340  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:44.307106  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:43091
I20250901 14:18:44.307221  9367 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:43091 every 8 connection(s)
I20250901 14:18:44.311707  9368 heartbeater.cc:344] Connected to a master server at 127.4.231.254:35569
I20250901 14:18:44.312073  9368 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:44.312772  9368 heartbeater.cc:507] Master 127.4.231.254:35569 requested a full tablet report, sending...
I20250901 14:18:44.314711  9222 ts_manager.cc:194] Registered new tserver with Master: 8ae10c0b018145c39df233bc5bfe8773 (127.4.231.193:43091)
I20250901 14:18:44.315115  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004843583s
I20250901 14:18:44.316439  9222 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:51090
I20250901 14:18:44.328894  9368 heartbeater.cc:499] Master 127.4.231.254:35569 was elected leader, sending a full tablet report...
I20250901 14:18:44.336663  9221 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:51120:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:44.368527  9332 tablet_service.cc:1468] Processing CreateTablet for tablet ed8ed106e29846d98cac59d11539c044 (DEFAULT_TABLE table=client-testtb [id=badfbcf75a024799aaa51e9ee0f25e3f]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:44.368711  9331 tablet_service.cc:1468] Processing CreateTablet for tablet 84c376393131454d913713686fdfb348 (DEFAULT_TABLE table=client-testtb [id=badfbcf75a024799aaa51e9ee0f25e3f]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:44.369746  9332 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet ed8ed106e29846d98cac59d11539c044. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:44.370340  9331 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 84c376393131454d913713686fdfb348. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:44.383548  9378 tablet_bootstrap.cc:492] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773: Bootstrap starting.
I20250901 14:18:44.388144  9378 tablet_bootstrap.cc:654] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:44.392019  9378 tablet_bootstrap.cc:492] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773: No bootstrap required, opened a new log
I20250901 14:18:44.392378  9378 ts_tablet_manager.cc:1397] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773: Time spent bootstrapping tablet: real 0.009s	user 0.007s	sys 0.000s
I20250901 14:18:44.394356  9378 raft_consensus.cc:357] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.394793  9378 raft_consensus.cc:383] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:44.395010  9378 raft_consensus.cc:738] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8ae10c0b018145c39df233bc5bfe8773, State: Initialized, Role: FOLLOWER
I20250901 14:18:44.395557  9378 consensus_queue.cc:260] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.396054  9378 raft_consensus.cc:397] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:44.396274  9378 raft_consensus.cc:491] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:44.396524  9378 raft_consensus.cc:3058] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:44.401388  9378 raft_consensus.cc:513] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.401955  9378 leader_election.cc:304] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 8ae10c0b018145c39df233bc5bfe8773; no voters: 
I20250901 14:18:44.403026  9378 leader_election.cc:290] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:44.403333  9380 raft_consensus.cc:2802] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:44.405158  9380 raft_consensus.cc:695] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 LEADER]: Becoming Leader. State: Replica: 8ae10c0b018145c39df233bc5bfe8773, State: Running, Role: LEADER
I20250901 14:18:44.405498  9378 ts_tablet_manager.cc:1428] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773: Time spent starting tablet: real 0.013s	user 0.013s	sys 0.000s
I20250901 14:18:44.405972  9380 consensus_queue.cc:237] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.406415  9378 tablet_bootstrap.cc:492] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773: Bootstrap starting.
I20250901 14:18:44.412371  9378 tablet_bootstrap.cc:654] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:44.412505  9221 catalog_manager.cc:5582] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 reported cstate change: term changed from 0 to 1, leader changed from <none> to 8ae10c0b018145c39df233bc5bfe8773 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "8ae10c0b018145c39df233bc5bfe8773" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:44.416950  9378 tablet_bootstrap.cc:492] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773: No bootstrap required, opened a new log
I20250901 14:18:44.417348  9378 ts_tablet_manager.cc:1397] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773: Time spent bootstrapping tablet: real 0.011s	user 0.008s	sys 0.000s
I20250901 14:18:44.419700  9378 raft_consensus.cc:357] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.420297  9378 raft_consensus.cc:383] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:44.420572  9378 raft_consensus.cc:738] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8ae10c0b018145c39df233bc5bfe8773, State: Initialized, Role: FOLLOWER
I20250901 14:18:44.421151  9378 consensus_queue.cc:260] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.421787  9378 raft_consensus.cc:397] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:44.422092  9378 raft_consensus.cc:491] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:44.422435  9378 raft_consensus.cc:3058] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:44.428669  9378 raft_consensus.cc:513] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.429203  9378 leader_election.cc:304] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 8ae10c0b018145c39df233bc5bfe8773; no voters: 
I20250901 14:18:44.429679  9378 leader_election.cc:290] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:44.429858  9380 raft_consensus.cc:2802] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:44.430366  9380 raft_consensus.cc:695] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 LEADER]: Becoming Leader. State: Replica: 8ae10c0b018145c39df233bc5bfe8773, State: Running, Role: LEADER
I20250901 14:18:44.431092  9380 consensus_queue.cc:237] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.431471  9378 ts_tablet_manager.cc:1428] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773: Time spent starting tablet: real 0.014s	user 0.012s	sys 0.000s
I20250901 14:18:44.436373  9221 catalog_manager.cc:5582] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 reported cstate change: term changed from 0 to 1, leader changed from <none> to 8ae10c0b018145c39df233bc5bfe8773 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "8ae10c0b018145c39df233bc5bfe8773" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:44.454281  9221 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:51120:
name: "table_comment"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "val"
    type: INT32
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:44.472442  9332 tablet_service.cc:1468] Processing CreateTablet for tablet e1dd017be3ca4def852a5b96961154e1 (DEFAULT_TABLE table=table_comment [id=58e68ace2c104271bbc0ab79cd664e37]), partition=RANGE (key) PARTITION UNBOUNDED
I20250901 14:18:44.473444  9332 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet e1dd017be3ca4def852a5b96961154e1. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:44.484486  9378 tablet_bootstrap.cc:492] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Bootstrap starting.
I20250901 14:18:44.490140  9378 tablet_bootstrap.cc:654] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:44.494201  9378 tablet_bootstrap.cc:492] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: No bootstrap required, opened a new log
I20250901 14:18:44.494553  9378 ts_tablet_manager.cc:1397] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Time spent bootstrapping tablet: real 0.010s	user 0.009s	sys 0.000s
I20250901 14:18:44.496389  9378 raft_consensus.cc:357] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.496798  9378 raft_consensus.cc:383] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:44.496977  9378 raft_consensus.cc:738] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8ae10c0b018145c39df233bc5bfe8773, State: Initialized, Role: FOLLOWER
I20250901 14:18:44.497469  9378 consensus_queue.cc:260] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.497987  9378 raft_consensus.cc:397] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:44.498183  9378 raft_consensus.cc:491] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:44.498390  9378 raft_consensus.cc:3058] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:44.503820  9378 raft_consensus.cc:513] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.504341  9378 leader_election.cc:304] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 8ae10c0b018145c39df233bc5bfe8773; no voters: 
I20250901 14:18:44.504817  9378 leader_election.cc:290] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:44.504966  9380 raft_consensus.cc:2802] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:44.505475  9380 raft_consensus.cc:695] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 LEADER]: Becoming Leader. State: Replica: 8ae10c0b018145c39df233bc5bfe8773, State: Running, Role: LEADER
I20250901 14:18:44.506278  9380 consensus_queue.cc:237] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } }
I20250901 14:18:44.506776  9378 ts_tablet_manager.cc:1428] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Time spent starting tablet: real 0.012s	user 0.012s	sys 0.000s
I20250901 14:18:44.512594  9221 catalog_manager.cc:5582] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 reported cstate change: term changed from 0 to 1, leader changed from <none> to 8ae10c0b018145c39df233bc5bfe8773 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "8ae10c0b018145c39df233bc5bfe8773" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8ae10c0b018145c39df233bc5bfe8773" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43091 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:44.522817  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:44.537290  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 0 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 1
I20250901 14:18:44.556742  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 1)
I20250901 14:18:44.562240  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:44.574779  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 1 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 2
I20250901 14:18:44.592386  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 2)
I20250901 14:18:44.599491  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxx"
I20250901 14:18:44.611779  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 2 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 3
I20250901 14:18:44.613178  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:44.631631  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 3)
I20250901 14:18:44.637348  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:44.649612  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 3 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 4
I20250901 14:18:44.651011  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:44.670504  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 4)
I20250901 14:18:44.680212  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:44.692831  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 4 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 5
I20250901 14:18:44.694305  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:44.711460  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 5)
I20250901 14:18:44.718039  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:44.730103  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 5 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 6
I20250901 14:18:44.731518  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:44.750306  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 6)
I20250901 14:18:44.761628  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:44.774091  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 6 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 7
I20250901 14:18:44.775650  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:44.792901  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 7)
I20250901 14:18:44.798753  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:44.810947  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 7 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 8
I20250901 14:18:44.812469  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:44.832336  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 8)
I20250901 14:18:44.842474  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:44.854315  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 8 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 9
I20250901 14:18:44.855719  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:44.873366  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 9)
I20250901 14:18:44.884156  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:44.895848  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 9 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 10
I20250901 14:18:44.897104  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:44.913775  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 10)
I20250901 14:18:44.920748  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:44.932471  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 10 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 11
I20250901 14:18:44.933804  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:44.951567  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 11)
I20250901 14:18:44.957505  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:44.969074  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 11 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 12
I20250901 14:18:44.970548  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:44.988565  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 12)
I20250901 14:18:44.998935  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.011039  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 12 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 13
I20250901 14:18:45.012259  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.028605  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 13)
I20250901 14:18:45.035123  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.046769  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 13 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 14
I20250901 14:18:45.048018  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.063412  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 14)
I20250901 14:18:45.070643  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.081598  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 14 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 15
I20250901 14:18:45.082836  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.098110  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 15)
I20250901 14:18:45.104573  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.116281  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 15 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 16
I20250901 14:18:45.117666  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.135740  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 16)
I20250901 14:18:45.145850  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.157864  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 16 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 17
I20250901 14:18:45.158964  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.174751  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 17)
I20250901 14:18:45.182237  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.194219  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 17 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 18
I20250901 14:18:45.195488  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.212146  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 18)
I20250901 14:18:45.218427  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.229900  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 18 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 19
I20250901 14:18:45.231045  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.246168  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 19)
I20250901 14:18:45.253305  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.264313  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 19 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 20
I20250901 14:18:45.265599  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.283610  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 20)
I20250901 14:18:45.293308  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.305745  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 20 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 21
I20250901 14:18:45.307039  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.324374  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 21)
I20250901 14:18:45.329675  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.341101  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 21 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 22
I20250901 14:18:45.342521  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.358137  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 22)
I20250901 14:18:45.364964  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.376466  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 22 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 23
I20250901 14:18:45.377570  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.395766  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 23)
I20250901 14:18:45.406515  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.417121  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 23 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 24
I20250901 14:18:45.418377  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.434404  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 24)
I20250901 14:18:45.441234  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.452658  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 24 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 25
I20250901 14:18:45.453891  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.471843  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 25)
I20250901 14:18:45.477628  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.489164  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 25 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 26
I20250901 14:18:45.490253  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.504976  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 26)
I20250901 14:18:45.513307  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.524971  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 26 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 27
I20250901 14:18:45.526165  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.542548  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 27)
I20250901 14:18:45.548791  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.559638  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 27 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 28
I20250901 14:18:45.560930  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.576052  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 28)
I20250901 14:18:45.583802  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.595727  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 28 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 29
I20250901 14:18:45.597079  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.612200  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 29)
I20250901 14:18:45.619062  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.630287  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 29 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 30
I20250901 14:18:45.631668  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.648857  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 30)
I20250901 14:18:45.654623  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.666422  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 30 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 31
I20250901 14:18:45.667791  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.682919  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 31)
I20250901 14:18:45.689622  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.700695  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 31 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 32
I20250901 14:18:45.701885  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.718750  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 32)
I20250901 14:18:45.725020  9221 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" } } } modify_external_catalogs: true new_table_comment: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
I20250901 14:18:45.737448  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 32 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 33
I20250901 14:18:45.738740  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.756075  9222 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 33)
I20250901 14:18:45.765879  9222 catalog_manager.cc:3502] Servicing AlterTable request from {username='slave'} at 127.0.0.1:51120:
table { table_name: "table_comment" } alter_schema_steps { type: ALTER_COLUMN alter_column { delta { name: "val" new_comment: "" } } } modify_external_catalogs: true new_table_comment: ""
I20250901 14:18:45.776973  9389 tablet.cc:1722] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: Alter schema from (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 33 to (
    10:key INT32 NOT NULL,
    11:val INT32 NULLABLE,
    PRIMARY KEY (key)
) version 34
I20250901 14:18:45.778273  9389 tablet.cc:1620] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: MemRowSet was empty: no flush needed.
I20250901 14:18:45.794872  9221 catalog_manager.cc:6127] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: table_comment [id=58e68ace2c104271bbc0ab79cd664e37] alter complete (version 34)
I20250901 14:18:45.799969  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:45.823661  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:45.824262  5023 tablet_replica.cc:331] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773: stopping tablet replica
I20250901 14:18:45.824837  5023 raft_consensus.cc:2241] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:45.825294  5023 raft_consensus.cc:2270] T e1dd017be3ca4def852a5b96961154e1 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:45.827436  5023 tablet_replica.cc:331] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773: stopping tablet replica
I20250901 14:18:45.827908  5023 raft_consensus.cc:2241] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:45.828279  5023 raft_consensus.cc:2270] T 84c376393131454d913713686fdfb348 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:45.829830  5023 tablet_replica.cc:331] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773: stopping tablet replica
I20250901 14:18:45.830235  5023 raft_consensus.cc:2241] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:45.830596  5023 raft_consensus.cc:2270] T ed8ed106e29846d98cac59d11539c044 P 8ae10c0b018145c39df233bc5bfe8773 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:45.850314  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:45.862782  5023 master.cc:561] Master@127.4.231.254:35569 shutting down...
I20250901 14:18:45.880250  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:45.880785  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:45.881084  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P a067862c30c1425dafcfd0250bcc2981: stopping tablet replica
I20250901 14:18:45.898418  5023 master.cc:583] Master@127.4.231.254:35569 shutdown complete.
[       OK ] ClientTest.TestAlterTableWithValidComment (1953 ms)
[ RUN      ] ClientTest.TestNoDefaultPartitioning
I20250901 14:18:45.920454  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:37349
I20250901 14:18:45.921423  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:45.926682  9391 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:45.926784  9390 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:45.927529  9393 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:45.929992  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:45.931627  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:45.931806  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:45.931916  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736325931906 us; error 0 us; skew 500 ppm
I20250901 14:18:45.932343  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:45.934497  5023 webserver.cc:480] Webserver started at http://127.4.231.254:33321/ using document root <none> and password file <none>
I20250901 14:18:45.934916  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:45.935063  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:45.935266  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:45.936223  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestNoDefaultPartitioning.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "f1da1454de864706963cc3f197729503"
format_stamp: "Formatted at 2025-09-01 14:18:45 on dist-test-slave-9gf0"
I20250901 14:18:45.940352  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:45.943361  9398 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:45.944068  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.001s	sys 0.000s
I20250901 14:18:45.944329  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestNoDefaultPartitioning.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "f1da1454de864706963cc3f197729503"
format_stamp: "Formatted at 2025-09-01 14:18:45 on dist-test-slave-9gf0"
I20250901 14:18:45.944602  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestNoDefaultPartitioning.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestNoDefaultPartitioning.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestNoDefaultPartitioning.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:45.953251  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:45.954370  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:45.993837  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:37349
I20250901 14:18:45.993925  9459 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:37349 every 8 connection(s)
I20250901 14:18:45.997335  9460 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:46.007325  9460 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503: Bootstrap starting.
I20250901 14:18:46.011317  9460 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:46.015055  9460 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503: No bootstrap required, opened a new log
I20250901 14:18:46.016909  9460 raft_consensus.cc:357] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1da1454de864706963cc3f197729503" member_type: VOTER }
I20250901 14:18:46.017275  9460 raft_consensus.cc:383] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:46.017505  9460 raft_consensus.cc:738] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: f1da1454de864706963cc3f197729503, State: Initialized, Role: FOLLOWER
I20250901 14:18:46.018106  9460 consensus_queue.cc:260] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1da1454de864706963cc3f197729503" member_type: VOTER }
I20250901 14:18:46.018553  9460 raft_consensus.cc:397] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:46.018784  9460 raft_consensus.cc:491] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:46.019038  9460 raft_consensus.cc:3058] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:46.023758  9460 raft_consensus.cc:513] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1da1454de864706963cc3f197729503" member_type: VOTER }
I20250901 14:18:46.024269  9460 leader_election.cc:304] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: f1da1454de864706963cc3f197729503; no voters: 
I20250901 14:18:46.025396  9460 leader_election.cc:290] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:46.025786  9463 raft_consensus.cc:2802] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:46.027076  9463 raft_consensus.cc:695] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 1 LEADER]: Becoming Leader. State: Replica: f1da1454de864706963cc3f197729503, State: Running, Role: LEADER
I20250901 14:18:46.027714  9463 consensus_queue.cc:237] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1da1454de864706963cc3f197729503" member_type: VOTER }
I20250901 14:18:46.028303  9460 sys_catalog.cc:564] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:46.030423  9464 sys_catalog.cc:455] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "f1da1454de864706963cc3f197729503" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1da1454de864706963cc3f197729503" member_type: VOTER } }
I20250901 14:18:46.031106  9464 sys_catalog.cc:458] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:46.030433  9465 sys_catalog.cc:455] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [sys.catalog]: SysCatalogTable state changed. Reason: New leader f1da1454de864706963cc3f197729503. Latest consensus state: current_term: 1 leader_uuid: "f1da1454de864706963cc3f197729503" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "f1da1454de864706963cc3f197729503" member_type: VOTER } }
I20250901 14:18:46.031842  9465 sys_catalog.cc:458] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:46.038533  9470 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:46.045912  9470 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:46.046958  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:46.054420  9470 catalog_manager.cc:1349] Generated new cluster ID: 55d44692ede048c4926e0f555b300b34
I20250901 14:18:46.054695  9470 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:46.070534  9470 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:46.071853  9470 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:46.087000  9470 catalog_manager.cc:5955] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503: Generated new TSK 0
I20250901 14:18:46.087585  9470 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:46.113793  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:46.119246  9481 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:46.120409  9482 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:46.124063  9484 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:46.124540  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:46.125339  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:46.125560  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:46.125705  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736326125690 us; error 0 us; skew 500 ppm
I20250901 14:18:46.126194  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:46.128301  5023 webserver.cc:480] Webserver started at http://127.4.231.193:43297/ using document root <none> and password file <none>
I20250901 14:18:46.128746  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:46.128926  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:46.129165  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:46.130371  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestNoDefaultPartitioning.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "9f96cc25696740608d15b4ae709527f0"
format_stamp: "Formatted at 2025-09-01 14:18:46 on dist-test-slave-9gf0"
I20250901 14:18:46.134871  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:46.138049  9489 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:46.138796  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:46.139060  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestNoDefaultPartitioning.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "9f96cc25696740608d15b4ae709527f0"
format_stamp: "Formatted at 2025-09-01 14:18:46 on dist-test-slave-9gf0"
I20250901 14:18:46.139312  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestNoDefaultPartitioning.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestNoDefaultPartitioning.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestNoDefaultPartitioning.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:46.150908  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:46.151913  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:46.156811  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:46.157317  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.001s	user 0.000s	sys 0.001s
I20250901 14:18:46.157707  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:46.157923  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:46.205857  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:35977
I20250901 14:18:46.205937  9559 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:35977 every 8 connection(s)
I20250901 14:18:46.210570  9560 heartbeater.cc:344] Connected to a master server at 127.4.231.254:37349
I20250901 14:18:46.210914  9560 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:46.211607  9560 heartbeater.cc:507] Master 127.4.231.254:37349 requested a full tablet report, sending...
I20250901 14:18:46.213352  9415 ts_manager.cc:194] Registered new tserver with Master: 9f96cc25696740608d15b4ae709527f0 (127.4.231.193:35977)
I20250901 14:18:46.213719  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004526567s
I20250901 14:18:46.215673  9415 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:48168
I20250901 14:18:46.228348  9560 heartbeater.cc:499] Master 127.4.231.254:37349 was elected leader, sending a full tablet report...
I20250901 14:18:46.237159  9415 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:48184:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:46.268384  9524 tablet_service.cc:1468] Processing CreateTablet for tablet bad62b6508f2411db08f3f56bb615381 (DEFAULT_TABLE table=client-testtb [id=07fa4e8a4bc34d15b4e32cebd1178b58]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:46.268405  9523 tablet_service.cc:1468] Processing CreateTablet for tablet 9b1431271db3430b8e50bf2499bdacb4 (DEFAULT_TABLE table=client-testtb [id=07fa4e8a4bc34d15b4e32cebd1178b58]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:46.269426  9524 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet bad62b6508f2411db08f3f56bb615381. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:46.269899  9523 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 9b1431271db3430b8e50bf2499bdacb4. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:46.283581  9570 tablet_bootstrap.cc:492] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0: Bootstrap starting.
I20250901 14:18:46.288183  9570 tablet_bootstrap.cc:654] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:46.291815  9570 tablet_bootstrap.cc:492] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0: No bootstrap required, opened a new log
I20250901 14:18:46.292143  9570 ts_tablet_manager.cc:1397] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0: Time spent bootstrapping tablet: real 0.009s	user 0.007s	sys 0.000s
I20250901 14:18:46.293890  9570 raft_consensus.cc:357] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "9f96cc25696740608d15b4ae709527f0" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35977 } }
I20250901 14:18:46.294274  9570 raft_consensus.cc:383] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:46.294533  9570 raft_consensus.cc:738] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 9f96cc25696740608d15b4ae709527f0, State: Initialized, Role: FOLLOWER
I20250901 14:18:46.295149  9570 consensus_queue.cc:260] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "9f96cc25696740608d15b4ae709527f0" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35977 } }
I20250901 14:18:46.295755  9570 raft_consensus.cc:397] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:46.296051  9570 raft_consensus.cc:491] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:46.296311  9570 raft_consensus.cc:3058] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:46.301610  9570 raft_consensus.cc:513] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "9f96cc25696740608d15b4ae709527f0" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35977 } }
I20250901 14:18:46.302141  9570 leader_election.cc:304] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 9f96cc25696740608d15b4ae709527f0; no voters: 
I20250901 14:18:46.303279  9570 leader_election.cc:290] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:46.303634  9572 raft_consensus.cc:2802] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:46.305783  9570 ts_tablet_manager.cc:1428] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0: Time spent starting tablet: real 0.013s	user 0.011s	sys 0.004s
I20250901 14:18:46.305984  9572 raft_consensus.cc:695] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 1 LEADER]: Becoming Leader. State: Replica: 9f96cc25696740608d15b4ae709527f0, State: Running, Role: LEADER
I20250901 14:18:46.306770  9570 tablet_bootstrap.cc:492] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0: Bootstrap starting.
I20250901 14:18:46.306798  9572 consensus_queue.cc:237] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "9f96cc25696740608d15b4ae709527f0" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35977 } }
I20250901 14:18:46.312240  9570 tablet_bootstrap.cc:654] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:46.314497  9415 catalog_manager.cc:5582] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 reported cstate change: term changed from 0 to 1, leader changed from <none> to 9f96cc25696740608d15b4ae709527f0 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "9f96cc25696740608d15b4ae709527f0" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "9f96cc25696740608d15b4ae709527f0" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35977 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:46.316970  9570 tablet_bootstrap.cc:492] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0: No bootstrap required, opened a new log
I20250901 14:18:46.317395  9570 ts_tablet_manager.cc:1397] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0: Time spent bootstrapping tablet: real 0.011s	user 0.009s	sys 0.000s
I20250901 14:18:46.319495  9570 raft_consensus.cc:357] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "9f96cc25696740608d15b4ae709527f0" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35977 } }
I20250901 14:18:46.319897  9570 raft_consensus.cc:383] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:46.320161  9570 raft_consensus.cc:738] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 9f96cc25696740608d15b4ae709527f0, State: Initialized, Role: FOLLOWER
I20250901 14:18:46.320770  9570 consensus_queue.cc:260] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "9f96cc25696740608d15b4ae709527f0" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35977 } }
I20250901 14:18:46.321230  9570 raft_consensus.cc:397] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:46.321487  9570 raft_consensus.cc:491] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:46.321852  9570 raft_consensus.cc:3058] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:46.328157  9570 raft_consensus.cc:513] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "9f96cc25696740608d15b4ae709527f0" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35977 } }
I20250901 14:18:46.328848  9570 leader_election.cc:304] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 9f96cc25696740608d15b4ae709527f0; no voters: 
I20250901 14:18:46.329398  9570 leader_election.cc:290] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:46.329583  9572 raft_consensus.cc:2802] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:46.330122  9572 raft_consensus.cc:695] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 1 LEADER]: Becoming Leader. State: Replica: 9f96cc25696740608d15b4ae709527f0, State: Running, Role: LEADER
I20250901 14:18:46.330734  9572 consensus_queue.cc:237] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "9f96cc25696740608d15b4ae709527f0" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35977 } }
I20250901 14:18:46.330979  9570 ts_tablet_manager.cc:1428] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0: Time spent starting tablet: real 0.013s	user 0.009s	sys 0.003s
I20250901 14:18:46.336184  9415 catalog_manager.cc:5582] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 reported cstate change: term changed from 0 to 1, leader changed from <none> to 9f96cc25696740608d15b4ae709527f0 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "9f96cc25696740608d15b4ae709527f0" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "9f96cc25696740608d15b4ae709527f0" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35977 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:46.354923  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:46.372792  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:46.373545  5023 tablet_replica.cc:331] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0: stopping tablet replica
I20250901 14:18:46.374111  5023 raft_consensus.cc:2241] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:46.374539  5023 raft_consensus.cc:2270] T 9b1431271db3430b8e50bf2499bdacb4 P 9f96cc25696740608d15b4ae709527f0 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:46.376721  5023 tablet_replica.cc:331] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0: stopping tablet replica
I20250901 14:18:46.377154  5023 raft_consensus.cc:2241] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:46.377565  5023 raft_consensus.cc:2270] T bad62b6508f2411db08f3f56bb615381 P 9f96cc25696740608d15b4ae709527f0 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:46.397486  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:46.407546  5023 master.cc:561] Master@127.4.231.254:37349 shutting down...
I20250901 14:18:46.425644  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:46.426124  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:46.426486  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P f1da1454de864706963cc3f197729503: stopping tablet replica
I20250901 14:18:46.445242  5023 master.cc:583] Master@127.4.231.254:37349 shutdown complete.
[       OK ] ClientTest.TestNoDefaultPartitioning (545 ms)
[ RUN      ] ClientTest.TestInvalidPartitionerBuilder
I20250901 14:18:46.465958  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:37277
I20250901 14:18:46.466970  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:46.471429  9579 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:46.472280  9580 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:46.473102  9582 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:46.474314  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:46.475224  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:46.475394  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:46.475567  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736326475545 us; error 0 us; skew 500 ppm
I20250901 14:18:46.476061  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:46.478287  5023 webserver.cc:480] Webserver started at http://127.4.231.254:45265/ using document root <none> and password file <none>
I20250901 14:18:46.478708  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:46.478953  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:46.479173  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:46.480181  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInvalidPartitionerBuilder.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "8d7dcdc47d6b4849905334dad97029e3"
format_stamp: "Formatted at 2025-09-01 14:18:46 on dist-test-slave-9gf0"
I20250901 14:18:46.484467  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:46.487731  9587 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:46.488512  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:18:46.488773  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInvalidPartitionerBuilder.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "8d7dcdc47d6b4849905334dad97029e3"
format_stamp: "Formatted at 2025-09-01 14:18:46 on dist-test-slave-9gf0"
I20250901 14:18:46.489010  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInvalidPartitionerBuilder.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInvalidPartitionerBuilder.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInvalidPartitionerBuilder.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:46.503291  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:46.504308  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:46.543598  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:37277
I20250901 14:18:46.543690  9648 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:37277 every 8 connection(s)
I20250901 14:18:46.547310  9649 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:46.557650  9649 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3: Bootstrap starting.
I20250901 14:18:46.561771  9649 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:46.565657  9649 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3: No bootstrap required, opened a new log
I20250901 14:18:46.567556  9649 raft_consensus.cc:357] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8d7dcdc47d6b4849905334dad97029e3" member_type: VOTER }
I20250901 14:18:46.567939  9649 raft_consensus.cc:383] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:46.568161  9649 raft_consensus.cc:738] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8d7dcdc47d6b4849905334dad97029e3, State: Initialized, Role: FOLLOWER
I20250901 14:18:46.568661  9649 consensus_queue.cc:260] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8d7dcdc47d6b4849905334dad97029e3" member_type: VOTER }
I20250901 14:18:46.569084  9649 raft_consensus.cc:397] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:46.569286  9649 raft_consensus.cc:491] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:46.569545  9649 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:46.574301  9649 raft_consensus.cc:513] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8d7dcdc47d6b4849905334dad97029e3" member_type: VOTER }
I20250901 14:18:46.574852  9649 leader_election.cc:304] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 8d7dcdc47d6b4849905334dad97029e3; no voters: 
I20250901 14:18:46.575943  9649 leader_election.cc:290] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:46.576232  9652 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:46.577615  9652 raft_consensus.cc:695] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 1 LEADER]: Becoming Leader. State: Replica: 8d7dcdc47d6b4849905334dad97029e3, State: Running, Role: LEADER
I20250901 14:18:46.578256  9652 consensus_queue.cc:237] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8d7dcdc47d6b4849905334dad97029e3" member_type: VOTER }
I20250901 14:18:46.578902  9649 sys_catalog.cc:564] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:46.581079  9653 sys_catalog.cc:455] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "8d7dcdc47d6b4849905334dad97029e3" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8d7dcdc47d6b4849905334dad97029e3" member_type: VOTER } }
I20250901 14:18:46.581188  9654 sys_catalog.cc:455] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [sys.catalog]: SysCatalogTable state changed. Reason: New leader 8d7dcdc47d6b4849905334dad97029e3. Latest consensus state: current_term: 1 leader_uuid: "8d7dcdc47d6b4849905334dad97029e3" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8d7dcdc47d6b4849905334dad97029e3" member_type: VOTER } }
I20250901 14:18:46.581826  9654 sys_catalog.cc:458] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:46.582399  9653 sys_catalog.cc:458] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:46.587149  9658 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:46.591635  9658 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:46.593703  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:46.600466  9658 catalog_manager.cc:1349] Generated new cluster ID: 66735b4ca23f4609b14a50cc7030e5d6
I20250901 14:18:46.600690  9658 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:46.613709  9658 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:46.614950  9658 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:46.629166  9658 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3: Generated new TSK 0
I20250901 14:18:46.629820  9658 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:46.660221  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:46.666090  9670 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:46.667137  9671 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:46.668906  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:46.669083  9673 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:46.670001  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:46.670187  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:46.670347  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736326670329 us; error 0 us; skew 500 ppm
I20250901 14:18:46.670847  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:46.673148  5023 webserver.cc:480] Webserver started at http://127.4.231.193:37203/ using document root <none> and password file <none>
I20250901 14:18:46.673657  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:46.673828  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:46.674073  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:46.675091  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInvalidPartitionerBuilder.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "39600a3b384d4931b326f3db3a37cfd7"
format_stamp: "Formatted at 2025-09-01 14:18:46 on dist-test-slave-9gf0"
I20250901 14:18:46.679394  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:18:46.682613  9678 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:46.683350  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:46.683634  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInvalidPartitionerBuilder.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "39600a3b384d4931b326f3db3a37cfd7"
format_stamp: "Formatted at 2025-09-01 14:18:46 on dist-test-slave-9gf0"
I20250901 14:18:46.683900  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInvalidPartitionerBuilder.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInvalidPartitionerBuilder.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TestInvalidPartitionerBuilder.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:46.698853  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:46.699894  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:46.704532  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:46.704918  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:46.705291  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:46.705577  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:46.753893  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:34609
I20250901 14:18:46.753997  9748 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:34609 every 8 connection(s)
I20250901 14:18:46.758597  9749 heartbeater.cc:344] Connected to a master server at 127.4.231.254:37277
I20250901 14:18:46.758978  9749 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:46.759717  9749 heartbeater.cc:507] Master 127.4.231.254:37277 requested a full tablet report, sending...
I20250901 14:18:46.761452  9604 ts_manager.cc:194] Registered new tserver with Master: 39600a3b384d4931b326f3db3a37cfd7 (127.4.231.193:34609)
I20250901 14:18:46.761727  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004830462s
I20250901 14:18:46.763108  9604 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:57538
I20250901 14:18:46.775524  9749 heartbeater.cc:499] Master 127.4.231.254:37277 was elected leader, sending a full tablet report...
I20250901 14:18:46.782038  9603 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:57558:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:46.815114  9713 tablet_service.cc:1468] Processing CreateTablet for tablet 3ed0c82c3fef46b2b51a99edeee67bb8 (DEFAULT_TABLE table=client-testtb [id=499ba42cf5774d9d877843980e8a4e8d]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:46.815017  9714 tablet_service.cc:1468] Processing CreateTablet for tablet 58c437d299af42ee973b181c32ceada4 (DEFAULT_TABLE table=client-testtb [id=499ba42cf5774d9d877843980e8a4e8d]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:46.816365  9713 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 3ed0c82c3fef46b2b51a99edeee67bb8. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:46.817404  9714 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 58c437d299af42ee973b181c32ceada4. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:46.833611  9759 tablet_bootstrap.cc:492] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7: Bootstrap starting.
I20250901 14:18:46.837730  9759 tablet_bootstrap.cc:654] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:46.841775  9759 tablet_bootstrap.cc:492] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7: No bootstrap required, opened a new log
I20250901 14:18:46.842178  9759 ts_tablet_manager.cc:1397] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7: Time spent bootstrapping tablet: real 0.009s	user 0.004s	sys 0.004s
I20250901 14:18:46.844509  9759 raft_consensus.cc:357] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "39600a3b384d4931b326f3db3a37cfd7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 34609 } }
I20250901 14:18:46.844981  9759 raft_consensus.cc:383] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:46.845170  9759 raft_consensus.cc:738] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 39600a3b384d4931b326f3db3a37cfd7, State: Initialized, Role: FOLLOWER
I20250901 14:18:46.845643  9759 consensus_queue.cc:260] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "39600a3b384d4931b326f3db3a37cfd7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 34609 } }
I20250901 14:18:46.846055  9759 raft_consensus.cc:397] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:46.846237  9759 raft_consensus.cc:491] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:46.846436  9759 raft_consensus.cc:3058] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:46.851519  9759 raft_consensus.cc:513] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "39600a3b384d4931b326f3db3a37cfd7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 34609 } }
I20250901 14:18:46.852017  9759 leader_election.cc:304] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 39600a3b384d4931b326f3db3a37cfd7; no voters: 
I20250901 14:18:46.853060  9759 leader_election.cc:290] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:46.853341  9761 raft_consensus.cc:2802] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:46.855340  9759 ts_tablet_manager.cc:1428] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7: Time spent starting tablet: real 0.013s	user 0.008s	sys 0.004s
I20250901 14:18:46.855456  9761 raft_consensus.cc:695] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 1 LEADER]: Becoming Leader. State: Replica: 39600a3b384d4931b326f3db3a37cfd7, State: Running, Role: LEADER
I20250901 14:18:46.856102  9759 tablet_bootstrap.cc:492] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7: Bootstrap starting.
I20250901 14:18:46.856113  9761 consensus_queue.cc:237] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "39600a3b384d4931b326f3db3a37cfd7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 34609 } }
I20250901 14:18:46.862147  9759 tablet_bootstrap.cc:654] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:46.862468  9603 catalog_manager.cc:5582] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 reported cstate change: term changed from 0 to 1, leader changed from <none> to 39600a3b384d4931b326f3db3a37cfd7 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "39600a3b384d4931b326f3db3a37cfd7" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "39600a3b384d4931b326f3db3a37cfd7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 34609 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:46.866755  9759 tablet_bootstrap.cc:492] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7: No bootstrap required, opened a new log
I20250901 14:18:46.867146  9759 ts_tablet_manager.cc:1397] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7: Time spent bootstrapping tablet: real 0.011s	user 0.005s	sys 0.004s
I20250901 14:18:46.869311  9759 raft_consensus.cc:357] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "39600a3b384d4931b326f3db3a37cfd7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 34609 } }
I20250901 14:18:46.869808  9759 raft_consensus.cc:383] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:46.870025  9759 raft_consensus.cc:738] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 39600a3b384d4931b326f3db3a37cfd7, State: Initialized, Role: FOLLOWER
I20250901 14:18:46.870617  9759 consensus_queue.cc:260] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "39600a3b384d4931b326f3db3a37cfd7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 34609 } }
I20250901 14:18:46.871101  9759 raft_consensus.cc:397] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:46.871320  9759 raft_consensus.cc:491] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:46.871570  9759 raft_consensus.cc:3058] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:46.877012  9759 raft_consensus.cc:513] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "39600a3b384d4931b326f3db3a37cfd7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 34609 } }
I20250901 14:18:46.877632  9759 leader_election.cc:304] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 39600a3b384d4931b326f3db3a37cfd7; no voters: 
I20250901 14:18:46.878114  9759 leader_election.cc:290] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:46.878259  9761 raft_consensus.cc:2802] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:46.878858  9761 raft_consensus.cc:695] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 1 LEADER]: Becoming Leader. State: Replica: 39600a3b384d4931b326f3db3a37cfd7, State: Running, Role: LEADER
I20250901 14:18:46.879544  9761 consensus_queue.cc:237] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "39600a3b384d4931b326f3db3a37cfd7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 34609 } }
I20250901 14:18:46.879945  9759 ts_tablet_manager.cc:1428] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7: Time spent starting tablet: real 0.013s	user 0.013s	sys 0.000s
I20250901 14:18:46.885473  9604 catalog_manager.cc:5582] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 reported cstate change: term changed from 0 to 1, leader changed from <none> to 39600a3b384d4931b326f3db3a37cfd7 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "39600a3b384d4931b326f3db3a37cfd7" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "39600a3b384d4931b326f3db3a37cfd7" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 34609 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:46.898134  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:46.916064  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:46.916728  5023 tablet_replica.cc:331] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7: stopping tablet replica
I20250901 14:18:46.917490  5023 raft_consensus.cc:2241] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:46.918004  5023 raft_consensus.cc:2270] T 58c437d299af42ee973b181c32ceada4 P 39600a3b384d4931b326f3db3a37cfd7 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:46.920151  5023 tablet_replica.cc:331] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7: stopping tablet replica
I20250901 14:18:46.920560  5023 raft_consensus.cc:2241] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:46.920923  5023 raft_consensus.cc:2270] T 3ed0c82c3fef46b2b51a99edeee67bb8 P 39600a3b384d4931b326f3db3a37cfd7 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:46.940467  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:46.950907  5023 master.cc:561] Master@127.4.231.254:37277 shutting down...
I20250901 14:18:46.967419  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:46.967909  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:46.968261  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 8d7dcdc47d6b4849905334dad97029e3: stopping tablet replica
I20250901 14:18:46.977861  5023 master.cc:583] Master@127.4.231.254:37277 shutdown complete.
[       OK ] ClientTest.TestInvalidPartitionerBuilder (533 ms)
[ RUN      ] ClientTest.WritingRowsWithUnsetNonNullableColumns
I20250901 14:18:46.999073  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:46089
I20250901 14:18:47.000077  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:47.004869  9769 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:47.004825  9768 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:47.006320  9771 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:47.007882  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:47.008875  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:47.009055  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:47.009215  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736327009196 us; error 0 us; skew 500 ppm
I20250901 14:18:47.009866  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:47.012043  5023 webserver.cc:480] Webserver started at http://127.4.231.254:37621/ using document root <none> and password file <none>
I20250901 14:18:47.012544  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:47.012740  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:47.012993  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:47.014139  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.WritingRowsWithUnsetNonNullableColumns.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "1d8eb364d61a4a08a8d2394ecc421254"
format_stamp: "Formatted at 2025-09-01 14:18:47 on dist-test-slave-9gf0"
I20250901 14:18:47.018579  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.004s	sys 0.000s
I20250901 14:18:47.021919  9776 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:47.022658  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:47.022936  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.WritingRowsWithUnsetNonNullableColumns.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "1d8eb364d61a4a08a8d2394ecc421254"
format_stamp: "Formatted at 2025-09-01 14:18:47 on dist-test-slave-9gf0"
I20250901 14:18:47.023196  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.WritingRowsWithUnsetNonNullableColumns.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.WritingRowsWithUnsetNonNullableColumns.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.WritingRowsWithUnsetNonNullableColumns.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:47.035920  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:47.036922  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:47.076666  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:46089
I20250901 14:18:47.076753  9837 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:46089 every 8 connection(s)
I20250901 14:18:47.080268  9838 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:47.090319  9838 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254: Bootstrap starting.
I20250901 14:18:47.094384  9838 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:47.098202  9838 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254: No bootstrap required, opened a new log
I20250901 14:18:47.100044  9838 raft_consensus.cc:357] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d8eb364d61a4a08a8d2394ecc421254" member_type: VOTER }
I20250901 14:18:47.100420  9838 raft_consensus.cc:383] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:47.100596  9838 raft_consensus.cc:738] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 1d8eb364d61a4a08a8d2394ecc421254, State: Initialized, Role: FOLLOWER
I20250901 14:18:47.101063  9838 consensus_queue.cc:260] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d8eb364d61a4a08a8d2394ecc421254" member_type: VOTER }
I20250901 14:18:47.101444  9838 raft_consensus.cc:397] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:47.101698  9838 raft_consensus.cc:491] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:47.101909  9838 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:47.106308  9838 raft_consensus.cc:513] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d8eb364d61a4a08a8d2394ecc421254" member_type: VOTER }
I20250901 14:18:47.106768  9838 leader_election.cc:304] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 1d8eb364d61a4a08a8d2394ecc421254; no voters: 
I20250901 14:18:47.107785  9838 leader_election.cc:290] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:47.108083  9841 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:47.109354  9841 raft_consensus.cc:695] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 1 LEADER]: Becoming Leader. State: Replica: 1d8eb364d61a4a08a8d2394ecc421254, State: Running, Role: LEADER
I20250901 14:18:47.110041  9841 consensus_queue.cc:237] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d8eb364d61a4a08a8d2394ecc421254" member_type: VOTER }
I20250901 14:18:47.110602  9838 sys_catalog.cc:564] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:47.112725  9842 sys_catalog.cc:455] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "1d8eb364d61a4a08a8d2394ecc421254" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d8eb364d61a4a08a8d2394ecc421254" member_type: VOTER } }
I20250901 14:18:47.112799  9843 sys_catalog.cc:455] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [sys.catalog]: SysCatalogTable state changed. Reason: New leader 1d8eb364d61a4a08a8d2394ecc421254. Latest consensus state: current_term: 1 leader_uuid: "1d8eb364d61a4a08a8d2394ecc421254" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "1d8eb364d61a4a08a8d2394ecc421254" member_type: VOTER } }
I20250901 14:18:47.113509  9842 sys_catalog.cc:458] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:47.113565  9843 sys_catalog.cc:458] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:47.117344  9847 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:47.121857  9847 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:47.124511  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:47.130648  9847 catalog_manager.cc:1349] Generated new cluster ID: 43d39f4f44774e19b27b7f4782dc7b50
I20250901 14:18:47.130892  9847 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:47.161458  9847 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:47.162860  9847 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:47.180020  9847 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254: Generated new TSK 0
I20250901 14:18:47.180734  9847 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:47.190716  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:47.196213  9859 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:47.197170  9860 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:47.199892  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:47.200019  9862 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:47.200933  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:47.201128  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:47.201282  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736327201264 us; error 0 us; skew 500 ppm
I20250901 14:18:47.201818  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:47.203987  5023 webserver.cc:480] Webserver started at http://127.4.231.193:43743/ using document root <none> and password file <none>
I20250901 14:18:47.204437  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:47.204615  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:47.204859  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:47.205932  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.WritingRowsWithUnsetNonNullableColumns.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "cc26004b1b8c46e295bb66b2d8036eac"
format_stamp: "Formatted at 2025-09-01 14:18:47 on dist-test-slave-9gf0"
I20250901 14:18:47.210193  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.001s
I20250901 14:18:47.213268  9867 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:47.214031  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:18:47.214296  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.WritingRowsWithUnsetNonNullableColumns.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "cc26004b1b8c46e295bb66b2d8036eac"
format_stamp: "Formatted at 2025-09-01 14:18:47 on dist-test-slave-9gf0"
I20250901 14:18:47.214565  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.WritingRowsWithUnsetNonNullableColumns.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.WritingRowsWithUnsetNonNullableColumns.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.WritingRowsWithUnsetNonNullableColumns.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:47.230355  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:47.231345  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:47.235898  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:47.236258  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:47.236642  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:47.236891  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:47.284432  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:44947
I20250901 14:18:47.284540  9937 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:44947 every 8 connection(s)
I20250901 14:18:47.288805  9938 heartbeater.cc:344] Connected to a master server at 127.4.231.254:46089
I20250901 14:18:47.289170  9938 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:47.290015  9938 heartbeater.cc:507] Master 127.4.231.254:46089 requested a full tablet report, sending...
I20250901 14:18:47.291898  9793 ts_manager.cc:194] Registered new tserver with Master: cc26004b1b8c46e295bb66b2d8036eac (127.4.231.193:44947)
I20250901 14:18:47.292306  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004776712s
I20250901 14:18:47.293596  9793 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:52276
I20250901 14:18:47.305933  9938 heartbeater.cc:499] Master 127.4.231.254:46089 was elected leader, sending a full tablet report...
I20250901 14:18:47.312788  9792 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:52300:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:47.343740  9903 tablet_service.cc:1468] Processing CreateTablet for tablet c1349d8c4898464b9027437df7192b1c (DEFAULT_TABLE table=client-testtb [id=f5795ee7ea3f4e7cbe7229d3b0ec78a0]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:47.343922  9902 tablet_service.cc:1468] Processing CreateTablet for tablet 55f2e1f11f24406890dd8f2c218ad038 (DEFAULT_TABLE table=client-testtb [id=f5795ee7ea3f4e7cbe7229d3b0ec78a0]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:47.344748  9903 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet c1349d8c4898464b9027437df7192b1c. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:47.345201  9902 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 55f2e1f11f24406890dd8f2c218ad038. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:47.358155  9948 tablet_bootstrap.cc:492] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac: Bootstrap starting.
I20250901 14:18:47.362947  9948 tablet_bootstrap.cc:654] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:47.366683  9948 tablet_bootstrap.cc:492] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac: No bootstrap required, opened a new log
I20250901 14:18:47.367031  9948 ts_tablet_manager.cc:1397] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac: Time spent bootstrapping tablet: real 0.009s	user 0.007s	sys 0.000s
I20250901 14:18:47.368716  9948 raft_consensus.cc:357] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cc26004b1b8c46e295bb66b2d8036eac" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44947 } }
I20250901 14:18:47.369248  9948 raft_consensus.cc:383] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:47.369551  9948 raft_consensus.cc:738] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: cc26004b1b8c46e295bb66b2d8036eac, State: Initialized, Role: FOLLOWER
I20250901 14:18:47.370134  9948 consensus_queue.cc:260] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cc26004b1b8c46e295bb66b2d8036eac" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44947 } }
I20250901 14:18:47.370618  9948 raft_consensus.cc:397] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:47.370891  9948 raft_consensus.cc:491] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:47.371142  9948 raft_consensus.cc:3058] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:47.376916  9948 raft_consensus.cc:513] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cc26004b1b8c46e295bb66b2d8036eac" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44947 } }
I20250901 14:18:47.377462  9948 leader_election.cc:304] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: cc26004b1b8c46e295bb66b2d8036eac; no voters: 
I20250901 14:18:47.378861  9948 leader_election.cc:290] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:47.379108  9950 raft_consensus.cc:2802] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:47.381508  9948 ts_tablet_manager.cc:1428] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac: Time spent starting tablet: real 0.014s	user 0.015s	sys 0.000s
I20250901 14:18:47.381474  9950 raft_consensus.cc:695] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 1 LEADER]: Becoming Leader. State: Replica: cc26004b1b8c46e295bb66b2d8036eac, State: Running, Role: LEADER
I20250901 14:18:47.382408  9948 tablet_bootstrap.cc:492] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac: Bootstrap starting.
I20250901 14:18:47.382295  9950 consensus_queue.cc:237] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cc26004b1b8c46e295bb66b2d8036eac" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44947 } }
I20250901 14:18:47.388017  9948 tablet_bootstrap.cc:654] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:47.389075  9792 catalog_manager.cc:5582] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac reported cstate change: term changed from 0 to 1, leader changed from <none> to cc26004b1b8c46e295bb66b2d8036eac (127.4.231.193). New cstate: current_term: 1 leader_uuid: "cc26004b1b8c46e295bb66b2d8036eac" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cc26004b1b8c46e295bb66b2d8036eac" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44947 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:47.392480  9948 tablet_bootstrap.cc:492] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac: No bootstrap required, opened a new log
I20250901 14:18:47.392850  9948 ts_tablet_manager.cc:1397] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac: Time spent bootstrapping tablet: real 0.011s	user 0.008s	sys 0.000s
I20250901 14:18:47.394832  9948 raft_consensus.cc:357] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cc26004b1b8c46e295bb66b2d8036eac" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44947 } }
I20250901 14:18:47.395251  9948 raft_consensus.cc:383] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:47.395529  9948 raft_consensus.cc:738] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: cc26004b1b8c46e295bb66b2d8036eac, State: Initialized, Role: FOLLOWER
I20250901 14:18:47.396159  9948 consensus_queue.cc:260] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cc26004b1b8c46e295bb66b2d8036eac" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44947 } }
I20250901 14:18:47.396593  9948 raft_consensus.cc:397] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:47.396824  9948 raft_consensus.cc:491] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:47.397140  9948 raft_consensus.cc:3058] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:47.402122  9948 raft_consensus.cc:513] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cc26004b1b8c46e295bb66b2d8036eac" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44947 } }
I20250901 14:18:47.402665  9948 leader_election.cc:304] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: cc26004b1b8c46e295bb66b2d8036eac; no voters: 
I20250901 14:18:47.403152  9948 leader_election.cc:290] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:47.403294  9950 raft_consensus.cc:2802] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:47.403825  9950 raft_consensus.cc:695] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 1 LEADER]: Becoming Leader. State: Replica: cc26004b1b8c46e295bb66b2d8036eac, State: Running, Role: LEADER
I20250901 14:18:47.404485  9950 consensus_queue.cc:237] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cc26004b1b8c46e295bb66b2d8036eac" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44947 } }
I20250901 14:18:47.404781  9948 ts_tablet_manager.cc:1428] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac: Time spent starting tablet: real 0.012s	user 0.013s	sys 0.000s
I20250901 14:18:47.409768  9792 catalog_manager.cc:5582] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac reported cstate change: term changed from 0 to 1, leader changed from <none> to cc26004b1b8c46e295bb66b2d8036eac (127.4.231.193). New cstate: current_term: 1 leader_uuid: "cc26004b1b8c46e295bb66b2d8036eac" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "cc26004b1b8c46e295bb66b2d8036eac" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 44947 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:47.466130  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:47.484230  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:47.484985  5023 tablet_replica.cc:331] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac: stopping tablet replica
I20250901 14:18:47.485580  5023 raft_consensus.cc:2241] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:47.485997  5023 raft_consensus.cc:2270] T 55f2e1f11f24406890dd8f2c218ad038 P cc26004b1b8c46e295bb66b2d8036eac [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:47.488193  5023 tablet_replica.cc:331] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac: stopping tablet replica
I20250901 14:18:47.488646  5023 raft_consensus.cc:2241] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:47.489059  5023 raft_consensus.cc:2270] T c1349d8c4898464b9027437df7192b1c P cc26004b1b8c46e295bb66b2d8036eac [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:47.509512  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:47.520577  5023 master.cc:561] Master@127.4.231.254:46089 shutting down...
I20250901 14:18:47.537930  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:47.538435  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:47.538836  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 1d8eb364d61a4a08a8d2394ecc421254: stopping tablet replica
I20250901 14:18:47.557662  5023 master.cc:583] Master@127.4.231.254:46089 shutdown complete.
[       OK ] ClientTest.WritingRowsWithUnsetNonNullableColumns (579 ms)
[ RUN      ] ClientTest.TxnCreateSessionAfterCommit
I20250901 14:18:47.578915  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:40359
I20250901 14:18:47.579865  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:47.584496  9959 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:47.585188  9960 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:47.586542  9962 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:47.587538  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:47.588515  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:47.588691  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:47.588841  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736327588823 us; error 0 us; skew 500 ppm
I20250901 14:18:47.589334  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:47.591463  5023 webserver.cc:480] Webserver started at http://127.4.231.254:33459/ using document root <none> and password file <none>
I20250901 14:18:47.591904  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:47.592075  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:47.592312  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:47.593320  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnCreateSessionAfterCommit.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "599dd0d96c8e466281c167892d1441bb"
format_stamp: "Formatted at 2025-09-01 14:18:47 on dist-test-slave-9gf0"
I20250901 14:18:47.597482  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.001s
I20250901 14:18:47.600976  9967 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:47.601812  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:47.602151  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnCreateSessionAfterCommit.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "599dd0d96c8e466281c167892d1441bb"
format_stamp: "Formatted at 2025-09-01 14:18:47 on dist-test-slave-9gf0"
I20250901 14:18:47.602506  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnCreateSessionAfterCommit.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnCreateSessionAfterCommit.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnCreateSessionAfterCommit.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:47.622185  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:47.623149  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:47.663187  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:40359
I20250901 14:18:47.663270 10028 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:40359 every 8 connection(s)
I20250901 14:18:47.666808 10029 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:47.677258 10029 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb: Bootstrap starting.
I20250901 14:18:47.681442 10029 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:47.685745 10029 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb: No bootstrap required, opened a new log
I20250901 14:18:47.687621 10029 raft_consensus.cc:357] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "599dd0d96c8e466281c167892d1441bb" member_type: VOTER }
I20250901 14:18:47.688007 10029 raft_consensus.cc:383] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:47.688241 10029 raft_consensus.cc:738] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 599dd0d96c8e466281c167892d1441bb, State: Initialized, Role: FOLLOWER
I20250901 14:18:47.688827 10029 consensus_queue.cc:260] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "599dd0d96c8e466281c167892d1441bb" member_type: VOTER }
I20250901 14:18:47.689266 10029 raft_consensus.cc:397] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:47.689481 10029 raft_consensus.cc:491] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:47.689759 10029 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:47.694365 10029 raft_consensus.cc:513] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "599dd0d96c8e466281c167892d1441bb" member_type: VOTER }
I20250901 14:18:47.694885 10029 leader_election.cc:304] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 599dd0d96c8e466281c167892d1441bb; no voters: 
I20250901 14:18:47.696061 10029 leader_election.cc:290] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:47.696403 10033 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:47.697791 10033 raft_consensus.cc:695] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 1 LEADER]: Becoming Leader. State: Replica: 599dd0d96c8e466281c167892d1441bb, State: Running, Role: LEADER
I20250901 14:18:47.698371 10033 consensus_queue.cc:237] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "599dd0d96c8e466281c167892d1441bb" member_type: VOTER }
I20250901 14:18:47.698935 10029 sys_catalog.cc:564] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:47.701326 10035 sys_catalog.cc:455] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [sys.catalog]: SysCatalogTable state changed. Reason: New leader 599dd0d96c8e466281c167892d1441bb. Latest consensus state: current_term: 1 leader_uuid: "599dd0d96c8e466281c167892d1441bb" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "599dd0d96c8e466281c167892d1441bb" member_type: VOTER } }
I20250901 14:18:47.701268 10034 sys_catalog.cc:455] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "599dd0d96c8e466281c167892d1441bb" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "599dd0d96c8e466281c167892d1441bb" member_type: VOTER } }
I20250901 14:18:47.702086 10035 sys_catalog.cc:458] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:47.702180 10034 sys_catalog.cc:458] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:47.705080 10038 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:47.709610 10038 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:47.714697  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:47.718091 10038 catalog_manager.cc:1349] Generated new cluster ID: 0c4e0c2630574aa4a0af98ae786f18d7
I20250901 14:18:47.718379 10038 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:47.739974 10038 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:47.741243 10038 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:47.752135 10038 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb: Generated new TSK 0
I20250901 14:18:47.752810 10038 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:47.781569  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:47.787377 10051 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:47.788347 10052 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:47.790084  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:47.790023 10054 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:47.791035  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:47.791216  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:47.791381  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736327791363 us; error 0 us; skew 500 ppm
I20250901 14:18:47.791882  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:47.794160  5023 webserver.cc:480] Webserver started at http://127.4.231.193:38655/ using document root <none> and password file <none>
I20250901 14:18:47.794607  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:47.794776  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:47.795042  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:47.796062  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnCreateSessionAfterCommit.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "813041d3e30a4920b26165b6f92a84ee"
format_stamp: "Formatted at 2025-09-01 14:18:47 on dist-test-slave-9gf0"
I20250901 14:18:47.800398  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.000s	sys 0.006s
I20250901 14:18:47.803642 10059 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:47.804437  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.000s	sys 0.002s
I20250901 14:18:47.804747  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnCreateSessionAfterCommit.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "813041d3e30a4920b26165b6f92a84ee"
format_stamp: "Formatted at 2025-09-01 14:18:47 on dist-test-slave-9gf0"
I20250901 14:18:47.804998  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnCreateSessionAfterCommit.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnCreateSessionAfterCommit.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnCreateSessionAfterCommit.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:47.821529  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:47.822592  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:47.827306  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:47.827601  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:47.827939  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:47.828157  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:47.876551  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:33977
I20250901 14:18:47.876657 10129 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:33977 every 8 connection(s)
I20250901 14:18:47.881398 10130 heartbeater.cc:344] Connected to a master server at 127.4.231.254:40359
I20250901 14:18:47.881817 10130 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:47.882540 10130 heartbeater.cc:507] Master 127.4.231.254:40359 requested a full tablet report, sending...
I20250901 14:18:47.884503  9984 ts_manager.cc:194] Registered new tserver with Master: 813041d3e30a4920b26165b6f92a84ee (127.4.231.193:33977)
I20250901 14:18:47.885352  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.00492169s
I20250901 14:18:47.886574  9984 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:37072
I20250901 14:18:47.899072 10130 heartbeater.cc:499] Master 127.4.231.254:40359 was elected leader, sending a full tablet report...
I20250901 14:18:47.906565  9983 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:37088:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:47.937707 10095 tablet_service.cc:1468] Processing CreateTablet for tablet 9cc5b94adfef4c668824475b070a2108 (DEFAULT_TABLE table=client-testtb [id=70bef4dc14e844d4976d01cd03552309]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:47.938678 10095 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 9cc5b94adfef4c668824475b070a2108. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:47.946451 10094 tablet_service.cc:1468] Processing CreateTablet for tablet d63fb270c85f4e109bfc2bd627d37a7d (DEFAULT_TABLE table=client-testtb [id=70bef4dc14e844d4976d01cd03552309]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:47.947687 10094 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet d63fb270c85f4e109bfc2bd627d37a7d. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:47.951985 10140 tablet_bootstrap.cc:492] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee: Bootstrap starting.
I20250901 14:18:47.957329 10140 tablet_bootstrap.cc:654] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:47.961444 10140 tablet_bootstrap.cc:492] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee: No bootstrap required, opened a new log
I20250901 14:18:47.961890 10140 ts_tablet_manager.cc:1397] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee: Time spent bootstrapping tablet: real 0.010s	user 0.009s	sys 0.000s
I20250901 14:18:47.963816 10140 raft_consensus.cc:357] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:47.964319 10140 raft_consensus.cc:383] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:47.964555 10140 raft_consensus.cc:738] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 813041d3e30a4920b26165b6f92a84ee, State: Initialized, Role: FOLLOWER
I20250901 14:18:47.965031 10140 consensus_queue.cc:260] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:47.965494 10140 raft_consensus.cc:397] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:47.965770 10140 raft_consensus.cc:491] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:47.966019 10140 raft_consensus.cc:3058] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:47.971644 10140 raft_consensus.cc:513] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:47.972204 10140 leader_election.cc:304] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 813041d3e30a4920b26165b6f92a84ee; no voters: 
I20250901 14:18:47.973554 10140 leader_election.cc:290] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:47.973875 10142 raft_consensus.cc:2802] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:47.975339 10142 raft_consensus.cc:695] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 1 LEADER]: Becoming Leader. State: Replica: 813041d3e30a4920b26165b6f92a84ee, State: Running, Role: LEADER
I20250901 14:18:47.976068 10142 consensus_queue.cc:237] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:47.976470 10140 ts_tablet_manager.cc:1428] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee: Time spent starting tablet: real 0.014s	user 0.014s	sys 0.000s
I20250901 14:18:47.977226 10140 tablet_bootstrap.cc:492] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee: Bootstrap starting.
I20250901 14:18:47.981974  9984 catalog_manager.cc:5582] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee reported cstate change: term changed from 0 to 1, leader changed from <none> to 813041d3e30a4920b26165b6f92a84ee (127.4.231.193). New cstate: current_term: 1 leader_uuid: "813041d3e30a4920b26165b6f92a84ee" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:47.983711 10140 tablet_bootstrap.cc:654] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:47.988471 10140 tablet_bootstrap.cc:492] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee: No bootstrap required, opened a new log
I20250901 14:18:47.988922 10140 ts_tablet_manager.cc:1397] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee: Time spent bootstrapping tablet: real 0.012s	user 0.009s	sys 0.000s
I20250901 14:18:47.990897 10140 raft_consensus.cc:357] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:47.991293 10140 raft_consensus.cc:383] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:47.991526 10140 raft_consensus.cc:738] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 813041d3e30a4920b26165b6f92a84ee, State: Initialized, Role: FOLLOWER
I20250901 14:18:47.992069 10140 consensus_queue.cc:260] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:47.992511 10140 raft_consensus.cc:397] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:47.992743 10140 raft_consensus.cc:491] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:47.992992 10140 raft_consensus.cc:3058] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:47.997937 10140 raft_consensus.cc:513] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:47.998479 10140 leader_election.cc:304] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 813041d3e30a4920b26165b6f92a84ee; no voters: 
I20250901 14:18:47.998930 10140 leader_election.cc:290] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:47.999100 10142 raft_consensus.cc:2802] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:47.999565 10142 raft_consensus.cc:695] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 1 LEADER]: Becoming Leader. State: Replica: 813041d3e30a4920b26165b6f92a84ee, State: Running, Role: LEADER
I20250901 14:18:48.000231 10142 consensus_queue.cc:237] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:48.000435 10140 ts_tablet_manager.cc:1428] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee: Time spent starting tablet: real 0.011s	user 0.008s	sys 0.004s
I20250901 14:18:48.009007  9984 catalog_manager.cc:5582] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee reported cstate change: term changed from 0 to 1, leader changed from <none> to 813041d3e30a4920b26165b6f92a84ee (127.4.231.193). New cstate: current_term: 1 leader_uuid: "813041d3e30a4920b26165b6f92a84ee" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:48.048377  9984 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:37098:
name: "kudu_system.kudu_transactions"
schema {
  columns {
    name: "txn_id"
    type: INT64
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "entry_type"
    type: INT8
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "identifier"
    type: STRING
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "metadata"
    type: STRING
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\006\001\000\000\000\000\000\000\000\000\007\001@B\017\000\000\000\000\000""\006\001\000\000\000\000\000\000\000\000\007\001@B\017\000\000\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "txn_id"
    }
  }
}
table_type: TXN_STATUS_TABLE
I20250901 14:18:48.069110 10094 tablet_service.cc:1468] Processing CreateTablet for tablet 918a0d8e88074451a91c48a20a1ba35c (TXN_STATUS_TABLE table=kudu_system.kudu_transactions [id=2df649346ead4ae7bf67c5db9a17fd32]), partition=RANGE (txn_id) PARTITION 0 <= VALUES < 1000000
I20250901 14:18:48.070361 10094 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 918a0d8e88074451a91c48a20a1ba35c. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:48.083853 10140 tablet_bootstrap.cc:492] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee: Bootstrap starting.
I20250901 14:18:48.088336 10140 tablet_bootstrap.cc:654] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:48.093447 10140 tablet_bootstrap.cc:492] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee: No bootstrap required, opened a new log
I20250901 14:18:48.093887 10140 ts_tablet_manager.cc:1397] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee: Time spent bootstrapping tablet: real 0.010s	user 0.005s	sys 0.004s
I20250901 14:18:48.095817 10140 raft_consensus.cc:357] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:48.096241 10140 raft_consensus.cc:383] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:48.096459 10140 raft_consensus.cc:738] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 813041d3e30a4920b26165b6f92a84ee, State: Initialized, Role: FOLLOWER
I20250901 14:18:48.096971 10140 consensus_queue.cc:260] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:48.097615 10140 raft_consensus.cc:397] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:48.097851 10140 raft_consensus.cc:491] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:48.098099 10140 raft_consensus.cc:3058] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:48.103219 10140 raft_consensus.cc:513] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:48.103834 10140 leader_election.cc:304] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 813041d3e30a4920b26165b6f92a84ee; no voters: 
I20250901 14:18:48.104352 10140 leader_election.cc:290] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:48.104525 10142 raft_consensus.cc:2802] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:48.104997 10142 raft_consensus.cc:695] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 1 LEADER]: Becoming Leader. State: Replica: 813041d3e30a4920b26165b6f92a84ee, State: Running, Role: LEADER
I20250901 14:18:48.105772 10142 consensus_queue.cc:237] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } }
I20250901 14:18:48.106321 10140 ts_tablet_manager.cc:1428] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee: Time spent starting tablet: real 0.012s	user 0.012s	sys 0.000s
I20250901 14:18:48.110430 10143 tablet_replica.cc:440] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee: TxnStatusTablet state changed. Reason: New leader 813041d3e30a4920b26165b6f92a84ee. Latest consensus state: current_term: 1 leader_uuid: "813041d3e30a4920b26165b6f92a84ee" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } } }
I20250901 14:18:48.111063 10143 tablet_replica.cc:443] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee: This TxnStatusTablet replica's current role is: LEADER
I20250901 14:18:48.110905 10144 tablet_replica.cc:440] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee: TxnStatusTablet state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "813041d3e30a4920b26165b6f92a84ee" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } } }
I20250901 14:18:48.111470 10144 tablet_replica.cc:443] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee: This TxnStatusTablet replica's current role is: LEADER
I20250901 14:18:48.112541 10156 txn_status_manager.cc:874] Waiting until node catch up with all replicated operations in previous term...
I20250901 14:18:48.112941 10156 txn_status_manager.cc:930] Loading transaction status metadata into memory...
I20250901 14:18:48.113305  9984 catalog_manager.cc:5582] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee reported cstate change: term changed from 0 to 1, leader changed from <none> to 813041d3e30a4920b26165b6f92a84ee (127.4.231.193). New cstate: current_term: 1 leader_uuid: "813041d3e30a4920b26165b6f92a84ee" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "813041d3e30a4920b26165b6f92a84ee" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 33977 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:48.369272  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:18:48.399398  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:18:48.400588  5023 tablet_replica.cc:331] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee: stopping tablet replica
I20250901 14:18:48.401357  5023 raft_consensus.cc:2241] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:48.401973  5023 raft_consensus.cc:2270] T 918a0d8e88074451a91c48a20a1ba35c P 813041d3e30a4920b26165b6f92a84ee [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:48.404855  5023 tablet_replica.cc:331] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee: stopping tablet replica
I20250901 14:18:48.405519  5023 raft_consensus.cc:2241] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:48.406039  5023 raft_consensus.cc:2270] T d63fb270c85f4e109bfc2bd627d37a7d P 813041d3e30a4920b26165b6f92a84ee [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:48.408737  5023 tablet_replica.cc:331] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee: stopping tablet replica
I20250901 14:18:48.409329  5023 raft_consensus.cc:2241] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:48.409844  5023 raft_consensus.cc:2270] T 9cc5b94adfef4c668824475b070a2108 P 813041d3e30a4920b26165b6f92a84ee [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:48.435842  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:18:48.449558  5023 master.cc:561] Master@127.4.231.254:40359 shutting down...
I20250901 14:18:48.476413  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:18:48.477079  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:18:48.477466  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 599dd0d96c8e466281c167892d1441bb: stopping tablet replica
I20250901 14:18:48.489071  5023 master.cc:583] Master@127.4.231.254:40359 shutdown complete.
[       OK ] ClientTest.TxnCreateSessionAfterCommit (941 ms)
[ RUN      ] ClientTest.TxnKeepAlive
I20250901 14:18:48.522068  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:34143
I20250901 14:18:48.523411  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:48.529306 10164 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:48.529330 10165 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:48.537111  5023 server_base.cc:1047] running on GCE node
W20250901 14:18:48.537276 10167 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:48.538347  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:48.538614  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:48.538801  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736328538779 us; error 0 us; skew 500 ppm
I20250901 14:18:48.539474  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:48.542201  5023 webserver.cc:480] Webserver started at http://127.4.231.254:39819/ using document root <none> and password file <none>
I20250901 14:18:48.542790  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:48.543016  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:48.543324  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:48.544732  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnKeepAlive.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "c6f6cf426d6642a8ae1e6408836bcdc2"
format_stamp: "Formatted at 2025-09-01 14:18:48 on dist-test-slave-9gf0"
I20250901 14:18:48.550498  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.005s	user 0.005s	sys 0.003s
I20250901 14:18:48.554697 10172 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:48.555513  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.001s
I20250901 14:18:48.555785  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnKeepAlive.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "c6f6cf426d6642a8ae1e6408836bcdc2"
format_stamp: "Formatted at 2025-09-01 14:18:48 on dist-test-slave-9gf0"
I20250901 14:18:48.556049  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnKeepAlive.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnKeepAlive.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnKeepAlive.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:48.565701  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:48.566653  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:48.620357  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:34143
I20250901 14:18:48.620461 10233 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:34143 every 8 connection(s)
I20250901 14:18:48.624749 10234 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:48.640393 10234 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2: Bootstrap starting.
I20250901 14:18:48.644855 10234 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:48.648958 10234 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2: No bootstrap required, opened a new log
I20250901 14:18:48.650966 10234 raft_consensus.cc:357] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c6f6cf426d6642a8ae1e6408836bcdc2" member_type: VOTER }
I20250901 14:18:48.651396 10234 raft_consensus.cc:383] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:48.651624 10234 raft_consensus.cc:738] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: c6f6cf426d6642a8ae1e6408836bcdc2, State: Initialized, Role: FOLLOWER
I20250901 14:18:48.652134 10234 consensus_queue.cc:260] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c6f6cf426d6642a8ae1e6408836bcdc2" member_type: VOTER }
I20250901 14:18:48.652595 10234 raft_consensus.cc:397] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:48.652814 10234 raft_consensus.cc:491] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:48.653061 10234 raft_consensus.cc:3058] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:48.657884 10234 raft_consensus.cc:513] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c6f6cf426d6642a8ae1e6408836bcdc2" member_type: VOTER }
I20250901 14:18:48.658411 10234 leader_election.cc:304] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: c6f6cf426d6642a8ae1e6408836bcdc2; no voters: 
I20250901 14:18:48.659744 10234 leader_election.cc:290] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:48.660077 10237 raft_consensus.cc:2802] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:48.661973 10237 raft_consensus.cc:695] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 1 LEADER]: Becoming Leader. State: Replica: c6f6cf426d6642a8ae1e6408836bcdc2, State: Running, Role: LEADER
I20250901 14:18:48.662901 10237 consensus_queue.cc:237] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c6f6cf426d6642a8ae1e6408836bcdc2" member_type: VOTER }
I20250901 14:18:48.665195 10234 sys_catalog.cc:564] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:18:48.667241 10239 sys_catalog.cc:455] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "c6f6cf426d6642a8ae1e6408836bcdc2" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c6f6cf426d6642a8ae1e6408836bcdc2" member_type: VOTER } }
I20250901 14:18:48.667783 10239 sys_catalog.cc:458] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:48.669088 10238 sys_catalog.cc:455] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [sys.catalog]: SysCatalogTable state changed. Reason: New leader c6f6cf426d6642a8ae1e6408836bcdc2. Latest consensus state: current_term: 1 leader_uuid: "c6f6cf426d6642a8ae1e6408836bcdc2" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c6f6cf426d6642a8ae1e6408836bcdc2" member_type: VOTER } }
I20250901 14:18:48.669699 10238 sys_catalog.cc:458] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [sys.catalog]: This master's current role is: LEADER
I20250901 14:18:48.671953 10243 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:18:48.677800 10243 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:18:48.683365  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:18:48.687386 10243 catalog_manager.cc:1349] Generated new cluster ID: def740ef21694f249d230de1f7d03d84
I20250901 14:18:48.687704 10243 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:18:48.706647 10243 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:18:48.708330 10243 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:18:48.724094 10243 catalog_manager.cc:5955] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2: Generated new TSK 0
I20250901 14:18:48.724822 10243 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:18:48.750881  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:18:48.757687 10255 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:48.759258 10256 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:18:48.762171 10258 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:18:48.762373  5023 server_base.cc:1047] running on GCE node
I20250901 14:18:48.763511  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:18:48.763708  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:18:48.763875  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736328763856 us; error 0 us; skew 500 ppm
I20250901 14:18:48.764364  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:18:48.767068  5023 webserver.cc:480] Webserver started at http://127.4.231.193:42009/ using document root <none> and password file <none>
I20250901 14:18:48.767516  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:18:48.767684  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:18:48.767946  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:18:48.769014  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnKeepAlive.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "7e247ce72bd84fffae621aa1392c4925"
format_stamp: "Formatted at 2025-09-01 14:18:48 on dist-test-slave-9gf0"
I20250901 14:18:48.773638  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.006s	sys 0.000s
I20250901 14:18:48.777079 10263 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:48.777974  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:18:48.778321  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnKeepAlive.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "7e247ce72bd84fffae621aa1392c4925"
format_stamp: "Formatted at 2025-09-01 14:18:48 on dist-test-slave-9gf0"
I20250901 14:18:48.778687  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnKeepAlive.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnKeepAlive.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTest.TxnKeepAlive.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:18:48.793501  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:18:48.794870  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:18:48.807231  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:18:48.807529  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:48.807822  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:18:48.808023  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:18:48.879681  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:40229
I20250901 14:18:48.879757 10333 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:40229 every 8 connection(s)
I20250901 14:18:48.885399 10334 heartbeater.cc:344] Connected to a master server at 127.4.231.254:34143
I20250901 14:18:48.885818 10334 heartbeater.cc:461] Registering TS with master...
I20250901 14:18:48.886512 10334 heartbeater.cc:507] Master 127.4.231.254:34143 requested a full tablet report, sending...
I20250901 14:18:48.888258 10189 ts_manager.cc:194] Registered new tserver with Master: 7e247ce72bd84fffae621aa1392c4925 (127.4.231.193:40229)
I20250901 14:18:48.888669  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004849686s
I20250901 14:18:48.890573 10189 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:43470
I20250901 14:18:48.903410 10334 heartbeater.cc:499] Master 127.4.231.254:34143 was elected leader, sending a full tablet report...
I20250901 14:18:48.911793 10188 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:43478:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:18:48.950248 10299 tablet_service.cc:1468] Processing CreateTablet for tablet dd6ed06d153241d8989fe54a8850cc0d (DEFAULT_TABLE table=client-testtb [id=e98e3a82d40d422181f5aff1193e022a]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:18:48.950582 10298 tablet_service.cc:1468] Processing CreateTablet for tablet 569a3280983c449983d95282a9619c0b (DEFAULT_TABLE table=client-testtb [id=e98e3a82d40d422181f5aff1193e022a]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:18:48.951293 10299 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet dd6ed06d153241d8989fe54a8850cc0d. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:48.951927 10298 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 569a3280983c449983d95282a9619c0b. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:48.970148 10344 tablet_bootstrap.cc:492] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925: Bootstrap starting.
I20250901 14:18:48.975906 10344 tablet_bootstrap.cc:654] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:48.980176 10344 tablet_bootstrap.cc:492] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925: No bootstrap required, opened a new log
I20250901 14:18:48.980623 10344 ts_tablet_manager.cc:1397] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925: Time spent bootstrapping tablet: real 0.011s	user 0.008s	sys 0.000s
I20250901 14:18:48.982524 10344 raft_consensus.cc:357] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:48.982959 10344 raft_consensus.cc:383] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:48.983170 10344 raft_consensus.cc:738] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 7e247ce72bd84fffae621aa1392c4925, State: Initialized, Role: FOLLOWER
I20250901 14:18:48.983659 10344 consensus_queue.cc:260] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:48.984203 10344 raft_consensus.cc:397] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:48.984429 10344 raft_consensus.cc:491] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:48.984694 10344 raft_consensus.cc:3058] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:48.990151 10344 raft_consensus.cc:513] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:48.990847 10344 leader_election.cc:304] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 7e247ce72bd84fffae621aa1392c4925; no voters: 
I20250901 14:18:48.992185 10344 leader_election.cc:290] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:48.992548 10346 raft_consensus.cc:2802] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:48.994073 10346 raft_consensus.cc:695] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 1 LEADER]: Becoming Leader. State: Replica: 7e247ce72bd84fffae621aa1392c4925, State: Running, Role: LEADER
I20250901 14:18:48.994994 10346 consensus_queue.cc:237] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:49.001663 10344 ts_tablet_manager.cc:1428] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925: Time spent starting tablet: real 0.021s	user 0.014s	sys 0.008s
I20250901 14:18:49.002972 10344 tablet_bootstrap.cc:492] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925: Bootstrap starting.
I20250901 14:18:49.007014 10188 catalog_manager.cc:5582] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 reported cstate change: term changed from 0 to 1, leader changed from <none> to 7e247ce72bd84fffae621aa1392c4925 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "7e247ce72bd84fffae621aa1392c4925" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:49.008814 10344 tablet_bootstrap.cc:654] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:49.015722 10344 tablet_bootstrap.cc:492] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925: No bootstrap required, opened a new log
I20250901 14:18:49.016160 10344 ts_tablet_manager.cc:1397] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925: Time spent bootstrapping tablet: real 0.013s	user 0.010s	sys 0.000s
I20250901 14:18:49.018643 10344 raft_consensus.cc:357] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:49.019243 10344 raft_consensus.cc:383] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:49.019541 10344 raft_consensus.cc:738] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 7e247ce72bd84fffae621aa1392c4925, State: Initialized, Role: FOLLOWER
I20250901 14:18:49.020193 10344 consensus_queue.cc:260] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:49.020831 10344 raft_consensus.cc:397] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:49.021123 10344 raft_consensus.cc:491] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:49.021456 10344 raft_consensus.cc:3058] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:49.026515 10344 raft_consensus.cc:513] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:49.027093 10344 leader_election.cc:304] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 7e247ce72bd84fffae621aa1392c4925; no voters: 
I20250901 14:18:49.027570 10344 leader_election.cc:290] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:49.027735 10346 raft_consensus.cc:2802] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:49.028246 10346 raft_consensus.cc:695] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 1 LEADER]: Becoming Leader. State: Replica: 7e247ce72bd84fffae621aa1392c4925, State: Running, Role: LEADER
I20250901 14:18:49.028985 10346 consensus_queue.cc:237] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:49.029896 10344 ts_tablet_manager.cc:1428] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925: Time spent starting tablet: real 0.013s	user 0.015s	sys 0.000s
I20250901 14:18:49.039324 10188 catalog_manager.cc:5582] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 reported cstate change: term changed from 0 to 1, leader changed from <none> to 7e247ce72bd84fffae621aa1392c4925 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "7e247ce72bd84fffae621aa1392c4925" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:49.086031 10189 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:43492:
name: "kudu_system.kudu_transactions"
schema {
  columns {
    name: "txn_id"
    type: INT64
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "entry_type"
    type: INT8
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "identifier"
    type: STRING
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "metadata"
    type: STRING
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\006\001\000\000\000\000\000\000\000\000\007\001@B\017\000\000\000\000\000""\006\001\000\000\000\000\000\000\000\000\007\001@B\017\000\000\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "txn_id"
    }
  }
}
table_type: TXN_STATUS_TABLE
I20250901 14:18:49.107971 10298 tablet_service.cc:1468] Processing CreateTablet for tablet 1343add90ff14d8b924ea673c72bdbdf (TXN_STATUS_TABLE table=kudu_system.kudu_transactions [id=48431ad3b7154e968ef3a7fb4af1b4a0]), partition=RANGE (txn_id) PARTITION 0 <= VALUES < 1000000
I20250901 14:18:49.109047 10298 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 1343add90ff14d8b924ea673c72bdbdf. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:18:49.121304 10344 tablet_bootstrap.cc:492] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925: Bootstrap starting.
I20250901 14:18:49.126179 10344 tablet_bootstrap.cc:654] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925: Neither blocks nor log segments found. Creating new log.
I20250901 14:18:49.130944 10344 tablet_bootstrap.cc:492] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925: No bootstrap required, opened a new log
I20250901 14:18:49.131414 10344 ts_tablet_manager.cc:1397] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925: Time spent bootstrapping tablet: real 0.010s	user 0.005s	sys 0.004s
I20250901 14:18:49.133973 10344 raft_consensus.cc:357] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:49.134433 10344 raft_consensus.cc:383] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:18:49.134737 10344 raft_consensus.cc:738] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 7e247ce72bd84fffae621aa1392c4925, State: Initialized, Role: FOLLOWER
I20250901 14:18:49.135389 10344 consensus_queue.cc:260] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:49.135973 10344 raft_consensus.cc:397] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:18:49.136190 10344 raft_consensus.cc:491] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:18:49.136437 10344 raft_consensus.cc:3058] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:18:49.141631 10344 raft_consensus.cc:513] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:49.142195 10344 leader_election.cc:304] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 7e247ce72bd84fffae621aa1392c4925; no voters: 
I20250901 14:18:49.142648 10344 leader_election.cc:290] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:18:49.142836 10346 raft_consensus.cc:2802] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:18:49.143325 10346 raft_consensus.cc:695] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 1 LEADER]: Becoming Leader. State: Replica: 7e247ce72bd84fffae621aa1392c4925, State: Running, Role: LEADER
I20250901 14:18:49.144028 10346 consensus_queue.cc:237] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } }
I20250901 14:18:49.144456 10344 ts_tablet_manager.cc:1428] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925: Time spent starting tablet: real 0.013s	user 0.011s	sys 0.000s
I20250901 14:18:49.147871 10348 tablet_replica.cc:440] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925: TxnStatusTablet state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "7e247ce72bd84fffae621aa1392c4925" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } } }
I20250901 14:18:49.148499 10348 tablet_replica.cc:443] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925: This TxnStatusTablet replica's current role is: LEADER
I20250901 14:18:49.149966 10360 txn_status_manager.cc:874] Waiting until node catch up with all replicated operations in previous term...
I20250901 14:18:49.150563 10360 txn_status_manager.cc:930] Loading transaction status metadata into memory...
I20250901 14:18:49.153007 10347 tablet_replica.cc:440] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925: TxnStatusTablet state changed. Reason: New leader 7e247ce72bd84fffae621aa1392c4925. Latest consensus state: current_term: 1 leader_uuid: "7e247ce72bd84fffae621aa1392c4925" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } } }
I20250901 14:18:49.153628 10347 tablet_replica.cc:443] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925: This TxnStatusTablet replica's current role is: LEADER
I20250901 14:18:49.157799 10188 catalog_manager.cc:5582] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 reported cstate change: term changed from 0 to 1, leader changed from <none> to 7e247ce72bd84fffae621aa1392c4925 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "7e247ce72bd84fffae621aa1392c4925" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7e247ce72bd84fffae621aa1392c4925" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 40229 } health_report { overall_health: HEALTHY } } }
I20250901 14:18:53.926389 10272 txn_status_manager.cc:1391] automatically aborted stale txn (ID 1) past 1.667s from last keepalive heartbeat (effective timeout is 1.500s)
I20250901 14:18:56.942642 10272 txn_status_manager.cc:1391] automatically aborted stale txn (ID 2) past 1.664s from last keepalive heartbeat (effective timeout is 1.500s)
I20250901 14:19:01.315644  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:19:01.365815  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:01.366763  5023 tablet_replica.cc:331] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925: stopping tablet replica
I20250901 14:19:01.367810  5023 raft_consensus.cc:2241] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:01.368737  5023 raft_consensus.cc:2270] T 1343add90ff14d8b924ea673c72bdbdf P 7e247ce72bd84fffae621aa1392c4925 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:01.372313  5023 tablet_replica.cc:331] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925: stopping tablet replica
I20250901 14:19:01.372893  5023 raft_consensus.cc:2241] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:01.373497  5023 raft_consensus.cc:2270] T dd6ed06d153241d8989fe54a8850cc0d P 7e247ce72bd84fffae621aa1392c4925 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:01.376003  5023 tablet_replica.cc:331] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925: stopping tablet replica
I20250901 14:19:01.376492  5023 raft_consensus.cc:2241] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:01.377063  5023 raft_consensus.cc:2270] T 569a3280983c449983d95282a9619c0b P 7e247ce72bd84fffae621aa1392c4925 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:01.392022  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:19:01.412026  5023 master.cc:561] Master@127.4.231.254:34143 shutting down...
I20250901 14:19:01.443253  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:01.443917  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:01.444216  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P c6f6cf426d6642a8ae1e6408836bcdc2: stopping tablet replica
I20250901 14:19:01.463229  5023 master.cc:583] Master@127.4.231.254:34143 shutdown complete.
[       OK ] ClientTest.TxnKeepAlive (12985 ms)
[----------] 21 tests from ClientTest (50369 ms total)

[----------] 1 test from ClientTestImmutableColumnCompatibility
[ RUN      ] ClientTestImmutableColumnCompatibility.CreateTable
I20250901 14:19:01.506673  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:40699
I20250901 14:19:01.507915  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:01.514063 10394 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:01.514115 10395 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:01.514748 10397 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:01.516136  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:01.517000  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:01.517175  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:01.517287  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736341517276 us; error 0 us; skew 500 ppm
I20250901 14:19:01.517822  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:01.520176  5023 webserver.cc:480] Webserver started at http://127.4.231.254:39781/ using document root <none> and password file <none>
I20250901 14:19:01.520609  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:01.520776  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:01.520984  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:01.522176  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestImmutableColumnCompatibility.CreateTable.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "8f64523ce2594418a845c5d92007fe0a"
format_stamp: "Formatted at 2025-09-01 14:19:01 on dist-test-slave-9gf0"
I20250901 14:19:01.526763  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.001s	sys 0.004s
I20250901 14:19:01.530313 10402 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:01.531157  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:19:01.531409  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestImmutableColumnCompatibility.CreateTable.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "8f64523ce2594418a845c5d92007fe0a"
format_stamp: "Formatted at 2025-09-01 14:19:01 on dist-test-slave-9gf0"
I20250901 14:19:01.531673  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestImmutableColumnCompatibility.CreateTable.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestImmutableColumnCompatibility.CreateTable.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestImmutableColumnCompatibility.CreateTable.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:01.544991  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:01.546283  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:01.584381  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:40699
I20250901 14:19:01.584474 10453 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:40699 every 8 connection(s)
I20250901 14:19:01.588263 10454 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:01.599457 10454 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a: Bootstrap starting.
I20250901 14:19:01.604164 10454 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:01.608422 10454 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a: No bootstrap required, opened a new log
I20250901 14:19:01.610654 10454 raft_consensus.cc:357] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8f64523ce2594418a845c5d92007fe0a" member_type: VOTER }
I20250901 14:19:01.611088 10454 raft_consensus.cc:383] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:01.611333 10454 raft_consensus.cc:738] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 8f64523ce2594418a845c5d92007fe0a, State: Initialized, Role: FOLLOWER
I20250901 14:19:01.611876 10454 consensus_queue.cc:260] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8f64523ce2594418a845c5d92007fe0a" member_type: VOTER }
I20250901 14:19:01.612310 10454 raft_consensus.cc:397] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:01.612542 10454 raft_consensus.cc:491] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:01.612819 10454 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:01.617837 10454 raft_consensus.cc:513] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8f64523ce2594418a845c5d92007fe0a" member_type: VOTER }
I20250901 14:19:01.618359 10454 leader_election.cc:304] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 8f64523ce2594418a845c5d92007fe0a; no voters: 
I20250901 14:19:01.619546 10454 leader_election.cc:290] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:01.619874 10457 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:01.621245 10457 raft_consensus.cc:695] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 1 LEADER]: Becoming Leader. State: Replica: 8f64523ce2594418a845c5d92007fe0a, State: Running, Role: LEADER
I20250901 14:19:01.621955 10457 consensus_queue.cc:237] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8f64523ce2594418a845c5d92007fe0a" member_type: VOTER }
I20250901 14:19:01.622553 10454 sys_catalog.cc:564] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:19:01.626691 10459 sys_catalog.cc:455] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [sys.catalog]: SysCatalogTable state changed. Reason: New leader 8f64523ce2594418a845c5d92007fe0a. Latest consensus state: current_term: 1 leader_uuid: "8f64523ce2594418a845c5d92007fe0a" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8f64523ce2594418a845c5d92007fe0a" member_type: VOTER } }
I20250901 14:19:01.627315 10458 sys_catalog.cc:455] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "8f64523ce2594418a845c5d92007fe0a" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "8f64523ce2594418a845c5d92007fe0a" member_type: VOTER } }
I20250901 14:19:01.627640 10459 sys_catalog.cc:458] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:01.629551 10458 sys_catalog.cc:458] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:01.629871 10464 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:19:01.634924 10464 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:19:01.639633  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:19:01.644006 10464 catalog_manager.cc:1349] Generated new cluster ID: ed2ad4e200e1437996726889ec7a9540
I20250901 14:19:01.644266 10464 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:19:01.679381 10464 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:19:01.680598 10464 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:19:01.696177 10464 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a: Generated new TSK 0
I20250901 14:19:01.696722 10464 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:19:01.707384  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:01.713476 10475 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:01.714884 10476 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:01.716084 10478 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:01.716629  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:01.717483  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:01.717715  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:01.717871  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736341717855 us; error 0 us; skew 500 ppm
I20250901 14:19:01.718374  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:01.720649  5023 webserver.cc:480] Webserver started at http://127.4.231.193:37111/ using document root <none> and password file <none>
I20250901 14:19:01.721136  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:01.721314  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:01.721607  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:01.722842  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestImmutableColumnCompatibility.CreateTable.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "bc57a1241f70420dabd553eccd451207"
format_stamp: "Formatted at 2025-09-01 14:19:01 on dist-test-slave-9gf0"
I20250901 14:19:01.727618  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:19:01.730958 10483 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:01.731793  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.000s	sys 0.001s
I20250901 14:19:01.732089  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestImmutableColumnCompatibility.CreateTable.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "bc57a1241f70420dabd553eccd451207"
format_stamp: "Formatted at 2025-09-01 14:19:01 on dist-test-slave-9gf0"
I20250901 14:19:01.732380  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestImmutableColumnCompatibility.CreateTable.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestImmutableColumnCompatibility.CreateTable.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestImmutableColumnCompatibility.CreateTable.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:01.746974  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:01.748174  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:01.749769  5023 txn_system_client.cc:432] TxnSystemClient initialization is disabled...
I20250901 14:19:01.752079  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:01.752275  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:01.752506  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:01.752677  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:01.793447  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:35967
I20250901 14:19:01.793625 10545 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:35967 every 8 connection(s)
I20250901 14:19:01.808635 10546 heartbeater.cc:344] Connected to a master server at 127.4.231.254:40699
I20250901 14:19:01.809043 10546 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:01.809762 10546 heartbeater.cc:507] Master 127.4.231.254:40699 requested a full tablet report, sending...
I20250901 14:19:01.811722 10419 ts_manager.cc:194] Registered new tserver with Master: bc57a1241f70420dabd553eccd451207 (127.4.231.193:35967)
I20250901 14:19:01.812034  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.015258466s
I20250901 14:19:01.813686 10419 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:49620
W20250901 14:19:01.834489 10403 messenger.cc:365] Unable to handle RPC call: Not implemented: call requires unsupported application feature flags: 10
I20250901 14:19:01.836540  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:19:01.852463  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:01.868139  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:19:01.875537  5023 master.cc:561] Master@127.4.231.254:40699 shutting down...
I20250901 14:19:01.889377  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:01.889951  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:01.890337  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 8f64523ce2594418a845c5d92007fe0a: stopping tablet replica
I20250901 14:19:01.908499  5023 master.cc:583] Master@127.4.231.254:40699 shutdown complete.
[       OK ] ClientTestImmutableColumnCompatibility.CreateTable (421 ms)
[----------] 1 test from ClientTestImmutableColumnCompatibility (421 ms total)

[----------] 1 test from ClientTestAutoIncrementingColumn
[ RUN      ] ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts
I20250901 14:19:01.928102  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:42633
I20250901 14:19:01.929185  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:01.934203 10556 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:01.935125 10557 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:01.936475 10559 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:01.936882  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:01.937765  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:01.937951  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:01.938094  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736341938079 us; error 0 us; skew 500 ppm
I20250901 14:19:01.938635  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:01.940927  5023 webserver.cc:480] Webserver started at http://127.4.231.254:33057/ using document root <none> and password file <none>
I20250901 14:19:01.941428  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:01.941648  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:01.941910  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:01.942970  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "ba4f8e1997194819aeffc4c989d99461"
format_stamp: "Formatted at 2025-09-01 14:19:01 on dist-test-slave-9gf0"
I20250901 14:19:01.947561  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:19:01.950907 10564 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:01.951675  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.000s	sys 0.001s
I20250901 14:19:01.951932  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "ba4f8e1997194819aeffc4c989d99461"
format_stamp: "Formatted at 2025-09-01 14:19:01 on dist-test-slave-9gf0"
I20250901 14:19:01.952181  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:01.962366  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:01.963387  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:01.998445  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:42633
I20250901 14:19:01.998625 10615 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:42633 every 8 connection(s)
I20250901 14:19:02.002110 10616 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:02.013041 10616 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461: Bootstrap starting.
I20250901 14:19:02.017228 10616 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:02.021121 10616 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461: No bootstrap required, opened a new log
I20250901 14:19:02.023047 10616 raft_consensus.cc:357] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ba4f8e1997194819aeffc4c989d99461" member_type: VOTER }
I20250901 14:19:02.023416 10616 raft_consensus.cc:383] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:02.023602 10616 raft_consensus.cc:738] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: ba4f8e1997194819aeffc4c989d99461, State: Initialized, Role: FOLLOWER
I20250901 14:19:02.024118 10616 consensus_queue.cc:260] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ba4f8e1997194819aeffc4c989d99461" member_type: VOTER }
I20250901 14:19:02.024533 10616 raft_consensus.cc:397] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:02.024722 10616 raft_consensus.cc:491] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:02.024927 10616 raft_consensus.cc:3058] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:02.029510 10616 raft_consensus.cc:513] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ba4f8e1997194819aeffc4c989d99461" member_type: VOTER }
I20250901 14:19:02.030004 10616 leader_election.cc:304] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: ba4f8e1997194819aeffc4c989d99461; no voters: 
I20250901 14:19:02.031050 10616 leader_election.cc:290] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:02.031355 10619 raft_consensus.cc:2802] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:02.032662 10619 raft_consensus.cc:695] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 1 LEADER]: Becoming Leader. State: Replica: ba4f8e1997194819aeffc4c989d99461, State: Running, Role: LEADER
I20250901 14:19:02.033303 10619 consensus_queue.cc:237] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ba4f8e1997194819aeffc4c989d99461" member_type: VOTER }
I20250901 14:19:02.034018 10616 sys_catalog.cc:564] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:19:02.036566 10621 sys_catalog.cc:455] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [sys.catalog]: SysCatalogTable state changed. Reason: New leader ba4f8e1997194819aeffc4c989d99461. Latest consensus state: current_term: 1 leader_uuid: "ba4f8e1997194819aeffc4c989d99461" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ba4f8e1997194819aeffc4c989d99461" member_type: VOTER } }
I20250901 14:19:02.036506 10620 sys_catalog.cc:455] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "ba4f8e1997194819aeffc4c989d99461" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "ba4f8e1997194819aeffc4c989d99461" member_type: VOTER } }
I20250901 14:19:02.037256 10621 sys_catalog.cc:458] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:02.037335 10620 sys_catalog.cc:458] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:02.046808 10624 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:19:02.051478 10624 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:19:02.052654  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:19:02.059767 10624 catalog_manager.cc:1349] Generated new cluster ID: 3f18bd1eb3ac4cacb09219a5173c67de
I20250901 14:19:02.060070 10624 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:19:02.074322 10624 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:19:02.075662 10624 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:19:02.088227 10624 catalog_manager.cc:5955] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461: Generated new TSK 0
I20250901 14:19:02.088871 10624 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:19:02.119840  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:02.126329 10638 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:02.127079 10637 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:02.128131  5023 server_base.cc:1047] running on GCE node
W20250901 14:19:02.128089 10640 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:02.129169  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:02.129352  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:02.129518  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736342129499 us; error 0 us; skew 500 ppm
I20250901 14:19:02.130074  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:02.132328  5023 webserver.cc:480] Webserver started at http://127.4.231.193:33547/ using document root <none> and password file <none>
I20250901 14:19:02.132805  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:02.132963  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:02.133232  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:02.134356  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "92294cc85fff48178783e1631493b9a4"
format_stamp: "Formatted at 2025-09-01 14:19:02 on dist-test-slave-9gf0"
I20250901 14:19:02.139150  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:19:02.142387 10645 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.143146  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:19:02.143438  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "92294cc85fff48178783e1631493b9a4"
format_stamp: "Formatted at 2025-09-01 14:19:02 on dist-test-slave-9gf0"
I20250901 14:19:02.143733  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:02.188134  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:02.189216  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:02.190724  5023 txn_system_client.cc:432] TxnSystemClient initialization is disabled...
I20250901 14:19:02.192937  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:02.193130  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.193365  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:02.193544  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.234889  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:46325
I20250901 14:19:02.234966 10708 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:46325 every 8 connection(s)
I20250901 14:19:02.239444  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:02.246567 10713 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:02.248585 10714 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:02.250969 10716 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:02.251082  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:02.252180  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:02.252444  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:02.252653 10709 heartbeater.cc:344] Connected to a master server at 127.4.231.254:42633
I20250901 14:19:02.252693  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736342252676 us; error 0 us; skew 500 ppm
I20250901 14:19:02.253118 10709 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:02.253453  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:02.253859 10709 heartbeater.cc:507] Master 127.4.231.254:42633 requested a full tablet report, sending...
I20250901 14:19:02.256075  5023 webserver.cc:480] Webserver started at http://127.4.231.194:35171/ using document root <none> and password file <none>
I20250901 14:19:02.256062 10581 ts_manager.cc:194] Registered new tserver with Master: 92294cc85fff48178783e1631493b9a4 (127.4.231.193:46325)
I20250901 14:19:02.256769  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:02.256973  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:02.257210  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:02.258150 10581 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:42002
I20250901 14:19:02.258349  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-1-root/instance:
uuid: "10f2a09e9e924e4bae0df84237300b5d"
format_stamp: "Formatted at 2025-09-01 14:19:02 on dist-test-slave-9gf0"
I20250901 14:19:02.262960  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:19:02.266036 10721 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.266759  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:19:02.267035  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-1-root
uuid: "10f2a09e9e924e4bae0df84237300b5d"
format_stamp: "Formatted at 2025-09-01 14:19:02 on dist-test-slave-9gf0"
I20250901 14:19:02.267318  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-1-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-1-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-1-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:02.287599  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:02.288727  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:02.290189  5023 txn_system_client.cc:432] TxnSystemClient initialization is disabled...
I20250901 14:19:02.292503  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:02.292701  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.292914  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:02.293059  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.331576  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.194:36383
I20250901 14:19:02.331667 10783 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.194:36383 every 8 connection(s)
I20250901 14:19:02.336469  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:02.342607 10787 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:02.343400 10788 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:02.346889  5023 server_base.cc:1047] running on GCE node
W20250901 14:19:02.346870 10790 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:02.348146 10784 heartbeater.cc:344] Connected to a master server at 127.4.231.254:42633
I20250901 14:19:02.348196  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:02.348549  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:02.348598 10784 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:02.348724  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736342348710 us; error 0 us; skew 500 ppm
I20250901 14:19:02.349311  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:02.349385 10784 heartbeater.cc:507] Master 127.4.231.254:42633 requested a full tablet report, sending...
I20250901 14:19:02.351468 10581 ts_manager.cc:194] Registered new tserver with Master: 10f2a09e9e924e4bae0df84237300b5d (127.4.231.194:36383)
I20250901 14:19:02.352185  5023 webserver.cc:480] Webserver started at http://127.4.231.195:39069/ using document root <none> and password file <none>
I20250901 14:19:02.352838  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:02.353075  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:02.353379  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:02.353371 10581 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:42004
I20250901 14:19:02.354971  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-2-root/instance:
uuid: "fdaadf8def3b4e04b5c569cad3f269f3"
format_stamp: "Formatted at 2025-09-01 14:19:02 on dist-test-slave-9gf0"
I20250901 14:19:02.359411  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:19:02.362850 10795 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.363584  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:19:02.363894  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-2-root
uuid: "fdaadf8def3b4e04b5c569cad3f269f3"
format_stamp: "Formatted at 2025-09-01 14:19:02 on dist-test-slave-9gf0"
I20250901 14:19:02.364179  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-2-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-2-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts.1756736290962133-5023-0/minicluster-data/ts-2-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:02.384534  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:02.385856  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:02.387337  5023 txn_system_client.cc:432] TxnSystemClient initialization is disabled...
I20250901 14:19:02.389552  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:02.389741  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.389969  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:02.390111  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.429875  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.195:45009
I20250901 14:19:02.429965 10857 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.195:45009 every 8 connection(s)
I20250901 14:19:02.443176 10858 heartbeater.cc:344] Connected to a master server at 127.4.231.254:42633
I20250901 14:19:02.443567 10858 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:02.444258 10858 heartbeater.cc:507] Master 127.4.231.254:42633 requested a full tablet report, sending...
I20250901 14:19:02.446115 10581 ts_manager.cc:194] Registered new tserver with Master: fdaadf8def3b4e04b5c569cad3f269f3 (127.4.231.195:45009)
I20250901 14:19:02.446270  5023 internal_mini_cluster.cc:371] 3 TS(s) registered with all masters after 0.013422193s
I20250901 14:19:02.447429 10581 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:42430
I20250901 14:19:02.468698 10581 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:42444:
name: "table_with_auto_incrementing_column"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "auto_incrementing_id"
    type: INT64
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
    is_auto_incrementing: true
  }
}
num_replicas: 3
split_rows_range_bounds {
  rows: "\006\001\000\000\000\000\007\001\n\000\000\000\006\001\n\000\000\000\007\001\024\000\000\000""\006\001\000\000\000\000\007\001\n\000\000\000\006\001\n\000\000\000\007\001\024\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
W20250901 14:19:02.470988 10581 catalog_manager.cc:6944] The number of live tablet servers is not enough to re-replicate a tablet replica of the newly created table table_with_auto_incrementing_column in case of a server failure: 4 tablet servers would be needed, 3 are available. Consider bringing up more tablet servers.
I20250901 14:19:02.512967 10673 tablet_service.cc:1468] Processing CreateTablet for tablet 76085a4787e44b94946b5f63bea59a30 (DEFAULT_TABLE table=table_with_auto_incrementing_column [id=002a5e188473416080b973f76ebc74dd]), partition=RANGE (key) PARTITION 0 <= VALUES < 10
I20250901 14:19:02.513309 10672 tablet_service.cc:1468] Processing CreateTablet for tablet e2649165f03d498ab16e8d86cfaa1cc2 (DEFAULT_TABLE table=table_with_auto_incrementing_column [id=002a5e188473416080b973f76ebc74dd]), partition=RANGE (key) PARTITION 10 <= VALUES < 20
I20250901 14:19:02.513777 10823 tablet_service.cc:1468] Processing CreateTablet for tablet 76085a4787e44b94946b5f63bea59a30 (DEFAULT_TABLE table=table_with_auto_incrementing_column [id=002a5e188473416080b973f76ebc74dd]), partition=RANGE (key) PARTITION 0 <= VALUES < 10
I20250901 14:19:02.514506 10673 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 76085a4787e44b94946b5f63bea59a30. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:02.515028 10823 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 76085a4787e44b94946b5f63bea59a30. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:02.515111 10672 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet e2649165f03d498ab16e8d86cfaa1cc2. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:02.518882 10822 tablet_service.cc:1468] Processing CreateTablet for tablet e2649165f03d498ab16e8d86cfaa1cc2 (DEFAULT_TABLE table=table_with_auto_incrementing_column [id=002a5e188473416080b973f76ebc74dd]), partition=RANGE (key) PARTITION 10 <= VALUES < 20
I20250901 14:19:02.520321 10822 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet e2649165f03d498ab16e8d86cfaa1cc2. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:02.527741 10749 tablet_service.cc:1468] Processing CreateTablet for tablet 76085a4787e44b94946b5f63bea59a30 (DEFAULT_TABLE table=table_with_auto_incrementing_column [id=002a5e188473416080b973f76ebc74dd]), partition=RANGE (key) PARTITION 0 <= VALUES < 10
I20250901 14:19:02.528026 10748 tablet_service.cc:1468] Processing CreateTablet for tablet e2649165f03d498ab16e8d86cfaa1cc2 (DEFAULT_TABLE table=table_with_auto_incrementing_column [id=002a5e188473416080b973f76ebc74dd]), partition=RANGE (key) PARTITION 10 <= VALUES < 20
I20250901 14:19:02.529199 10749 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 76085a4787e44b94946b5f63bea59a30. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:02.529873 10748 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet e2649165f03d498ab16e8d86cfaa1cc2. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:02.553572 10873 tablet_bootstrap.cc:492] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d: Bootstrap starting.
I20250901 14:19:02.554967 10875 tablet_bootstrap.cc:492] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4: Bootstrap starting.
I20250901 14:19:02.560689 10873 tablet.cc:502] Time spent fetching auto increment counter: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.560671 10874 tablet_bootstrap.cc:492] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3: Bootstrap starting.
I20250901 14:19:02.562474 10875 tablet.cc:502] Time spent fetching auto increment counter: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.562624 10873 tablet_bootstrap.cc:654] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:02.564065 10875 tablet_bootstrap.cc:654] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:02.565413 10874 tablet.cc:502] Time spent fetching auto increment counter: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.567245 10874 tablet_bootstrap.cc:654] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:02.568603 10873 tablet_bootstrap.cc:492] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d: No bootstrap required, opened a new log
I20250901 14:19:02.569271 10873 ts_tablet_manager.cc:1397] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d: Time spent bootstrapping tablet: real 0.016s	user 0.008s	sys 0.003s
I20250901 14:19:02.572700 10873 raft_consensus.cc:357] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } }
I20250901 14:19:02.573427 10873 raft_consensus.cc:383] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:02.573765 10873 raft_consensus.cc:738] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 10f2a09e9e924e4bae0df84237300b5d, State: Initialized, Role: FOLLOWER
I20250901 14:19:02.574010 10875 tablet_bootstrap.cc:492] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4: No bootstrap required, opened a new log
I20250901 14:19:02.574047 10874 tablet_bootstrap.cc:492] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3: No bootstrap required, opened a new log
I20250901 14:19:02.574564 10875 ts_tablet_manager.cc:1397] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4: Time spent bootstrapping tablet: real 0.020s	user 0.013s	sys 0.004s
I20250901 14:19:02.574623 10874 ts_tablet_manager.cc:1397] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3: Time spent bootstrapping tablet: real 0.014s	user 0.008s	sys 0.003s
I20250901 14:19:02.574505 10873 consensus_queue.cc:260] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } }
I20250901 14:19:02.577083 10784 heartbeater.cc:499] Master 127.4.231.254:42633 was elected leader, sending a full tablet report...
I20250901 14:19:02.577214 10875 raft_consensus.cc:357] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } }
I20250901 14:19:02.577746 10873 ts_tablet_manager.cc:1428] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d: Time spent starting tablet: real 0.008s	user 0.003s	sys 0.005s
I20250901 14:19:02.578029 10875 raft_consensus.cc:383] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:02.577822 10874 raft_consensus.cc:357] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } }
I20250901 14:19:02.578398 10875 raft_consensus.cc:738] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 92294cc85fff48178783e1631493b9a4, State: Initialized, Role: FOLLOWER
I20250901 14:19:02.578599 10874 raft_consensus.cc:383] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:02.578904 10873 tablet_bootstrap.cc:492] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d: Bootstrap starting.
I20250901 14:19:02.578944 10874 raft_consensus.cc:738] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: fdaadf8def3b4e04b5c569cad3f269f3, State: Initialized, Role: FOLLOWER
I20250901 14:19:02.579206 10875 consensus_queue.cc:260] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } }
I20250901 14:19:02.579874 10874 consensus_queue.cc:260] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } }
W20250901 14:19:02.585515 10785 tablet.cc:2378] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d: Can't schedule compaction. Clean time has not been advanced past its initial value.
I20250901 14:19:02.587898 10873 tablet.cc:502] Time spent fetching auto increment counter: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.590030 10873 tablet_bootstrap.cc:654] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:02.601225 10709 heartbeater.cc:499] Master 127.4.231.254:42633 was elected leader, sending a full tablet report...
I20250901 14:19:02.602089 10875 ts_tablet_manager.cc:1428] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4: Time spent starting tablet: real 0.027s	user 0.023s	sys 0.005s
I20250901 14:19:02.602156 10858 heartbeater.cc:499] Master 127.4.231.254:42633 was elected leader, sending a full tablet report...
I20250901 14:19:02.603018 10875 tablet_bootstrap.cc:492] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4: Bootstrap starting.
I20250901 14:19:02.604808 10874 ts_tablet_manager.cc:1428] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3: Time spent starting tablet: real 0.030s	user 0.018s	sys 0.008s
I20250901 14:19:02.605881 10874 tablet_bootstrap.cc:492] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3: Bootstrap starting.
I20250901 14:19:02.608793 10875 tablet.cc:502] Time spent fetching auto increment counter: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.610226 10875 tablet_bootstrap.cc:654] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:02.611356 10874 tablet.cc:502] Time spent fetching auto increment counter: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:02.612852 10874 tablet_bootstrap.cc:654] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:02.618940 10873 tablet_bootstrap.cc:492] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d: No bootstrap required, opened a new log
I20250901 14:19:02.619366 10873 ts_tablet_manager.cc:1397] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d: Time spent bootstrapping tablet: real 0.041s	user 0.017s	sys 0.002s
I20250901 14:19:02.621415 10873 raft_consensus.cc:357] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } }
I20250901 14:19:02.622015 10873 raft_consensus.cc:383] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:02.622330 10873 raft_consensus.cc:738] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 10f2a09e9e924e4bae0df84237300b5d, State: Initialized, Role: FOLLOWER
I20250901 14:19:02.623117 10873 consensus_queue.cc:260] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } }
I20250901 14:19:02.624768 10875 tablet_bootstrap.cc:492] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4: No bootstrap required, opened a new log
I20250901 14:19:02.625166 10875 ts_tablet_manager.cc:1397] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4: Time spent bootstrapping tablet: real 0.022s	user 0.012s	sys 0.004s
I20250901 14:19:02.625466 10873 ts_tablet_manager.cc:1428] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d: Time spent starting tablet: real 0.006s	user 0.004s	sys 0.000s
I20250901 14:19:02.627234 10874 tablet_bootstrap.cc:492] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3: No bootstrap required, opened a new log
I20250901 14:19:02.627766 10874 ts_tablet_manager.cc:1397] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3: Time spent bootstrapping tablet: real 0.022s	user 0.012s	sys 0.007s
I20250901 14:19:02.627913 10875 raft_consensus.cc:357] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } }
I20250901 14:19:02.628670 10875 raft_consensus.cc:383] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:02.628968 10875 raft_consensus.cc:738] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 92294cc85fff48178783e1631493b9a4, State: Initialized, Role: FOLLOWER
I20250901 14:19:02.629801 10875 consensus_queue.cc:260] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } }
I20250901 14:19:02.630717 10874 raft_consensus.cc:357] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } }
I20250901 14:19:02.631393 10874 raft_consensus.cc:383] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:02.631660 10874 raft_consensus.cc:738] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: fdaadf8def3b4e04b5c569cad3f269f3, State: Initialized, Role: FOLLOWER
I20250901 14:19:02.632104 10875 ts_tablet_manager.cc:1428] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4: Time spent starting tablet: real 0.007s	user 0.006s	sys 0.000s
I20250901 14:19:02.632332 10874 consensus_queue.cc:260] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } }
I20250901 14:19:02.634553 10874 ts_tablet_manager.cc:1428] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3: Time spent starting tablet: real 0.006s	user 0.006s	sys 0.000s
I20250901 14:19:02.758690 10879 raft_consensus.cc:491] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Starting pre-election (no leader contacted us within the election timeout)
I20250901 14:19:02.759181 10879 raft_consensus.cc:513] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } }
I20250901 14:19:02.761073 10879 leader_election.cc:290] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [CANDIDATE]: Term 1 pre-election: Requested pre-vote from peers 92294cc85fff48178783e1631493b9a4 (127.4.231.193:46325), fdaadf8def3b4e04b5c569cad3f269f3 (127.4.231.195:45009)
I20250901 14:19:02.770085 10683 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "e2649165f03d498ab16e8d86cfaa1cc2" candidate_uuid: "10f2a09e9e924e4bae0df84237300b5d" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "92294cc85fff48178783e1631493b9a4" is_pre_election: true
I20250901 14:19:02.770803 10683 raft_consensus.cc:2466] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4 [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 10f2a09e9e924e4bae0df84237300b5d in term 0.
I20250901 14:19:02.771803 10833 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "e2649165f03d498ab16e8d86cfaa1cc2" candidate_uuid: "10f2a09e9e924e4bae0df84237300b5d" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" is_pre_election: true
I20250901 14:19:02.772079 10725 leader_election.cc:304] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [CANDIDATE]: Term 1 pre-election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 10f2a09e9e924e4bae0df84237300b5d, 92294cc85fff48178783e1631493b9a4; no voters: 
I20250901 14:19:02.772531 10833 raft_consensus.cc:2466] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3 [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 10f2a09e9e924e4bae0df84237300b5d in term 0.
I20250901 14:19:02.772912 10879 raft_consensus.cc:2802] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Leader pre-election won for term 1
I20250901 14:19:02.773270 10879 raft_consensus.cc:491] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Starting leader election (no leader contacted us within the election timeout)
I20250901 14:19:02.773638 10879 raft_consensus.cc:3058] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:02.778525 10879 raft_consensus.cc:513] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } }
I20250901 14:19:02.779984 10879 leader_election.cc:290] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [CANDIDATE]: Term 1 election: Requested vote from peers 92294cc85fff48178783e1631493b9a4 (127.4.231.193:46325), fdaadf8def3b4e04b5c569cad3f269f3 (127.4.231.195:45009)
I20250901 14:19:02.780874 10683 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "e2649165f03d498ab16e8d86cfaa1cc2" candidate_uuid: "10f2a09e9e924e4bae0df84237300b5d" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "92294cc85fff48178783e1631493b9a4"
I20250901 14:19:02.781025 10833 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "e2649165f03d498ab16e8d86cfaa1cc2" candidate_uuid: "10f2a09e9e924e4bae0df84237300b5d" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "fdaadf8def3b4e04b5c569cad3f269f3"
I20250901 14:19:02.781359 10683 raft_consensus.cc:3058] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:02.781553 10833 raft_consensus.cc:3058] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:02.786783 10683 raft_consensus.cc:2466] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4 [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 10f2a09e9e924e4bae0df84237300b5d in term 1.
I20250901 14:19:02.786814 10833 raft_consensus.cc:2466] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3 [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 10f2a09e9e924e4bae0df84237300b5d in term 1.
I20250901 14:19:02.787974 10725 leader_election.cc:304] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 10f2a09e9e924e4bae0df84237300b5d, 92294cc85fff48178783e1631493b9a4; no voters: 
I20250901 14:19:02.788694 10879 raft_consensus.cc:2802] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:02.789505 10879 raft_consensus.cc:695] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 1 LEADER]: Becoming Leader. State: Replica: 10f2a09e9e924e4bae0df84237300b5d, State: Running, Role: LEADER
I20250901 14:19:02.790166 10879 consensus_queue.cc:237] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 2, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } }
I20250901 14:19:02.799499 10581 catalog_manager.cc:5582] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d reported cstate change: term changed from 0 to 1, leader changed from <none> to 10f2a09e9e924e4bae0df84237300b5d (127.4.231.194). New cstate: current_term: 1 leader_uuid: "10f2a09e9e924e4bae0df84237300b5d" committed_config { opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } health_report { overall_health: HEALTHY } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } health_report { overall_health: UNKNOWN } } }
I20250901 14:19:02.844197 10879 raft_consensus.cc:491] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Starting pre-election (no leader contacted us within the election timeout)
I20250901 14:19:02.844740 10879 raft_consensus.cc:513] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } }
I20250901 14:19:02.846503 10879 leader_election.cc:290] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [CANDIDATE]: Term 1 pre-election: Requested pre-vote from peers 92294cc85fff48178783e1631493b9a4 (127.4.231.193:46325), fdaadf8def3b4e04b5c569cad3f269f3 (127.4.231.195:45009)
I20250901 14:19:02.847396 10683 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "76085a4787e44b94946b5f63bea59a30" candidate_uuid: "10f2a09e9e924e4bae0df84237300b5d" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "92294cc85fff48178783e1631493b9a4" is_pre_election: true
I20250901 14:19:02.847606 10833 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "76085a4787e44b94946b5f63bea59a30" candidate_uuid: "10f2a09e9e924e4bae0df84237300b5d" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" is_pre_election: true
I20250901 14:19:02.847949 10683 raft_consensus.cc:2466] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4 [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 10f2a09e9e924e4bae0df84237300b5d in term 0.
I20250901 14:19:02.848150 10833 raft_consensus.cc:2466] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3 [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 10f2a09e9e924e4bae0df84237300b5d in term 0.
I20250901 14:19:02.848829 10725 leader_election.cc:304] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [CANDIDATE]: Term 1 pre-election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 10f2a09e9e924e4bae0df84237300b5d, 92294cc85fff48178783e1631493b9a4; no voters: 
I20250901 14:19:02.849414 10879 raft_consensus.cc:2802] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Leader pre-election won for term 1
I20250901 14:19:02.849764 10879 raft_consensus.cc:491] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Starting leader election (no leader contacted us within the election timeout)
I20250901 14:19:02.849994 10879 raft_consensus.cc:3058] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:02.854753 10879 raft_consensus.cc:513] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } }
I20250901 14:19:02.856206 10879 leader_election.cc:290] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [CANDIDATE]: Term 1 election: Requested vote from peers 92294cc85fff48178783e1631493b9a4 (127.4.231.193:46325), fdaadf8def3b4e04b5c569cad3f269f3 (127.4.231.195:45009)
I20250901 14:19:02.857164 10683 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "76085a4787e44b94946b5f63bea59a30" candidate_uuid: "10f2a09e9e924e4bae0df84237300b5d" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "92294cc85fff48178783e1631493b9a4"
I20250901 14:19:02.857324 10833 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "76085a4787e44b94946b5f63bea59a30" candidate_uuid: "10f2a09e9e924e4bae0df84237300b5d" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "fdaadf8def3b4e04b5c569cad3f269f3"
I20250901 14:19:02.857856 10833 raft_consensus.cc:3058] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:02.857746 10683 raft_consensus.cc:3058] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:02.862830 10683 raft_consensus.cc:2466] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4 [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 10f2a09e9e924e4bae0df84237300b5d in term 1.
I20250901 14:19:02.862881 10833 raft_consensus.cc:2466] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3 [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 10f2a09e9e924e4bae0df84237300b5d in term 1.
I20250901 14:19:02.863816 10725 leader_election.cc:304] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 10f2a09e9e924e4bae0df84237300b5d, fdaadf8def3b4e04b5c569cad3f269f3; no voters: 
I20250901 14:19:02.864434 10879 raft_consensus.cc:2802] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:02.864838 10879 raft_consensus.cc:695] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 1 LEADER]: Becoming Leader. State: Replica: 10f2a09e9e924e4bae0df84237300b5d, State: Running, Role: LEADER
I20250901 14:19:02.865617 10879 consensus_queue.cc:237] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 2, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } }
I20250901 14:19:02.871690 10581 catalog_manager.cc:5582] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d reported cstate change: term changed from 0 to 1, leader changed from <none> to 10f2a09e9e924e4bae0df84237300b5d (127.4.231.194). New cstate: current_term: 1 leader_uuid: "10f2a09e9e924e4bae0df84237300b5d" committed_config { opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "10f2a09e9e924e4bae0df84237300b5d" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 36383 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:02.926024 10833 raft_consensus.cc:1273] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3 [term 1 FOLLOWER]: Refusing update from remote peer 10f2a09e9e924e4bae0df84237300b5d: Log matching property violated. Preceding OpId in replica: term: 0 index: 0. Preceding OpId from leader: term: 1 index: 2. (index mismatch)
I20250901 14:19:02.926141 10683 raft_consensus.cc:1273] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4 [term 1 FOLLOWER]: Refusing update from remote peer 10f2a09e9e924e4bae0df84237300b5d: Log matching property violated. Preceding OpId in replica: term: 0 index: 0. Preceding OpId from leader: term: 1 index: 2. (index mismatch)
I20250901 14:19:02.927479 10886 consensus_queue.cc:1035] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [LEADER]: Connected to new peer: Peer: permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.000s
I20250901 14:19:02.928062 10879 consensus_queue.cc:1035] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [LEADER]: Connected to new peer: Peer: permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.000s
I20250901 14:19:02.943239 10890 mvcc.cc:204] Tried to move back new op lower bound from 7195592060611960832 to 7195592060384133120. Current Snapshot: MvccSnapshot[applied={T|T < 7195592060611960832}]
I20250901 14:19:03.090956 10833 raft_consensus.cc:1273] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3 [term 1 FOLLOWER]: Refusing update from remote peer 10f2a09e9e924e4bae0df84237300b5d: Log matching property violated. Preceding OpId in replica: term: 0 index: 0. Preceding OpId from leader: term: 1 index: 2. (index mismatch)
I20250901 14:19:03.091159 10683 raft_consensus.cc:1273] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4 [term 1 FOLLOWER]: Refusing update from remote peer 10f2a09e9e924e4bae0df84237300b5d: Log matching property violated. Preceding OpId in replica: term: 0 index: 0. Preceding OpId from leader: term: 1 index: 2. (index mismatch)
I20250901 14:19:03.092248 10898 consensus_queue.cc:1035] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [LEADER]: Connected to new peer: Peer: permanent_uuid: "fdaadf8def3b4e04b5c569cad3f269f3" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 45009 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.000s
I20250901 14:19:03.092875 10886 consensus_queue.cc:1035] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [LEADER]: Connected to new peer: Peer: permanent_uuid: "92294cc85fff48178783e1631493b9a4" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 46325 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.000s
I20250901 14:19:03.321542  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:19:03.349411  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:03.350082  5023 tablet_replica.cc:331] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4: stopping tablet replica
I20250901 14:19:03.350658  5023 raft_consensus.cc:2241] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:19:03.351110  5023 raft_consensus.cc:2270] T 76085a4787e44b94946b5f63bea59a30 P 92294cc85fff48178783e1631493b9a4 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:03.353188  5023 tablet_replica.cc:331] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4: stopping tablet replica
I20250901 14:19:03.353824  5023 raft_consensus.cc:2241] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:19:03.354251  5023 raft_consensus.cc:2270] T e2649165f03d498ab16e8d86cfaa1cc2 P 92294cc85fff48178783e1631493b9a4 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:03.375077  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:19:03.385157  5023 tablet_server.cc:178] TabletServer@127.4.231.194:0 shutting down...
I20250901 14:19:03.405113  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:03.405804  5023 tablet_replica.cc:331] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d: stopping tablet replica
I20250901 14:19:03.406348  5023 raft_consensus.cc:2241] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:03.407086  5023 raft_consensus.cc:2270] T e2649165f03d498ab16e8d86cfaa1cc2 P 10f2a09e9e924e4bae0df84237300b5d [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:03.409152  5023 tablet_replica.cc:331] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d: stopping tablet replica
I20250901 14:19:03.409675  5023 raft_consensus.cc:2241] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:03.410394  5023 raft_consensus.cc:2270] T 76085a4787e44b94946b5f63bea59a30 P 10f2a09e9e924e4bae0df84237300b5d [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:03.431900  5023 tablet_server.cc:195] TabletServer@127.4.231.194:0 shutdown complete.
I20250901 14:19:03.445097  5023 tablet_server.cc:178] TabletServer@127.4.231.195:0 shutting down...
I20250901 14:19:03.467358  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:03.467906  5023 tablet_replica.cc:331] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3: stopping tablet replica
I20250901 14:19:03.468405  5023 raft_consensus.cc:2241] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:19:03.468856  5023 raft_consensus.cc:2270] T 76085a4787e44b94946b5f63bea59a30 P fdaadf8def3b4e04b5c569cad3f269f3 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:03.471377  5023 tablet_replica.cc:331] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3: stopping tablet replica
I20250901 14:19:03.471832  5023 raft_consensus.cc:2241] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:19:03.472240  5023 raft_consensus.cc:2270] T e2649165f03d498ab16e8d86cfaa1cc2 P fdaadf8def3b4e04b5c569cad3f269f3 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:03.482631  5023 tablet_server.cc:195] TabletServer@127.4.231.195:0 shutdown complete.
I20250901 14:19:03.495087  5023 master.cc:561] Master@127.4.231.254:42633 shutting down...
I20250901 14:19:03.511435  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:03.511999  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:03.512305  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P ba4f8e1997194819aeffc4c989d99461: stopping tablet replica
I20250901 14:19:03.530931  5023 master.cc:583] Master@127.4.231.254:42633 shutdown complete.
[       OK ] ClientTestAutoIncrementingColumn.ReadAndWriteUsingInserts (1626 ms)
[----------] 1 test from ClientTestAutoIncrementingColumn (1626 ms total)

[----------] 1 test from Params/ScanMultiTabletParamTest
[ RUN      ] Params/ScanMultiTabletParamTest.Test/0
I20250901 14:19:03.555922  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:36475
I20250901 14:19:03.556941  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:03.562109 10904 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:03.562204 10905 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:03.565039  5023 server_base.cc:1047] running on GCE node
W20250901 14:19:03.565936 10907 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:03.566720  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:03.566895  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:03.567058  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736343567040 us; error 0 us; skew 500 ppm
I20250901 14:19:03.567570  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:03.570502  5023 webserver.cc:480] Webserver started at http://127.4.231.254:44469/ using document root <none> and password file <none>
I20250901 14:19:03.570978  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:03.571148  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:03.571394  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:03.572463  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ScanMultiTabletParamTest.Test_0.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "349a4548940149f58653e63fd1c5e223"
format_stamp: "Formatted at 2025-09-01 14:19:03 on dist-test-slave-9gf0"
I20250901 14:19:03.576978  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.006s	sys 0.000s
I20250901 14:19:03.580461 10912 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:03.581264  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:19:03.581558  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ScanMultiTabletParamTest.Test_0.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "349a4548940149f58653e63fd1c5e223"
format_stamp: "Formatted at 2025-09-01 14:19:03 on dist-test-slave-9gf0"
I20250901 14:19:03.581869  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ScanMultiTabletParamTest.Test_0.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ScanMultiTabletParamTest.Test_0.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ScanMultiTabletParamTest.Test_0.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:03.608022  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:03.609004  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:03.663012  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:36475
I20250901 14:19:03.663206 10973 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:36475 every 8 connection(s)
I20250901 14:19:03.667732 10974 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:03.678936 10974 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223: Bootstrap starting.
I20250901 14:19:03.683423 10974 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:03.687750 10974 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223: No bootstrap required, opened a new log
I20250901 14:19:03.689810 10974 raft_consensus.cc:357] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "349a4548940149f58653e63fd1c5e223" member_type: VOTER }
I20250901 14:19:03.690194 10974 raft_consensus.cc:383] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:03.690425 10974 raft_consensus.cc:738] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 349a4548940149f58653e63fd1c5e223, State: Initialized, Role: FOLLOWER
I20250901 14:19:03.691013 10974 consensus_queue.cc:260] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "349a4548940149f58653e63fd1c5e223" member_type: VOTER }
I20250901 14:19:03.691463 10974 raft_consensus.cc:397] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:03.691681 10974 raft_consensus.cc:491] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:03.691951 10974 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:03.696722 10974 raft_consensus.cc:513] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "349a4548940149f58653e63fd1c5e223" member_type: VOTER }
I20250901 14:19:03.697266 10974 leader_election.cc:304] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 349a4548940149f58653e63fd1c5e223; no voters: 
I20250901 14:19:03.698655 10974 leader_election.cc:290] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:03.699002 10977 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:03.700711 10977 raft_consensus.cc:695] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 1 LEADER]: Becoming Leader. State: Replica: 349a4548940149f58653e63fd1c5e223, State: Running, Role: LEADER
I20250901 14:19:03.701438 10977 consensus_queue.cc:237] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "349a4548940149f58653e63fd1c5e223" member_type: VOTER }
I20250901 14:19:03.702311 10974 sys_catalog.cc:564] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:19:03.704747 10979 sys_catalog.cc:455] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [sys.catalog]: SysCatalogTable state changed. Reason: New leader 349a4548940149f58653e63fd1c5e223. Latest consensus state: current_term: 1 leader_uuid: "349a4548940149f58653e63fd1c5e223" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "349a4548940149f58653e63fd1c5e223" member_type: VOTER } }
I20250901 14:19:03.704777 10978 sys_catalog.cc:455] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "349a4548940149f58653e63fd1c5e223" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "349a4548940149f58653e63fd1c5e223" member_type: VOTER } }
I20250901 14:19:03.705395 10979 sys_catalog.cc:458] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:03.705472 10978 sys_catalog.cc:458] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:03.711580 10982 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:19:03.720834 10982 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:19:03.740427 10982 catalog_manager.cc:1349] Generated new cluster ID: 1f50948c30124236aef05557911fd54d
I20250901 14:19:03.740923 10982 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:19:03.741168  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:19:03.761229 10982 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:19:03.762634 10982 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:19:03.777563 10982 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223: Generated new TSK 0
I20250901 14:19:03.778162 10982 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:19:03.808305  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:03.814380 10995 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:03.815838 10996 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:03.818481 10998 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:03.818732  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:03.819684  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:03.819880  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:03.820057  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736343820039 us; error 0 us; skew 500 ppm
I20250901 14:19:03.820626  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:03.823244  5023 webserver.cc:480] Webserver started at http://127.4.231.193:34457/ using document root <none> and password file <none>
I20250901 14:19:03.823680  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:03.823861  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:03.824110  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:03.825189  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ScanMultiTabletParamTest.Test_0.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f"
format_stamp: "Formatted at 2025-09-01 14:19:03 on dist-test-slave-9gf0"
I20250901 14:19:03.830108  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.001s
I20250901 14:19:03.833757 11003 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:03.834586  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:19:03.834872  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ScanMultiTabletParamTest.Test_0.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f"
format_stamp: "Formatted at 2025-09-01 14:19:03 on dist-test-slave-9gf0"
I20250901 14:19:03.835129  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ScanMultiTabletParamTest.Test_0.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ScanMultiTabletParamTest.Test_0.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ScanMultiTabletParamTest.Test_0.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:03.858295  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:03.859366  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:03.867456  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:03.867892  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:03.868273  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:03.868523  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:03.938745  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:41349
I20250901 14:19:03.938836 11073 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:41349 every 8 connection(s)
I20250901 14:19:03.943557 11074 heartbeater.cc:344] Connected to a master server at 127.4.231.254:36475
I20250901 14:19:03.943918 11074 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:03.944700 11074 heartbeater.cc:507] Master 127.4.231.254:36475 requested a full tablet report, sending...
I20250901 14:19:03.946779 10929 ts_manager.cc:194] Registered new tserver with Master: 38dfe8c969264d1fb6ef07c4eb83cf7f (127.4.231.193:41349)
I20250901 14:19:03.946923  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004914048s
I20250901 14:19:03.949177 10929 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:32846
I20250901 14:19:03.962289 11074 heartbeater.cc:499] Master 127.4.231.254:36475 was elected leader, sending a full tablet report...
I20250901 14:19:03.971140 10929 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:32854:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000"
  indirect_data: ""
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:19:04.008620 11039 tablet_service.cc:1468] Processing CreateTablet for tablet d7b77ba6db8e44ada615bb1257d43e46 (DEFAULT_TABLE table=client-testtb [id=f02c06d71cf040cca9b7a05a9297e983]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:19:04.008921 11038 tablet_service.cc:1468] Processing CreateTablet for tablet 3ccdfc6d9c0a453f879f0d2ead891670 (DEFAULT_TABLE table=client-testtb [id=f02c06d71cf040cca9b7a05a9297e983]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:19:04.009735 11039 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet d7b77ba6db8e44ada615bb1257d43e46. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:04.010375 11038 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 3ccdfc6d9c0a453f879f0d2ead891670. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:04.028313 11084 tablet_bootstrap.cc:492] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Bootstrap starting.
I20250901 14:19:04.033144 11084 tablet_bootstrap.cc:654] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:04.038590 11084 tablet_bootstrap.cc:492] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f: No bootstrap required, opened a new log
I20250901 14:19:04.038983 11084 ts_tablet_manager.cc:1397] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent bootstrapping tablet: real 0.011s	user 0.009s	sys 0.000s
I20250901 14:19:04.040863 11084 raft_consensus.cc:357] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.041256 11084 raft_consensus.cc:383] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:04.041484 11084 raft_consensus.cc:738] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Initialized, Role: FOLLOWER
I20250901 14:19:04.042053 11084 consensus_queue.cc:260] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.042515 11084 raft_consensus.cc:397] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:04.042760 11084 raft_consensus.cc:491] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:04.043017 11084 raft_consensus.cc:3058] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:04.049036 11084 raft_consensus.cc:513] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.049670 11084 leader_election.cc:304] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 38dfe8c969264d1fb6ef07c4eb83cf7f; no voters: 
I20250901 14:19:04.050896 11084 leader_election.cc:290] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:04.051292 11086 raft_consensus.cc:2802] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:04.052889 11086 raft_consensus.cc:695] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Becoming Leader. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Running, Role: LEADER
I20250901 14:19:04.053668 11086 consensus_queue.cc:237] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.056572 11084 ts_tablet_manager.cc:1428] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent starting tablet: real 0.017s	user 0.013s	sys 0.004s
I20250901 14:19:04.057614 11084 tablet_bootstrap.cc:492] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Bootstrap starting.
I20250901 14:19:04.062947 10929 catalog_manager.cc:5582] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f reported cstate change: term changed from 0 to 1, leader changed from <none> to 38dfe8c969264d1fb6ef07c4eb83cf7f (127.4.231.193). New cstate: current_term: 1 leader_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:04.064998 11084 tablet_bootstrap.cc:654] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:04.071360 11084 tablet_bootstrap.cc:492] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f: No bootstrap required, opened a new log
I20250901 14:19:04.071777 11084 ts_tablet_manager.cc:1397] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent bootstrapping tablet: real 0.014s	user 0.007s	sys 0.004s
I20250901 14:19:04.074191 11084 raft_consensus.cc:357] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.074785 11084 raft_consensus.cc:383] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:04.075057 11084 raft_consensus.cc:738] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Initialized, Role: FOLLOWER
I20250901 14:19:04.075677 11084 consensus_queue.cc:260] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.076299 11084 raft_consensus.cc:397] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:04.076534 11084 raft_consensus.cc:491] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:04.076820 11084 raft_consensus.cc:3058] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:04.081941 11084 raft_consensus.cc:513] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.082641 11084 leader_election.cc:304] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 38dfe8c969264d1fb6ef07c4eb83cf7f; no voters: 
I20250901 14:19:04.083396 11087 raft_consensus.cc:2802] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:04.083855 11087 raft_consensus.cc:695] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Becoming Leader. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Running, Role: LEADER
I20250901 14:19:04.083943 11084 leader_election.cc:290] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:04.084548 11087 consensus_queue.cc:237] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.086203 11084 ts_tablet_manager.cc:1428] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent starting tablet: real 0.014s	user 0.015s	sys 0.000s
I20250901 14:19:04.091903 10929 catalog_manager.cc:5582] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f reported cstate change: term changed from 0 to 1, leader changed from <none> to 38dfe8c969264d1fb6ef07c4eb83cf7f (127.4.231.193). New cstate: current_term: 1 leader_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:04.120970 10929 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:32854:
name: "TestScanMultiTablet"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\n\000\000\000\004\001\000\024\000\000\000\004\001\000\036\000\000\000\004\001\000(\000\000\000"
  indirect_data: ""
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:19:04.150472 11038 tablet_service.cc:1468] Processing CreateTablet for tablet f1e74da34f1c4bff8150f63cac1cfedb (DEFAULT_TABLE table=TestScanMultiTablet [id=e58a730b729c41a59feb7fd48a9aaec7]), partition=RANGE (key) PARTITION VALUES < 10
I20250901 14:19:04.151738 11038 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet f1e74da34f1c4bff8150f63cac1cfedb. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:04.151489 11039 tablet_service.cc:1468] Processing CreateTablet for tablet 3fce2a4e53e340c9b3a02e3974af6a37 (DEFAULT_TABLE table=TestScanMultiTablet [id=e58a730b729c41a59feb7fd48a9aaec7]), partition=RANGE (key) PARTITION 10 <= VALUES < 20
I20250901 14:19:04.152748 11039 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 3fce2a4e53e340c9b3a02e3974af6a37. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:04.158077 11037 tablet_service.cc:1468] Processing CreateTablet for tablet ea4c9cffd31b4c75bff64573a5c3b56b (DEFAULT_TABLE table=TestScanMultiTablet [id=e58a730b729c41a59feb7fd48a9aaec7]), partition=RANGE (key) PARTITION 20 <= VALUES < 30
I20250901 14:19:04.159507 11037 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet ea4c9cffd31b4c75bff64573a5c3b56b. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:04.162760 11036 tablet_service.cc:1468] Processing CreateTablet for tablet c356c74b7dc5420ebac08e97918489e1 (DEFAULT_TABLE table=TestScanMultiTablet [id=e58a730b729c41a59feb7fd48a9aaec7]), partition=RANGE (key) PARTITION 30 <= VALUES < 40
I20250901 14:19:04.164134 11036 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet c356c74b7dc5420ebac08e97918489e1. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:04.169137 11084 tablet_bootstrap.cc:492] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Bootstrap starting.
I20250901 14:19:04.170717 11035 tablet_service.cc:1468] Processing CreateTablet for tablet cb500eb44a6d46d5801d72a119a90d02 (DEFAULT_TABLE table=TestScanMultiTablet [id=e58a730b729c41a59feb7fd48a9aaec7]), partition=RANGE (key) PARTITION 40 <= VALUES
I20250901 14:19:04.171988 11035 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet cb500eb44a6d46d5801d72a119a90d02. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:04.175239 11084 tablet_bootstrap.cc:654] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:04.196094 11084 tablet_bootstrap.cc:492] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f: No bootstrap required, opened a new log
I20250901 14:19:04.196573 11084 ts_tablet_manager.cc:1397] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent bootstrapping tablet: real 0.028s	user 0.009s	sys 0.000s
I20250901 14:19:04.199126 11084 raft_consensus.cc:357] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.199721 11084 raft_consensus.cc:383] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:04.200008 11084 raft_consensus.cc:738] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Initialized, Role: FOLLOWER
I20250901 14:19:04.200670 11084 consensus_queue.cc:260] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.201138 11084 raft_consensus.cc:397] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:04.201354 11084 raft_consensus.cc:491] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:04.201639 11084 raft_consensus.cc:3058] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:04.206516 11084 raft_consensus.cc:513] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.207116 11084 leader_election.cc:304] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 38dfe8c969264d1fb6ef07c4eb83cf7f; no voters: 
I20250901 14:19:04.207597 11084 leader_election.cc:290] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:04.207783 11087 raft_consensus.cc:2802] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:04.208283 11087 raft_consensus.cc:695] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Becoming Leader. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Running, Role: LEADER
I20250901 14:19:04.208986 11087 consensus_queue.cc:237] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.209410 11084 ts_tablet_manager.cc:1428] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent starting tablet: real 0.013s	user 0.007s	sys 0.004s
I20250901 14:19:04.210374 11084 tablet_bootstrap.cc:492] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f: Bootstrap starting.
I20250901 14:19:04.216296 11084 tablet_bootstrap.cc:654] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:04.218580 10929 catalog_manager.cc:5582] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f reported cstate change: term changed from 0 to 1, leader changed from <none> to 38dfe8c969264d1fb6ef07c4eb83cf7f (127.4.231.193). New cstate: current_term: 1 leader_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:04.221239 11084 tablet_bootstrap.cc:492] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f: No bootstrap required, opened a new log
I20250901 14:19:04.221851 11084 ts_tablet_manager.cc:1397] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent bootstrapping tablet: real 0.012s	user 0.009s	sys 0.002s
I20250901 14:19:04.224982 11084 raft_consensus.cc:357] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.225765 11084 raft_consensus.cc:383] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:04.226089 11084 raft_consensus.cc:738] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Initialized, Role: FOLLOWER
I20250901 14:19:04.226899 11084 consensus_queue.cc:260] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.227682 11084 raft_consensus.cc:397] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:04.228029 11084 raft_consensus.cc:491] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:04.228422 11084 raft_consensus.cc:3058] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:04.235353 11084 raft_consensus.cc:513] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.235953 11084 leader_election.cc:304] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 38dfe8c969264d1fb6ef07c4eb83cf7f; no voters: 
I20250901 14:19:04.236472 11084 leader_election.cc:290] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:04.236617 11086 raft_consensus.cc:2802] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:04.237093 11086 raft_consensus.cc:695] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Becoming Leader. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Running, Role: LEADER
I20250901 14:19:04.237813 11086 consensus_queue.cc:237] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.238814 11084 ts_tablet_manager.cc:1428] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent starting tablet: real 0.017s	user 0.012s	sys 0.002s
I20250901 14:19:04.240118 11084 tablet_bootstrap.cc:492] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Bootstrap starting.
I20250901 14:19:04.244148 10929 catalog_manager.cc:5582] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f reported cstate change: term changed from 0 to 1, leader changed from <none> to 38dfe8c969264d1fb6ef07c4eb83cf7f (127.4.231.193). New cstate: current_term: 1 leader_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:04.246410 11084 tablet_bootstrap.cc:654] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:04.251119 11084 tablet_bootstrap.cc:492] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f: No bootstrap required, opened a new log
I20250901 14:19:04.251497 11084 ts_tablet_manager.cc:1397] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent bootstrapping tablet: real 0.012s	user 0.009s	sys 0.000s
I20250901 14:19:04.253696 11084 raft_consensus.cc:357] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.254186 11084 raft_consensus.cc:383] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:04.254421 11084 raft_consensus.cc:738] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Initialized, Role: FOLLOWER
I20250901 14:19:04.254966 11084 consensus_queue.cc:260] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.255468 11084 raft_consensus.cc:397] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:04.255759 11084 raft_consensus.cc:491] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:04.256084 11084 raft_consensus.cc:3058] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:04.261492 11084 raft_consensus.cc:513] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.262137 11084 leader_election.cc:304] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 38dfe8c969264d1fb6ef07c4eb83cf7f; no voters: 
I20250901 14:19:04.262759 11084 leader_election.cc:290] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:04.262918 11086 raft_consensus.cc:2802] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:04.263409 11086 raft_consensus.cc:695] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Becoming Leader. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Running, Role: LEADER
I20250901 14:19:04.264242 11086 consensus_queue.cc:237] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.264753 11084 ts_tablet_manager.cc:1428] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent starting tablet: real 0.013s	user 0.012s	sys 0.000s
I20250901 14:19:04.265874 11084 tablet_bootstrap.cc:492] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f: Bootstrap starting.
I20250901 14:19:04.271481 11084 tablet_bootstrap.cc:654] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:04.272910 10929 catalog_manager.cc:5582] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f reported cstate change: term changed from 0 to 1, leader changed from <none> to 38dfe8c969264d1fb6ef07c4eb83cf7f (127.4.231.193). New cstate: current_term: 1 leader_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:04.279654 11084 tablet_bootstrap.cc:492] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f: No bootstrap required, opened a new log
I20250901 14:19:04.280123 11084 ts_tablet_manager.cc:1397] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent bootstrapping tablet: real 0.014s	user 0.008s	sys 0.004s
I20250901 14:19:04.282133 11084 raft_consensus.cc:357] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.282537 11084 raft_consensus.cc:383] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:04.282770 11084 raft_consensus.cc:738] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Initialized, Role: FOLLOWER
I20250901 14:19:04.283316 11084 consensus_queue.cc:260] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.283881 11084 raft_consensus.cc:397] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:04.284150 11084 raft_consensus.cc:491] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:04.284449 11084 raft_consensus.cc:3058] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:04.288538 11084 raft_consensus.cc:513] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.289036 11084 leader_election.cc:304] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 38dfe8c969264d1fb6ef07c4eb83cf7f; no voters: 
I20250901 14:19:04.289593 11084 leader_election.cc:290] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:04.289745 11086 raft_consensus.cc:2802] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:04.290720 11086 raft_consensus.cc:695] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Becoming Leader. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Running, Role: LEADER
I20250901 14:19:04.291213 11084 ts_tablet_manager.cc:1428] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent starting tablet: real 0.011s	user 0.011s	sys 0.000s
I20250901 14:19:04.291466 11086 consensus_queue.cc:237] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.292011 11084 tablet_bootstrap.cc:492] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Bootstrap starting.
I20250901 14:19:04.297317 11084 tablet_bootstrap.cc:654] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:04.298296 10929 catalog_manager.cc:5582] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f reported cstate change: term changed from 0 to 1, leader changed from <none> to 38dfe8c969264d1fb6ef07c4eb83cf7f (127.4.231.193). New cstate: current_term: 1 leader_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:04.302556 11084 tablet_bootstrap.cc:492] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f: No bootstrap required, opened a new log
I20250901 14:19:04.302954 11084 ts_tablet_manager.cc:1397] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent bootstrapping tablet: real 0.011s	user 0.009s	sys 0.000s
I20250901 14:19:04.305027 11084 raft_consensus.cc:357] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.305574 11084 raft_consensus.cc:383] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:04.305837 11084 raft_consensus.cc:738] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Initialized, Role: FOLLOWER
I20250901 14:19:04.306324 11084 consensus_queue.cc:260] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.306737 11084 raft_consensus.cc:397] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:04.306946 11084 raft_consensus.cc:491] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:04.307224 11084 raft_consensus.cc:3058] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:04.312381 11084 raft_consensus.cc:513] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.313062 11084 leader_election.cc:304] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 38dfe8c969264d1fb6ef07c4eb83cf7f; no voters: 
I20250901 14:19:04.313797 11086 raft_consensus.cc:2802] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:04.314236 11086 raft_consensus.cc:695] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Becoming Leader. State: Replica: 38dfe8c969264d1fb6ef07c4eb83cf7f, State: Running, Role: LEADER
I20250901 14:19:04.314839 11086 consensus_queue.cc:237] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } }
I20250901 14:19:04.316815 11084 leader_election.cc:290] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:04.318837 11084 ts_tablet_manager.cc:1428] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f: Time spent starting tablet: real 0.016s	user 0.013s	sys 0.000s
I20250901 14:19:04.320996 10929 catalog_manager.cc:5582] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f reported cstate change: term changed from 0 to 1, leader changed from <none> to 38dfe8c969264d1fb6ef07c4eb83cf7f (127.4.231.193). New cstate: current_term: 1 leader_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "38dfe8c969264d1fb6ef07c4eb83cf7f" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 41349 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:05.212957  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:19:05.238423  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:05.239116  5023 tablet_replica.cc:331] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f: stopping tablet replica
I20250901 14:19:05.239835  5023 raft_consensus.cc:2241] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.240384  5023 raft_consensus.cc:2270] T cb500eb44a6d46d5801d72a119a90d02 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.242977  5023 tablet_replica.cc:331] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f: stopping tablet replica
I20250901 14:19:05.243711  5023 raft_consensus.cc:2241] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.244239  5023 raft_consensus.cc:2270] T ea4c9cffd31b4c75bff64573a5c3b56b P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.246768  5023 tablet_replica.cc:331] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f: stopping tablet replica
I20250901 14:19:05.247443  5023 raft_consensus.cc:2241] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.247969  5023 raft_consensus.cc:2270] T d7b77ba6db8e44ada615bb1257d43e46 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.250012  5023 tablet_replica.cc:331] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f: stopping tablet replica
I20250901 14:19:05.250636  5023 raft_consensus.cc:2241] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.251142  5023 raft_consensus.cc:2270] T c356c74b7dc5420ebac08e97918489e1 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.253609  5023 tablet_replica.cc:331] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f: stopping tablet replica
I20250901 14:19:05.254251  5023 raft_consensus.cc:2241] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.254716  5023 raft_consensus.cc:2270] T f1e74da34f1c4bff8150f63cac1cfedb P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.256693  5023 tablet_replica.cc:331] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f: stopping tablet replica
I20250901 14:19:05.257298  5023 raft_consensus.cc:2241] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.257819  5023 raft_consensus.cc:2270] T 3fce2a4e53e340c9b3a02e3974af6a37 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.260284  5023 tablet_replica.cc:331] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f: stopping tablet replica
I20250901 14:19:05.260890  5023 raft_consensus.cc:2241] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.261337  5023 raft_consensus.cc:2270] T 3ccdfc6d9c0a453f879f0d2ead891670 P 38dfe8c969264d1fb6ef07c4eb83cf7f [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.288878  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:19:05.305197  5023 master.cc:561] Master@127.4.231.254:36475 shutting down...
I20250901 14:19:05.328310  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.328950  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.329299  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 349a4548940149f58653e63fd1c5e223: stopping tablet replica
I20250901 14:19:05.350289  5023 master.cc:583] Master@127.4.231.254:36475 shutdown complete.
[       OK ] Params/ScanMultiTabletParamTest.Test/0 (1824 ms)
[----------] 1 test from Params/ScanMultiTabletParamTest (1824 ms total)

[----------] 1 test from KeepAlivePeriodically/KeepAlivePeriodicallyTest
[ RUN      ] KeepAlivePeriodically/KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically/1
I20250901 14:19:05.381979  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:42635
I20250901 14:19:05.383299  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:05.388734 11111 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:05.388989 11112 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:05.392076 11114 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:05.393280  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:05.394241  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:05.394444  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:05.394593  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736345394576 us; error 0 us; skew 500 ppm
I20250901 14:19:05.395105  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:05.397907  5023 webserver.cc:480] Webserver started at http://127.4.231.254:33901/ using document root <none> and password file <none>
I20250901 14:19:05.398411  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:05.398605  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:05.398885  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:05.400035  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "55eba3d176834b9d80f3534b67275ddb"
format_stamp: "Formatted at 2025-09-01 14:19:05 on dist-test-slave-9gf0"
I20250901 14:19:05.405223  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.005s	user 0.007s	sys 0.000s
I20250901 14:19:05.409061 11119 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:05.409987  5023 fs_manager.cc:730] Time spent opening block manager: real 0.003s	user 0.003s	sys 0.000s
I20250901 14:19:05.410357  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "55eba3d176834b9d80f3534b67275ddb"
format_stamp: "Formatted at 2025-09-01 14:19:05 on dist-test-slave-9gf0"
I20250901 14:19:05.410738  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:05.427284  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:05.428763  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:05.484404  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:42635
I20250901 14:19:05.484485 11180 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:42635 every 8 connection(s)
I20250901 14:19:05.489135 11181 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:05.500166 11181 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb: Bootstrap starting.
I20250901 14:19:05.504700 11181 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:05.508980 11181 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb: No bootstrap required, opened a new log
I20250901 14:19:05.510972 11181 raft_consensus.cc:357] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "55eba3d176834b9d80f3534b67275ddb" member_type: VOTER }
I20250901 14:19:05.511384 11181 raft_consensus.cc:383] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:05.511597 11181 raft_consensus.cc:738] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 55eba3d176834b9d80f3534b67275ddb, State: Initialized, Role: FOLLOWER
I20250901 14:19:05.512184 11181 consensus_queue.cc:260] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "55eba3d176834b9d80f3534b67275ddb" member_type: VOTER }
I20250901 14:19:05.512673 11181 raft_consensus.cc:397] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:05.512912 11181 raft_consensus.cc:491] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:05.513183 11181 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:05.518072 11181 raft_consensus.cc:513] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "55eba3d176834b9d80f3534b67275ddb" member_type: VOTER }
I20250901 14:19:05.518607 11181 leader_election.cc:304] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 55eba3d176834b9d80f3534b67275ddb; no voters: 
I20250901 14:19:05.519840 11181 leader_election.cc:290] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:05.520203 11184 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:05.521842 11184 raft_consensus.cc:695] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 1 LEADER]: Becoming Leader. State: Replica: 55eba3d176834b9d80f3534b67275ddb, State: Running, Role: LEADER
I20250901 14:19:05.522553 11184 consensus_queue.cc:237] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "55eba3d176834b9d80f3534b67275ddb" member_type: VOTER }
I20250901 14:19:05.523021 11181 sys_catalog.cc:564] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:19:05.525467 11185 sys_catalog.cc:455] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "55eba3d176834b9d80f3534b67275ddb" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "55eba3d176834b9d80f3534b67275ddb" member_type: VOTER } }
I20250901 14:19:05.525611 11186 sys_catalog.cc:455] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [sys.catalog]: SysCatalogTable state changed. Reason: New leader 55eba3d176834b9d80f3534b67275ddb. Latest consensus state: current_term: 1 leader_uuid: "55eba3d176834b9d80f3534b67275ddb" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "55eba3d176834b9d80f3534b67275ddb" member_type: VOTER } }
I20250901 14:19:05.526093 11185 sys_catalog.cc:458] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:05.526311 11186 sys_catalog.cc:458] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:05.544637 11189 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:19:05.553170 11189 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:19:05.562541  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:19:05.569258 11189 catalog_manager.cc:1349] Generated new cluster ID: 6d3e94bb23014cdd95fec157dc090ce9
I20250901 14:19:05.569684 11189 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:19:05.592806 11189 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:19:05.594434 11189 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:19:05.611850 11189 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb: Generated new TSK 0
I20250901 14:19:05.612406 11189 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:19:05.634804  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:05.640252 11202 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:05.641963 11203 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:05.643100  5023 server_base.cc:1047] running on GCE node
W20250901 14:19:05.643127 11205 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:05.644053  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:05.644268  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:05.644441  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736345644416 us; error 0 us; skew 500 ppm
I20250901 14:19:05.644959  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:05.647336  5023 webserver.cc:480] Webserver started at http://127.4.231.193:41443/ using document root <none> and password file <none>
I20250901 14:19:05.647822  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:05.648010  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:05.648263  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:05.649317  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "c7670554dbe94a3f843ea72e263a528a"
format_stamp: "Formatted at 2025-09-01 14:19:05 on dist-test-slave-9gf0"
I20250901 14:19:05.653741  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.000s
I20250901 14:19:05.657055 11210 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:05.657938  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:19:05.658210  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "c7670554dbe94a3f843ea72e263a528a"
format_stamp: "Formatted at 2025-09-01 14:19:05 on dist-test-slave-9gf0"
I20250901 14:19:05.658474  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:05.673717  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:05.674757  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:05.680001  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:05.680406  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:05.680796  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:05.681061  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:05.729110  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:36203
I20250901 14:19:05.729212 11280 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:36203 every 8 connection(s)
I20250901 14:19:05.733897 11281 heartbeater.cc:344] Connected to a master server at 127.4.231.254:42635
I20250901 14:19:05.734274 11281 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:05.735015 11281 heartbeater.cc:507] Master 127.4.231.254:42635 requested a full tablet report, sending...
I20250901 14:19:05.736804 11136 ts_manager.cc:194] Registered new tserver with Master: c7670554dbe94a3f843ea72e263a528a (127.4.231.193:36203)
I20250901 14:19:05.737162  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.003912455s
I20250901 14:19:05.738487 11136 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:40256
I20250901 14:19:05.751003 11281 heartbeater.cc:499] Master 127.4.231.254:42635 was elected leader, sending a full tablet report...
I20250901 14:19:05.758035 11135 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:40274:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:19:05.790627 11244 tablet_service.cc:1468] Processing CreateTablet for tablet 299a5e63b7ec4dde86d496db33d6fa78 (DEFAULT_TABLE table=client-testtb [id=33f12fd07d8d4722a0b845607c0d275f]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:19:05.790584 11245 tablet_service.cc:1468] Processing CreateTablet for tablet e5a9c9c8426e44d590cfdb1a8902c7af (DEFAULT_TABLE table=client-testtb [id=33f12fd07d8d4722a0b845607c0d275f]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:19:05.792053 11244 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 299a5e63b7ec4dde86d496db33d6fa78. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:05.792666 11245 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet e5a9c9c8426e44d590cfdb1a8902c7af. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:05.805385 11291 tablet_bootstrap.cc:492] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a: Bootstrap starting.
I20250901 14:19:05.810141 11291 tablet_bootstrap.cc:654] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:05.814136 11291 tablet_bootstrap.cc:492] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a: No bootstrap required, opened a new log
I20250901 14:19:05.814522 11291 ts_tablet_manager.cc:1397] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a: Time spent bootstrapping tablet: real 0.009s	user 0.008s	sys 0.000s
I20250901 14:19:05.816581 11291 raft_consensus.cc:357] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c7670554dbe94a3f843ea72e263a528a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36203 } }
I20250901 14:19:05.817050 11291 raft_consensus.cc:383] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:05.817270 11291 raft_consensus.cc:738] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: c7670554dbe94a3f843ea72e263a528a, State: Initialized, Role: FOLLOWER
I20250901 14:19:05.817836 11291 consensus_queue.cc:260] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c7670554dbe94a3f843ea72e263a528a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36203 } }
I20250901 14:19:05.818281 11291 raft_consensus.cc:397] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:05.818492 11291 raft_consensus.cc:491] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:05.818730 11291 raft_consensus.cc:3058] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:05.823593 11291 raft_consensus.cc:513] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c7670554dbe94a3f843ea72e263a528a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36203 } }
I20250901 14:19:05.824120 11291 leader_election.cc:304] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: c7670554dbe94a3f843ea72e263a528a; no voters: 
I20250901 14:19:05.825570 11291 leader_election.cc:290] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:05.825903 11293 raft_consensus.cc:2802] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:05.827708 11293 raft_consensus.cc:695] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 1 LEADER]: Becoming Leader. State: Replica: c7670554dbe94a3f843ea72e263a528a, State: Running, Role: LEADER
I20250901 14:19:05.828384 11291 ts_tablet_manager.cc:1428] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a: Time spent starting tablet: real 0.014s	user 0.013s	sys 0.000s
I20250901 14:19:05.828446 11293 consensus_queue.cc:237] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c7670554dbe94a3f843ea72e263a528a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36203 } }
I20250901 14:19:05.829427 11291 tablet_bootstrap.cc:492] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a: Bootstrap starting.
I20250901 14:19:05.834673 11291 tablet_bootstrap.cc:654] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:05.835162 11135 catalog_manager.cc:5582] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a reported cstate change: term changed from 0 to 1, leader changed from <none> to c7670554dbe94a3f843ea72e263a528a (127.4.231.193). New cstate: current_term: 1 leader_uuid: "c7670554dbe94a3f843ea72e263a528a" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c7670554dbe94a3f843ea72e263a528a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36203 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:05.839013 11291 tablet_bootstrap.cc:492] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a: No bootstrap required, opened a new log
I20250901 14:19:05.839447 11291 ts_tablet_manager.cc:1397] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a: Time spent bootstrapping tablet: real 0.010s	user 0.008s	sys 0.000s
I20250901 14:19:05.841861 11291 raft_consensus.cc:357] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c7670554dbe94a3f843ea72e263a528a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36203 } }
I20250901 14:19:05.842373 11291 raft_consensus.cc:383] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:05.842607 11291 raft_consensus.cc:738] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: c7670554dbe94a3f843ea72e263a528a, State: Initialized, Role: FOLLOWER
I20250901 14:19:05.843262 11291 consensus_queue.cc:260] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c7670554dbe94a3f843ea72e263a528a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36203 } }
I20250901 14:19:05.843847 11291 raft_consensus.cc:397] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:05.844133 11291 raft_consensus.cc:491] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:05.844456 11291 raft_consensus.cc:3058] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:05.849350 11291 raft_consensus.cc:513] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c7670554dbe94a3f843ea72e263a528a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36203 } }
I20250901 14:19:05.849897 11291 leader_election.cc:304] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: c7670554dbe94a3f843ea72e263a528a; no voters: 
I20250901 14:19:05.850304 11291 leader_election.cc:290] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:05.850478 11293 raft_consensus.cc:2802] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:05.850966 11293 raft_consensus.cc:695] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 1 LEADER]: Becoming Leader. State: Replica: c7670554dbe94a3f843ea72e263a528a, State: Running, Role: LEADER
I20250901 14:19:05.851694 11293 consensus_queue.cc:237] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c7670554dbe94a3f843ea72e263a528a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36203 } }
I20250901 14:19:05.852088 11291 ts_tablet_manager.cc:1428] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a: Time spent starting tablet: real 0.012s	user 0.014s	sys 0.000s
I20250901 14:19:05.857283 11135 catalog_manager.cc:5582] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a reported cstate change: term changed from 0 to 1, leader changed from <none> to c7670554dbe94a3f843ea72e263a528a (127.4.231.193). New cstate: current_term: 1 leader_uuid: "c7670554dbe94a3f843ea72e263a528a" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c7670554dbe94a3f843ea72e263a528a" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 36203 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:05.873395  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:19:05.892614  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:05.893311  5023 tablet_replica.cc:331] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a: stopping tablet replica
I20250901 14:19:05.893863  5023 raft_consensus.cc:2241] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.894302  5023 raft_consensus.cc:2270] T 299a5e63b7ec4dde86d496db33d6fa78 P c7670554dbe94a3f843ea72e263a528a [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.896574  5023 tablet_replica.cc:331] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a: stopping tablet replica
I20250901 14:19:05.897037  5023 raft_consensus.cc:2241] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.897430  5023 raft_consensus.cc:2270] T e5a9c9c8426e44d590cfdb1a8902c7af P c7670554dbe94a3f843ea72e263a528a [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.917692  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:19:05.928069  5023 master.cc:561] Master@127.4.231.254:42635 shutting down...
I20250901 14:19:05.946137  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:05.946655  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:05.947075  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 55eba3d176834b9d80f3534b67275ddb: stopping tablet replica
I20250901 14:19:05.967319  5023 master.cc:583] Master@127.4.231.254:42635 shutdown complete.
I20250901 14:19:05.980909  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:37943
I20250901 14:19:05.982018  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:05.986469 11300 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:05.987790 11301 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:05.988131 11303 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:05.988843  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:05.989787  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:05.989954  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:05.990068  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736345990058 us; error 0 us; skew 500 ppm
I20250901 14:19:05.990525  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:05.992691  5023 webserver.cc:480] Webserver started at http://127.4.231.254:40681/ using document root <none> and password file <none>
I20250901 14:19:05.993108  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:05.993259  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:05.993467  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:05.994556  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "7195772a26ff49c9a2d5b1804a264e50"
format_stamp: "Formatted at 2025-09-01 14:19:05 on dist-test-slave-9gf0"
I20250901 14:19:05.998858  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.002s
I20250901 14:19:06.002054 11308 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:06.002775  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:19:06.003047  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "7195772a26ff49c9a2d5b1804a264e50"
format_stamp: "Formatted at 2025-09-01 14:19:05 on dist-test-slave-9gf0"
I20250901 14:19:06.003321  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:06.015363  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:06.016418  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:06.057760  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:37943
I20250901 14:19:06.057850 11370 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:37943 every 8 connection(s)
I20250901 14:19:06.061468 11371 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:06.072108 11371 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50: Bootstrap starting.
I20250901 14:19:06.076272 11371 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:06.080165 11371 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50: No bootstrap required, opened a new log
I20250901 14:19:06.082136 11371 raft_consensus.cc:357] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7195772a26ff49c9a2d5b1804a264e50" member_type: VOTER }
I20250901 14:19:06.082504 11371 raft_consensus.cc:383] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:06.082759 11371 raft_consensus.cc:738] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 7195772a26ff49c9a2d5b1804a264e50, State: Initialized, Role: FOLLOWER
I20250901 14:19:06.083281 11371 consensus_queue.cc:260] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7195772a26ff49c9a2d5b1804a264e50" member_type: VOTER }
I20250901 14:19:06.083716 11371 raft_consensus.cc:397] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:06.083930 11371 raft_consensus.cc:491] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:06.084172 11371 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:06.088769 11371 raft_consensus.cc:513] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7195772a26ff49c9a2d5b1804a264e50" member_type: VOTER }
I20250901 14:19:06.089263 11371 leader_election.cc:304] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 7195772a26ff49c9a2d5b1804a264e50; no voters: 
I20250901 14:19:06.090420 11371 leader_election.cc:290] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:06.090767 11374 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:06.092106 11374 raft_consensus.cc:695] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 1 LEADER]: Becoming Leader. State: Replica: 7195772a26ff49c9a2d5b1804a264e50, State: Running, Role: LEADER
I20250901 14:19:06.092762 11374 consensus_queue.cc:237] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7195772a26ff49c9a2d5b1804a264e50" member_type: VOTER }
I20250901 14:19:06.093401 11371 sys_catalog.cc:564] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:19:06.097352 11376 sys_catalog.cc:455] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [sys.catalog]: SysCatalogTable state changed. Reason: New leader 7195772a26ff49c9a2d5b1804a264e50. Latest consensus state: current_term: 1 leader_uuid: "7195772a26ff49c9a2d5b1804a264e50" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7195772a26ff49c9a2d5b1804a264e50" member_type: VOTER } }
I20250901 14:19:06.098107 11376 sys_catalog.cc:458] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:06.099009 11375 sys_catalog.cc:455] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "7195772a26ff49c9a2d5b1804a264e50" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "7195772a26ff49c9a2d5b1804a264e50" member_type: VOTER } }
I20250901 14:19:06.099558 11375 sys_catalog.cc:458] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:06.100100 11381 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:19:06.104773 11381 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:19:06.108915  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:19:06.112977 11381 catalog_manager.cc:1349] Generated new cluster ID: a0df43ffa07249378995e2ab06aa7c75
I20250901 14:19:06.113209 11381 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:19:06.127200 11381 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:19:06.128443 11381 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:19:06.144598 11381 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50: Generated new TSK 0
I20250901 14:19:06.145191 11381 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:19:06.176301  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:06.182062 11392 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:06.183065 11393 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:06.184584 11395 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:06.184798  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:06.185676  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:06.185855  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:06.185976  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736346185964 us; error 0 us; skew 500 ppm
I20250901 14:19:06.186432  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:06.188787  5023 webserver.cc:480] Webserver started at http://127.4.231.193:33615/ using document root <none> and password file <none>
I20250901 14:19:06.189222  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:06.189373  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:06.189642  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:06.190686  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "5fe00bb74b1a45768a956b70ca27fca1"
format_stamp: "Formatted at 2025-09-01 14:19:06 on dist-test-slave-9gf0"
I20250901 14:19:06.195142  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.000s	sys 0.005s
I20250901 14:19:06.198297 11400 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:06.199090  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:19:06.199352  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "5fe00bb74b1a45768a956b70ca27fca1"
format_stamp: "Formatted at 2025-09-01 14:19:06 on dist-test-slave-9gf0"
I20250901 14:19:06.199651  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:06.220888  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:06.222378  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:06.229799  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:06.230024  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:06.230298  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:06.230458  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:06.285404  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:35067
I20250901 14:19:06.285506 11470 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:35067 every 8 connection(s)
I20250901 14:19:06.289795 11471 heartbeater.cc:344] Connected to a master server at 127.4.231.254:37943
I20250901 14:19:06.289853  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
I20250901 14:19:06.290177 11471 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:06.290849 11471 heartbeater.cc:507] Master 127.4.231.254:37943 requested a full tablet report, sending...
I20250901 14:19:06.292573 11325 ts_manager.cc:194] Registered new tserver with Master: 5fe00bb74b1a45768a956b70ca27fca1 (127.4.231.193:35067)
I20250901 14:19:06.294278 11325 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:40506
W20250901 14:19:06.296500 11473 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:06.297475 11474 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:06.298305 11476 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:06.299579  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:06.300269  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:06.300454  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:06.300619  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736346300606 us; error 0 us; skew 500 ppm
I20250901 14:19:06.301147  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:06.303468  5023 webserver.cc:480] Webserver started at http://127.4.231.194:42949/ using document root <none> and password file <none>
I20250901 14:19:06.303901  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:06.304090  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:06.304332  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:06.305426  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-1-root/instance:
uuid: "6b8548ae80e84dbfa3560d00428972e4"
format_stamp: "Formatted at 2025-09-01 14:19:06 on dist-test-slave-9gf0"
I20250901 14:19:06.307497 11471 heartbeater.cc:499] Master 127.4.231.254:37943 was elected leader, sending a full tablet report...
I20250901 14:19:06.310171  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.001s
I20250901 14:19:06.313714 11481 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:06.314460  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.001s
I20250901 14:19:06.314749  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-1-root
uuid: "6b8548ae80e84dbfa3560d00428972e4"
format_stamp: "Formatted at 2025-09-01 14:19:06 on dist-test-slave-9gf0"
I20250901 14:19:06.315008  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-1-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-1-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-1-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:06.326983  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:06.327916  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:06.332846  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:06.333127  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:06.333412  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:06.333636  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.001s	sys 0.000s
I20250901 14:19:06.387758  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.194:40287
I20250901 14:19:06.387853 11550 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.194:40287 every 8 connection(s)
I20250901 14:19:06.392357 11551 heartbeater.cc:344] Connected to a master server at 127.4.231.254:37943
I20250901 14:19:06.392359  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
I20250901 14:19:06.392936 11551 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:06.393908 11551 heartbeater.cc:507] Master 127.4.231.254:37943 requested a full tablet report, sending...
I20250901 14:19:06.395848 11324 ts_manager.cc:194] Registered new tserver with Master: 6b8548ae80e84dbfa3560d00428972e4 (127.4.231.194:40287)
I20250901 14:19:06.397630 11324 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:40532
W20250901 14:19:06.399525 11553 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:06.402892 11554 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:06.404706 11556 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:06.407992  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:06.408845  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:06.409040  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:06.409199  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736346409184 us; error 0 us; skew 500 ppm
I20250901 14:19:06.409745  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:06.410980 11551 heartbeater.cc:499] Master 127.4.231.254:37943 was elected leader, sending a full tablet report...
I20250901 14:19:06.412593  5023 webserver.cc:480] Webserver started at http://127.4.231.195:45329/ using document root <none> and password file <none>
I20250901 14:19:06.413076  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:06.413264  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:06.413555  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:06.414601  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-2-root/instance:
uuid: "2ed9aba178944de9b4862dadadd4dc1c"
format_stamp: "Formatted at 2025-09-01 14:19:06 on dist-test-slave-9gf0"
I20250901 14:19:06.418938  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:19:06.422503 11561 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:06.423256  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.003s	sys 0.000s
I20250901 14:19:06.423544  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-2-root
uuid: "2ed9aba178944de9b4862dadadd4dc1c"
format_stamp: "Formatted at 2025-09-01 14:19:06 on dist-test-slave-9gf0"
I20250901 14:19:06.423818  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-2-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-2-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.KeepAlivePeriodically_KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically_1.1756736290962133-5023-0/minicluster-data/ts-2-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:06.440604  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:06.441781  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:06.447791  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:06.448109  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:06.448417  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:06.448630  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:06.504278  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.195:41707
I20250901 14:19:06.504379 11630 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.195:41707 every 8 connection(s)
I20250901 14:19:06.508867 11631 heartbeater.cc:344] Connected to a master server at 127.4.231.254:37943
I20250901 14:19:06.509212 11631 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:06.509966 11631 heartbeater.cc:507] Master 127.4.231.254:37943 requested a full tablet report, sending...
I20250901 14:19:06.511622 11324 ts_manager.cc:194] Registered new tserver with Master: 2ed9aba178944de9b4862dadadd4dc1c (127.4.231.195:41707)
I20250901 14:19:06.512481  5023 internal_mini_cluster.cc:371] 3 TS(s) registered with all masters after 0.004941523s
I20250901 14:19:06.512935 11324 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:40560
I20250901 14:19:06.525560 11631 heartbeater.cc:499] Master 127.4.231.254:37943 was elected leader, sending a full tablet report...
I20250901 14:19:06.535004 11324 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:40568:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 3
split_rows_range_bounds {
}
partition_schema {
  hash_schema {
    columns {
      name: "key"
    }
    num_buckets: 3
    seed: 0
  }
}
W20250901 14:19:06.537032 11324 catalog_manager.cc:6944] The number of live tablet servers is not enough to re-replicate a tablet replica of the newly created table client-testtb in case of a server failure: 4 tablet servers would be needed, 3 are available. Consider bringing up more tablet servers.
I20250901 14:19:06.600288 11516 tablet_service.cc:1468] Processing CreateTablet for tablet 562e8cc5927d4f588bb4f62c6df9edae (DEFAULT_TABLE table=client-testtb [id=a8220c91db4341e591d40cc8d14153d9]), partition=HASH (key) PARTITION 0, RANGE (key) PARTITION UNBOUNDED
I20250901 14:19:06.601604 11516 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 562e8cc5927d4f588bb4f62c6df9edae. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:06.605691 11515 tablet_service.cc:1468] Processing CreateTablet for tablet e93dd0403bf842f2af8815e9ce4738a4 (DEFAULT_TABLE table=client-testtb [id=a8220c91db4341e591d40cc8d14153d9]), partition=HASH (key) PARTITION 1, RANGE (key) PARTITION UNBOUNDED
I20250901 14:19:06.607040 11515 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet e93dd0403bf842f2af8815e9ce4738a4. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:06.608217 11596 tablet_service.cc:1468] Processing CreateTablet for tablet 562e8cc5927d4f588bb4f62c6df9edae (DEFAULT_TABLE table=client-testtb [id=a8220c91db4341e591d40cc8d14153d9]), partition=HASH (key) PARTITION 0, RANGE (key) PARTITION UNBOUNDED
I20250901 14:19:06.608386 11514 tablet_service.cc:1468] Processing CreateTablet for tablet 0b67635175bb438bbd63127da1d1bdda (DEFAULT_TABLE table=client-testtb [id=a8220c91db4341e591d40cc8d14153d9]), partition=HASH (key) PARTITION 2, RANGE (key) PARTITION UNBOUNDED
I20250901 14:19:06.609570 11596 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 562e8cc5927d4f588bb4f62c6df9edae. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:06.609722 11514 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 0b67635175bb438bbd63127da1d1bdda. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:06.610062 11433 tablet_service.cc:1468] Processing CreateTablet for tablet 0b67635175bb438bbd63127da1d1bdda (DEFAULT_TABLE table=client-testtb [id=a8220c91db4341e591d40cc8d14153d9]), partition=HASH (key) PARTITION 2, RANGE (key) PARTITION UNBOUNDED
I20250901 14:19:06.611300 11433 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 0b67635175bb438bbd63127da1d1bdda. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:06.620497 11595 tablet_service.cc:1468] Processing CreateTablet for tablet e93dd0403bf842f2af8815e9ce4738a4 (DEFAULT_TABLE table=client-testtb [id=a8220c91db4341e591d40cc8d14153d9]), partition=HASH (key) PARTITION 1, RANGE (key) PARTITION UNBOUNDED
I20250901 14:19:06.619125 11434 tablet_service.cc:1468] Processing CreateTablet for tablet e93dd0403bf842f2af8815e9ce4738a4 (DEFAULT_TABLE table=client-testtb [id=a8220c91db4341e591d40cc8d14153d9]), partition=HASH (key) PARTITION 1, RANGE (key) PARTITION UNBOUNDED
I20250901 14:19:06.621789 11595 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet e93dd0403bf842f2af8815e9ce4738a4. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:06.622397 11434 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet e93dd0403bf842f2af8815e9ce4738a4. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:06.623665 11435 tablet_service.cc:1468] Processing CreateTablet for tablet 562e8cc5927d4f588bb4f62c6df9edae (DEFAULT_TABLE table=client-testtb [id=a8220c91db4341e591d40cc8d14153d9]), partition=HASH (key) PARTITION 0, RANGE (key) PARTITION UNBOUNDED
I20250901 14:19:06.624927 11435 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 562e8cc5927d4f588bb4f62c6df9edae. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:06.639554 11594 tablet_service.cc:1468] Processing CreateTablet for tablet 0b67635175bb438bbd63127da1d1bdda (DEFAULT_TABLE table=client-testtb [id=a8220c91db4341e591d40cc8d14153d9]), partition=HASH (key) PARTITION 2, RANGE (key) PARTITION UNBOUNDED
I20250901 14:19:06.640921 11594 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 0b67635175bb438bbd63127da1d1bdda. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:06.670972 11646 tablet_bootstrap.cc:492] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4: Bootstrap starting.
I20250901 14:19:06.682107 11646 tablet_bootstrap.cc:654] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:06.680143 11645 tablet_bootstrap.cc:492] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1: Bootstrap starting.
I20250901 14:19:06.685107 11647 tablet_bootstrap.cc:492] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c: Bootstrap starting.
I20250901 14:19:06.693830 11645 tablet_bootstrap.cc:654] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:06.696138 11646 tablet_bootstrap.cc:492] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4: No bootstrap required, opened a new log
I20250901 14:19:06.696635 11647 tablet_bootstrap.cc:654] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:06.696650 11646 ts_tablet_manager.cc:1397] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4: Time spent bootstrapping tablet: real 0.026s	user 0.010s	sys 0.001s
I20250901 14:19:06.706825 11647 tablet_bootstrap.cc:492] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c: No bootstrap required, opened a new log
I20250901 14:19:06.707264 11647 ts_tablet_manager.cc:1397] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c: Time spent bootstrapping tablet: real 0.022s	user 0.009s	sys 0.006s
I20250901 14:19:06.707209 11646 raft_consensus.cc:357] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.708169 11646 raft_consensus.cc:383] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:06.708472 11646 raft_consensus.cc:738] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 6b8548ae80e84dbfa3560d00428972e4, State: Initialized, Role: FOLLOWER
I20250901 14:19:06.709160 11646 consensus_queue.cc:260] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.709849 11647 raft_consensus.cc:357] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.710590 11647 raft_consensus.cc:383] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:06.710934 11647 raft_consensus.cc:738] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 2ed9aba178944de9b4862dadadd4dc1c, State: Initialized, Role: FOLLOWER
I20250901 14:19:06.711860 11647 consensus_queue.cc:260] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.714428 11645 tablet_bootstrap.cc:492] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1: No bootstrap required, opened a new log
I20250901 14:19:06.714883 11645 ts_tablet_manager.cc:1397] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1: Time spent bootstrapping tablet: real 0.035s	user 0.008s	sys 0.012s
I20250901 14:19:06.712728 11646 ts_tablet_manager.cc:1428] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4: Time spent starting tablet: real 0.015s	user 0.006s	sys 0.000s
I20250901 14:19:06.717625 11645 raft_consensus.cc:357] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.718360 11645 raft_consensus.cc:383] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:06.718647 11645 raft_consensus.cc:738] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 5fe00bb74b1a45768a956b70ca27fca1, State: Initialized, Role: FOLLOWER
I20250901 14:19:06.722821 11646 tablet_bootstrap.cc:492] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4: Bootstrap starting.
I20250901 14:19:06.719449 11645 consensus_queue.cc:260] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.729331 11646 tablet_bootstrap.cc:654] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:06.730438 11647 ts_tablet_manager.cc:1428] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c: Time spent starting tablet: real 0.023s	user 0.013s	sys 0.008s
I20250901 14:19:06.731338 11647 tablet_bootstrap.cc:492] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c: Bootstrap starting.
I20250901 14:19:06.739154 11647 tablet_bootstrap.cc:654] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:06.738348 11645 ts_tablet_manager.cc:1428] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1: Time spent starting tablet: real 0.023s	user 0.007s	sys 0.008s
I20250901 14:19:06.740727 11645 tablet_bootstrap.cc:492] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1: Bootstrap starting.
I20250901 14:19:06.740775 11646 tablet_bootstrap.cc:492] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4: No bootstrap required, opened a new log
I20250901 14:19:06.743070 11646 ts_tablet_manager.cc:1397] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4: Time spent bootstrapping tablet: real 0.020s	user 0.014s	sys 0.001s
I20250901 14:19:06.749634 11646 raft_consensus.cc:357] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.750408 11646 raft_consensus.cc:383] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:06.750726 11646 raft_consensus.cc:738] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 6b8548ae80e84dbfa3560d00428972e4, State: Initialized, Role: FOLLOWER
I20250901 14:19:06.751271 11645 tablet_bootstrap.cc:654] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:06.751533 11646 consensus_queue.cc:260] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.753931 11647 tablet_bootstrap.cc:492] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c: No bootstrap required, opened a new log
I20250901 14:19:06.753957 11646 ts_tablet_manager.cc:1428] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4: Time spent starting tablet: real 0.007s	user 0.006s	sys 0.000s
I20250901 14:19:06.754571 11647 ts_tablet_manager.cc:1397] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c: Time spent bootstrapping tablet: real 0.023s	user 0.014s	sys 0.009s
I20250901 14:19:06.755133 11646 tablet_bootstrap.cc:492] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4: Bootstrap starting.
I20250901 14:19:06.759291 11647 raft_consensus.cc:357] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.760270 11647 raft_consensus.cc:383] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:06.760646 11647 raft_consensus.cc:738] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 2ed9aba178944de9b4862dadadd4dc1c, State: Initialized, Role: FOLLOWER
I20250901 14:19:06.761926 11646 tablet_bootstrap.cc:654] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:06.761677 11647 consensus_queue.cc:260] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.765129 11647 ts_tablet_manager.cc:1428] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c: Time spent starting tablet: real 0.010s	user 0.004s	sys 0.003s
I20250901 14:19:06.766475 11647 tablet_bootstrap.cc:492] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c: Bootstrap starting.
I20250901 14:19:06.775673 11646 tablet_bootstrap.cc:492] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4: No bootstrap required, opened a new log
I20250901 14:19:06.776111 11646 ts_tablet_manager.cc:1397] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4: Time spent bootstrapping tablet: real 0.021s	user 0.012s	sys 0.004s
I20250901 14:19:06.776109 11647 tablet_bootstrap.cc:654] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:06.778690 11646 raft_consensus.cc:357] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.779500 11646 raft_consensus.cc:383] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:06.779983 11645 tablet_bootstrap.cc:492] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1: No bootstrap required, opened a new log
I20250901 14:19:06.779811 11646 raft_consensus.cc:738] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 6b8548ae80e84dbfa3560d00428972e4, State: Initialized, Role: FOLLOWER
I20250901 14:19:06.780652 11645 ts_tablet_manager.cc:1397] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1: Time spent bootstrapping tablet: real 0.040s	user 0.019s	sys 0.012s
I20250901 14:19:06.781293 11646 consensus_queue.cc:260] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.783687 11651 raft_consensus.cc:491] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Starting pre-election (no leader contacted us within the election timeout)
I20250901 14:19:06.784286 11651 raft_consensus.cc:513] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.785723 11645 raft_consensus.cc:357] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.786505 11645 raft_consensus.cc:383] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:06.786854 11645 raft_consensus.cc:738] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 5fe00bb74b1a45768a956b70ca27fca1, State: Initialized, Role: FOLLOWER
I20250901 14:19:06.787662 11645 consensus_queue.cc:260] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.791824 11645 ts_tablet_manager.cc:1428] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1: Time spent starting tablet: real 0.011s	user 0.000s	sys 0.005s
I20250901 14:19:06.792801 11645 tablet_bootstrap.cc:492] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1: Bootstrap starting.
I20250901 14:19:06.797894 11651 leader_election.cc:290] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [CANDIDATE]: Term 1 pre-election: Requested pre-vote from peers 5fe00bb74b1a45768a956b70ca27fca1 (127.4.231.193:35067), 2ed9aba178944de9b4862dadadd4dc1c (127.4.231.195:41707)
I20250901 14:19:06.801082 11645 tablet_bootstrap.cc:654] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:06.804605 11647 tablet_bootstrap.cc:492] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c: No bootstrap required, opened a new log
I20250901 14:19:06.805186 11647 ts_tablet_manager.cc:1397] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c: Time spent bootstrapping tablet: real 0.039s	user 0.016s	sys 0.019s
I20250901 14:19:06.806145 11646 ts_tablet_manager.cc:1428] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4: Time spent starting tablet: real 0.030s	user 0.007s	sys 0.000s
I20250901 14:19:06.808127 11647 raft_consensus.cc:357] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.808995 11647 raft_consensus.cc:383] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:06.809253 11653 raft_consensus.cc:491] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Starting pre-election (no leader contacted us within the election timeout)
I20250901 14:19:06.809365 11647 raft_consensus.cc:738] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 2ed9aba178944de9b4862dadadd4dc1c, State: Initialized, Role: FOLLOWER
I20250901 14:19:06.809747 11653 raft_consensus.cc:513] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.810241 11647 consensus_queue.cc:260] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.812314 11653 leader_election.cc:290] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [CANDIDATE]: Term 1 pre-election: Requested pre-vote from peers 6b8548ae80e84dbfa3560d00428972e4 (127.4.231.194:40287), 2ed9aba178944de9b4862dadadd4dc1c (127.4.231.195:41707)
I20250901 14:19:06.812889 11647 ts_tablet_manager.cc:1428] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c: Time spent starting tablet: real 0.007s	user 0.007s	sys 0.000s
I20250901 14:19:06.829926 11446 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "e93dd0403bf842f2af8815e9ce4738a4" candidate_uuid: "6b8548ae80e84dbfa3560d00428972e4" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "5fe00bb74b1a45768a956b70ca27fca1" is_pre_election: true
I20250901 14:19:06.830736 11446 raft_consensus.cc:2466] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 6b8548ae80e84dbfa3560d00428972e4 in term 0.
I20250901 14:19:06.832041 11484 leader_election.cc:304] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [CANDIDATE]: Term 1 pre-election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 5fe00bb74b1a45768a956b70ca27fca1, 6b8548ae80e84dbfa3560d00428972e4; no voters: 
I20250901 14:19:06.832899 11651 raft_consensus.cc:2802] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Leader pre-election won for term 1
I20250901 14:19:06.833240 11651 raft_consensus.cc:491] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Starting leader election (no leader contacted us within the election timeout)
I20250901 14:19:06.833600 11651 raft_consensus.cc:3058] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:06.840615 11651 raft_consensus.cc:513] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.843169 11526 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "e93dd0403bf842f2af8815e9ce4738a4" candidate_uuid: "5fe00bb74b1a45768a956b70ca27fca1" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "6b8548ae80e84dbfa3560d00428972e4" is_pre_election: true
I20250901 14:19:06.844125 11526 raft_consensus.cc:2391] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 1 FOLLOWER]: Leader pre-election vote request: Denying vote to candidate 5fe00bb74b1a45768a956b70ca27fca1 in current term 1: Already voted for candidate 6b8548ae80e84dbfa3560d00428972e4 in this term.
I20250901 14:19:06.844545 11446 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "e93dd0403bf842f2af8815e9ce4738a4" candidate_uuid: "6b8548ae80e84dbfa3560d00428972e4" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "5fe00bb74b1a45768a956b70ca27fca1"
I20250901 14:19:06.845109 11446 raft_consensus.cc:3058] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:06.852123 11606 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "e93dd0403bf842f2af8815e9ce4738a4" candidate_uuid: "5fe00bb74b1a45768a956b70ca27fca1" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "2ed9aba178944de9b4862dadadd4dc1c" is_pre_election: true
I20250901 14:19:06.852758 11606 raft_consensus.cc:2466] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 5fe00bb74b1a45768a956b70ca27fca1 in term 0.
I20250901 14:19:06.853876 11645 tablet_bootstrap.cc:492] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1: No bootstrap required, opened a new log
I20250901 14:19:06.854305 11645 ts_tablet_manager.cc:1397] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1: Time spent bootstrapping tablet: real 0.062s	user 0.024s	sys 0.013s
I20250901 14:19:06.854136 11446 raft_consensus.cc:2466] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 6b8548ae80e84dbfa3560d00428972e4 in term 1.
I20250901 14:19:06.855652 11606 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "e93dd0403bf842f2af8815e9ce4738a4" candidate_uuid: "6b8548ae80e84dbfa3560d00428972e4" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "2ed9aba178944de9b4862dadadd4dc1c" is_pre_election: true
I20250901 14:19:06.855931 11484 leader_election.cc:304] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 5fe00bb74b1a45768a956b70ca27fca1, 6b8548ae80e84dbfa3560d00428972e4; no voters: 
I20250901 14:19:06.856243 11606 raft_consensus.cc:2466] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 6b8548ae80e84dbfa3560d00428972e4 in term 0.
I20250901 14:19:06.856993 11605 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "e93dd0403bf842f2af8815e9ce4738a4" candidate_uuid: "6b8548ae80e84dbfa3560d00428972e4" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "2ed9aba178944de9b4862dadadd4dc1c"
I20250901 14:19:06.857007 11645 raft_consensus.cc:357] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.857921 11605 raft_consensus.cc:3058] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:06.858172 11645 raft_consensus.cc:383] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:06.858606 11645 raft_consensus.cc:738] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 5fe00bb74b1a45768a956b70ca27fca1, State: Initialized, Role: FOLLOWER
I20250901 14:19:06.859268 11664 raft_consensus.cc:2802] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:06.859268 11645 consensus_queue.cc:260] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.861081 11645 ts_tablet_manager.cc:1428] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1: Time spent starting tablet: real 0.007s	user 0.004s	sys 0.000s
I20250901 14:19:06.862104 11404 leader_election.cc:304] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [CANDIDATE]: Term 1 pre-election: Election decided. Result: candidate won. Election summary: received 3 responses out of 3 voters: 2 yes votes; 1 no votes. yes voters: 2ed9aba178944de9b4862dadadd4dc1c, 5fe00bb74b1a45768a956b70ca27fca1; no voters: 6b8548ae80e84dbfa3560d00428972e4
I20250901 14:19:06.863245 11653 raft_consensus.cc:2762] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 FOLLOWER]: Leader pre-election decision vote started in defunct term 0: won
I20250901 14:19:06.864027 11651 leader_election.cc:290] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [CANDIDATE]: Term 1 election: Requested vote from peers 5fe00bb74b1a45768a956b70ca27fca1 (127.4.231.193:35067), 2ed9aba178944de9b4862dadadd4dc1c (127.4.231.195:41707)
I20250901 14:19:06.865518 11605 raft_consensus.cc:2466] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 6b8548ae80e84dbfa3560d00428972e4 in term 1.
I20250901 14:19:06.869040 11664 raft_consensus.cc:695] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 1 LEADER]: Becoming Leader. State: Replica: 6b8548ae80e84dbfa3560d00428972e4, State: Running, Role: LEADER
I20250901 14:19:06.869869 11664 consensus_queue.cc:237] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 2, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.876439 11325 catalog_manager.cc:5582] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 reported cstate change: term changed from 0 to 1, leader changed from <none> to 6b8548ae80e84dbfa3560d00428972e4 (127.4.231.194). New cstate: current_term: 1 leader_uuid: "6b8548ae80e84dbfa3560d00428972e4" committed_config { opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } health_report { overall_health: HEALTHY } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } health_report { overall_health: UNKNOWN } } }
I20250901 14:19:06.880558 11653 raft_consensus.cc:491] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Starting pre-election (no leader contacted us within the election timeout)
I20250901 14:19:06.881002 11653 raft_consensus.cc:513] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.882628 11653 leader_election.cc:290] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [CANDIDATE]: Term 1 pre-election: Requested pre-vote from peers 6b8548ae80e84dbfa3560d00428972e4 (127.4.231.194:40287), 2ed9aba178944de9b4862dadadd4dc1c (127.4.231.195:41707)
I20250901 14:19:06.883455 11526 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "562e8cc5927d4f588bb4f62c6df9edae" candidate_uuid: "5fe00bb74b1a45768a956b70ca27fca1" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "6b8548ae80e84dbfa3560d00428972e4" is_pre_election: true
I20250901 14:19:06.884110 11526 raft_consensus.cc:2466] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 5fe00bb74b1a45768a956b70ca27fca1 in term 0.
I20250901 14:19:06.884981 11605 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "562e8cc5927d4f588bb4f62c6df9edae" candidate_uuid: "5fe00bb74b1a45768a956b70ca27fca1" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "2ed9aba178944de9b4862dadadd4dc1c" is_pre_election: true
I20250901 14:19:06.885303 11401 leader_election.cc:304] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [CANDIDATE]: Term 1 pre-election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 5fe00bb74b1a45768a956b70ca27fca1, 6b8548ae80e84dbfa3560d00428972e4; no voters: 
I20250901 14:19:06.885689 11605 raft_consensus.cc:2466] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 5fe00bb74b1a45768a956b70ca27fca1 in term 0.
I20250901 14:19:06.886467 11653 raft_consensus.cc:2802] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Leader pre-election won for term 1
I20250901 14:19:06.886821 11653 raft_consensus.cc:491] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Starting leader election (no leader contacted us within the election timeout)
I20250901 14:19:06.887095 11653 raft_consensus.cc:3058] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:06.892037 11653 raft_consensus.cc:513] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.893710 11653 leader_election.cc:290] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [CANDIDATE]: Term 1 election: Requested vote from peers 6b8548ae80e84dbfa3560d00428972e4 (127.4.231.194:40287), 2ed9aba178944de9b4862dadadd4dc1c (127.4.231.195:41707)
I20250901 14:19:06.894312 11526 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "562e8cc5927d4f588bb4f62c6df9edae" candidate_uuid: "5fe00bb74b1a45768a956b70ca27fca1" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "6b8548ae80e84dbfa3560d00428972e4"
I20250901 14:19:06.894654 11605 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "562e8cc5927d4f588bb4f62c6df9edae" candidate_uuid: "5fe00bb74b1a45768a956b70ca27fca1" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "2ed9aba178944de9b4862dadadd4dc1c"
I20250901 14:19:06.894819 11526 raft_consensus.cc:3058] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:06.895148 11605 raft_consensus.cc:3058] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:06.901103 11605 raft_consensus.cc:2466] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 5fe00bb74b1a45768a956b70ca27fca1 in term 1.
I20250901 14:19:06.902060 11404 leader_election.cc:304] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 2ed9aba178944de9b4862dadadd4dc1c, 5fe00bb74b1a45768a956b70ca27fca1; no voters: 
I20250901 14:19:06.902756 11653 raft_consensus.cc:2802] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:06.904376 11526 raft_consensus.cc:2466] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4 [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 5fe00bb74b1a45768a956b70ca27fca1 in term 1.
I20250901 14:19:06.904824 11653 raft_consensus.cc:695] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 LEADER]: Becoming Leader. State: Replica: 5fe00bb74b1a45768a956b70ca27fca1, State: Running, Role: LEADER
I20250901 14:19:06.905925 11653 consensus_queue.cc:237] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 2, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.913661 11324 catalog_manager.cc:5582] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 reported cstate change: term changed from 0 to 1, leader changed from <none> to 5fe00bb74b1a45768a956b70ca27fca1 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "5fe00bb74b1a45768a956b70ca27fca1" committed_config { opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } health_report { overall_health: HEALTHY } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } health_report { overall_health: UNKNOWN } } }
I20250901 14:19:06.956050 11652 raft_consensus.cc:491] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Starting pre-election (no leader contacted us within the election timeout)
I20250901 14:19:06.956542 11652 raft_consensus.cc:513] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Starting pre-election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.958681 11652 leader_election.cc:290] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [CANDIDATE]: Term 1 pre-election: Requested pre-vote from peers 5fe00bb74b1a45768a956b70ca27fca1 (127.4.231.193:35067), 6b8548ae80e84dbfa3560d00428972e4 (127.4.231.194:40287)
I20250901 14:19:06.971860 11526 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "0b67635175bb438bbd63127da1d1bdda" candidate_uuid: "2ed9aba178944de9b4862dadadd4dc1c" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "6b8548ae80e84dbfa3560d00428972e4" is_pre_election: true
I20250901 14:19:06.972515 11446 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "0b67635175bb438bbd63127da1d1bdda" candidate_uuid: "2ed9aba178944de9b4862dadadd4dc1c" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "5fe00bb74b1a45768a956b70ca27fca1" is_pre_election: true
I20250901 14:19:06.972589 11526 raft_consensus.cc:2466] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 2ed9aba178944de9b4862dadadd4dc1c in term 0.
I20250901 14:19:06.973146 11446 raft_consensus.cc:2466] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Leader pre-election vote request: Granting yes vote for candidate 2ed9aba178944de9b4862dadadd4dc1c in term 0.
I20250901 14:19:06.973824 11562 leader_election.cc:304] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [CANDIDATE]: Term 1 pre-election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 2ed9aba178944de9b4862dadadd4dc1c, 6b8548ae80e84dbfa3560d00428972e4; no voters: 
I20250901 14:19:06.974571 11652 raft_consensus.cc:2802] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Leader pre-election won for term 1
I20250901 14:19:06.974915 11652 raft_consensus.cc:491] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Starting leader election (no leader contacted us within the election timeout)
I20250901 14:19:06.975167 11652 raft_consensus.cc:3058] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:06.981828 11652 raft_consensus.cc:513] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:06.983520 11652 leader_election.cc:290] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [CANDIDATE]: Term 1 election: Requested vote from peers 5fe00bb74b1a45768a956b70ca27fca1 (127.4.231.193:35067), 6b8548ae80e84dbfa3560d00428972e4 (127.4.231.194:40287)
I20250901 14:19:06.984217 11446 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "0b67635175bb438bbd63127da1d1bdda" candidate_uuid: "2ed9aba178944de9b4862dadadd4dc1c" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "5fe00bb74b1a45768a956b70ca27fca1"
I20250901 14:19:06.984417 11526 tablet_service.cc:1813] Received RequestConsensusVote() RPC: tablet_id: "0b67635175bb438bbd63127da1d1bdda" candidate_uuid: "2ed9aba178944de9b4862dadadd4dc1c" candidate_term: 1 candidate_status { last_received { term: 0 index: 0 } } ignore_live_leader: false dest_uuid: "6b8548ae80e84dbfa3560d00428972e4"
I20250901 14:19:06.984722 11446 raft_consensus.cc:3058] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:06.984915 11526 raft_consensus.cc:3058] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:06.990345 11526 raft_consensus.cc:2466] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4 [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 2ed9aba178944de9b4862dadadd4dc1c in term 1.
I20250901 14:19:06.990375 11446 raft_consensus.cc:2466] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 FOLLOWER]: Leader election vote request: Granting yes vote for candidate 2ed9aba178944de9b4862dadadd4dc1c in term 1.
I20250901 14:19:06.991395 11562 leader_election.cc:304] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 2 responses out of 3 voters: 2 yes votes; 0 no votes. yes voters: 2ed9aba178944de9b4862dadadd4dc1c, 6b8548ae80e84dbfa3560d00428972e4; no voters: 
I20250901 14:19:06.992277 11652 raft_consensus.cc:2802] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:06.994854 11652 raft_consensus.cc:695] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 1 LEADER]: Becoming Leader. State: Replica: 2ed9aba178944de9b4862dadadd4dc1c, State: Running, Role: LEADER
I20250901 14:19:06.995657 11652 consensus_queue.cc:237] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 2, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } }
I20250901 14:19:07.002528 11323 catalog_manager.cc:5582] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c reported cstate change: term changed from 0 to 1, leader changed from <none> to 2ed9aba178944de9b4862dadadd4dc1c (127.4.231.195). New cstate: current_term: 1 leader_uuid: "2ed9aba178944de9b4862dadadd4dc1c" committed_config { opid_index: -1 OBSOLETE_local: false peers { permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 } health_report { overall_health: UNKNOWN } } peers { permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:07.313548 11653 consensus_queue.cc:1035] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [LEADER]: Connected to new peer: Peer: permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.001s
I20250901 14:19:07.333078 11653 consensus_queue.cc:1035] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [LEADER]: Connected to new peer: Peer: permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.000s
I20250901 14:19:07.431754 11664 consensus_queue.cc:1035] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [LEADER]: Connected to new peer: Peer: permanent_uuid: "2ed9aba178944de9b4862dadadd4dc1c" member_type: VOTER last_known_addr { host: "127.4.231.195" port: 41707 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.000s
I20250901 14:19:07.446789 11664 consensus_queue.cc:1035] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [LEADER]: Connected to new peer: Peer: permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.000s
I20250901 14:19:07.515642 11652 consensus_queue.cc:1035] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [LEADER]: Connected to new peer: Peer: permanent_uuid: "5fe00bb74b1a45768a956b70ca27fca1" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 35067 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.000s
I20250901 14:19:07.540326 11685 consensus_queue.cc:1035] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [LEADER]: Connected to new peer: Peer: permanent_uuid: "6b8548ae80e84dbfa3560d00428972e4" member_type: VOTER last_known_addr { host: "127.4.231.194" port: 40287 }, Status: LMP_MISMATCH, Last received: 0.0, Next index: 1, Last known committed idx: 0, Time since last communication: 0.002s
W20250901 14:19:08.084057 11680 log.cc:927] Time spent T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c: Append to log took a long time: real 0.059s	user 0.011s	sys 0.000s
W20250901 14:19:08.262926 11668 log.cc:927] Time spent T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1: Append to log took a long time: real 0.064s	user 0.010s	sys 0.003s
W20250901 14:19:08.287978 11678 log.cc:927] Time spent T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c: Append to log took a long time: real 0.065s	user 0.010s	sys 0.000s
W20250901 14:19:08.737082 11637 outbound_call.cc:321] RPC callback for RPC call kudu.tserver.TabletServerService.Write -> {remote=127.4.231.195:41707, user_credentials={real_user=slave}} blocked reactor thread for 84230.7us
W20250901 14:19:08.761351 11634 outbound_call.cc:321] RPC callback for RPC call kudu.tserver.TabletServerService.Write -> {remote=127.4.231.194:40287, user_credentials={real_user=slave}} blocked reactor thread for 102284us
/home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/client/client-test.cc:3150: Skipped
test is skipped; set KUDU_ALLOW_SLOW_TESTS=1 to run
I20250901 14:19:08.921844  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:19:08.954958  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:08.955624  5023 tablet_replica.cc:331] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1: stopping tablet replica
I20250901 14:19:08.956321  5023 raft_consensus.cc:2241] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:19:08.956866  5023 raft_consensus.cc:2270] T 0b67635175bb438bbd63127da1d1bdda P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:08.960273  5023 tablet_replica.cc:331] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1: stopping tablet replica
I20250901 14:19:08.960840  5023 raft_consensus.cc:2241] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:08.961656  5023 raft_consensus.cc:2270] T 562e8cc5927d4f588bb4f62c6df9edae P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:08.964577  5023 tablet_replica.cc:331] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1: stopping tablet replica
I20250901 14:19:08.965045  5023 raft_consensus.cc:2241] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:19:08.965512  5023 raft_consensus.cc:2270] T e93dd0403bf842f2af8815e9ce4738a4 P 5fe00bb74b1a45768a956b70ca27fca1 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:08.988564  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:19:09.001259  5023 tablet_server.cc:178] TabletServer@127.4.231.194:0 shutting down...
I20250901 14:19:09.024462  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:09.025050  5023 tablet_replica.cc:331] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4: stopping tablet replica
I20250901 14:19:09.025679  5023 raft_consensus.cc:2241] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:19:09.026151  5023 raft_consensus.cc:2270] T 0b67635175bb438bbd63127da1d1bdda P 6b8548ae80e84dbfa3560d00428972e4 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:09.029599  5023 tablet_replica.cc:331] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4: stopping tablet replica
I20250901 14:19:09.030110  5023 raft_consensus.cc:2241] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:09.030807  5023 raft_consensus.cc:2270] T e93dd0403bf842f2af8815e9ce4738a4 P 6b8548ae80e84dbfa3560d00428972e4 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:09.033980  5023 tablet_replica.cc:331] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4: stopping tablet replica
I20250901 14:19:09.034461  5023 raft_consensus.cc:2241] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4 [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:19:09.034909  5023 raft_consensus.cc:2270] T 562e8cc5927d4f588bb4f62c6df9edae P 6b8548ae80e84dbfa3560d00428972e4 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:09.058539  5023 tablet_server.cc:195] TabletServer@127.4.231.194:0 shutdown complete.
I20250901 14:19:09.070387  5023 tablet_server.cc:178] TabletServer@127.4.231.195:0 shutting down...
I20250901 14:19:09.092785  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:09.093335  5023 tablet_replica.cc:331] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c: stopping tablet replica
I20250901 14:19:09.093933  5023 raft_consensus.cc:2241] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:09.094650  5023 raft_consensus.cc:2270] T 0b67635175bb438bbd63127da1d1bdda P 2ed9aba178944de9b4862dadadd4dc1c [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:09.097972  5023 tablet_replica.cc:331] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c: stopping tablet replica
I20250901 14:19:09.098471  5023 raft_consensus.cc:2241] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:19:09.098922  5023 raft_consensus.cc:2270] T e93dd0403bf842f2af8815e9ce4738a4 P 2ed9aba178944de9b4862dadadd4dc1c [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:09.101969  5023 tablet_replica.cc:331] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c: stopping tablet replica
I20250901 14:19:09.102452  5023 raft_consensus.cc:2241] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c [term 1 FOLLOWER]: Raft consensus shutting down.
I20250901 14:19:09.102942  5023 raft_consensus.cc:2270] T 562e8cc5927d4f588bb4f62c6df9edae P 2ed9aba178944de9b4862dadadd4dc1c [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:09.125308  5023 tablet_server.cc:195] TabletServer@127.4.231.195:0 shutdown complete.
I20250901 14:19:09.138074  5023 master.cc:561] Master@127.4.231.254:37943 shutting down...
I20250901 14:19:09.159252  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:09.159811  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:09.160118  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 7195772a26ff49c9a2d5b1804a264e50: stopping tablet replica
I20250901 14:19:09.178897  5023 master.cc:583] Master@127.4.231.254:37943 shutdown complete.
[  SKIPPED ] KeepAlivePeriodically/KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically/1 (3824 ms)
[----------] 1 test from KeepAlivePeriodically/KeepAlivePeriodicallyTest (3824 ms total)

[----------] 1 test from IntColEncodings/IntEncodingNullPredicatesTest
[ RUN      ] IntColEncodings/IntEncodingNullPredicatesTest.TestIntEncodings/2
I20250901 14:19:09.205178  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:45303
I20250901 14:19:09.206288  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:09.210937 11710 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:09.211198 11711 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:09.212163 11713 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:09.215242  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:09.216019  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:09.216185  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:09.216305  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736349216291 us; error 0 us; skew 500 ppm
I20250901 14:19:09.216806  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:09.219027  5023 webserver.cc:480] Webserver started at http://127.4.231.254:42183/ using document root <none> and password file <none>
I20250901 14:19:09.219453  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:09.219604  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:09.219862  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:09.220875  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.IntColEncodings_IntEncodingNullPredicatesTest.TestIntEncodings_2.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "0ba351a000e14f8886567d2c322bbe0c"
format_stamp: "Formatted at 2025-09-01 14:19:09 on dist-test-slave-9gf0"
I20250901 14:19:09.225138  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.005s	sys 0.000s
I20250901 14:19:09.228272 11718 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:09.228988  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.000s	sys 0.002s
I20250901 14:19:09.229247  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.IntColEncodings_IntEncodingNullPredicatesTest.TestIntEncodings_2.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "0ba351a000e14f8886567d2c322bbe0c"
format_stamp: "Formatted at 2025-09-01 14:19:09 on dist-test-slave-9gf0"
I20250901 14:19:09.229511  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.IntColEncodings_IntEncodingNullPredicatesTest.TestIntEncodings_2.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.IntColEncodings_IntEncodingNullPredicatesTest.TestIntEncodings_2.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.IntColEncodings_IntEncodingNullPredicatesTest.TestIntEncodings_2.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:09.263386  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:09.264394  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:09.305266  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:45303
I20250901 14:19:09.305356 11779 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:45303 every 8 connection(s)
I20250901 14:19:09.308925 11780 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:09.319620 11780 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c: Bootstrap starting.
I20250901 14:19:09.324026 11780 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:09.328184 11780 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c: No bootstrap required, opened a new log
I20250901 14:19:09.330142 11780 raft_consensus.cc:357] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0ba351a000e14f8886567d2c322bbe0c" member_type: VOTER }
I20250901 14:19:09.330554 11780 raft_consensus.cc:383] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:09.330829 11780 raft_consensus.cc:738] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 0ba351a000e14f8886567d2c322bbe0c, State: Initialized, Role: FOLLOWER
I20250901 14:19:09.331405 11780 consensus_queue.cc:260] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0ba351a000e14f8886567d2c322bbe0c" member_type: VOTER }
I20250901 14:19:09.331862 11780 raft_consensus.cc:397] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:09.332073 11780 raft_consensus.cc:491] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:09.332324 11780 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:09.336983 11780 raft_consensus.cc:513] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0ba351a000e14f8886567d2c322bbe0c" member_type: VOTER }
I20250901 14:19:09.337682 11780 leader_election.cc:304] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 0ba351a000e14f8886567d2c322bbe0c; no voters: 
I20250901 14:19:09.338841 11780 leader_election.cc:290] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:09.339156 11783 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:09.340503 11783 raft_consensus.cc:695] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 1 LEADER]: Becoming Leader. State: Replica: 0ba351a000e14f8886567d2c322bbe0c, State: Running, Role: LEADER
I20250901 14:19:09.341158 11783 consensus_queue.cc:237] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0ba351a000e14f8886567d2c322bbe0c" member_type: VOTER }
I20250901 14:19:09.341807 11780 sys_catalog.cc:564] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:19:09.344182 11785 sys_catalog.cc:455] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [sys.catalog]: SysCatalogTable state changed. Reason: New leader 0ba351a000e14f8886567d2c322bbe0c. Latest consensus state: current_term: 1 leader_uuid: "0ba351a000e14f8886567d2c322bbe0c" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0ba351a000e14f8886567d2c322bbe0c" member_type: VOTER } }
I20250901 14:19:09.344091 11784 sys_catalog.cc:455] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "0ba351a000e14f8886567d2c322bbe0c" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "0ba351a000e14f8886567d2c322bbe0c" member_type: VOTER } }
I20250901 14:19:09.344920 11785 sys_catalog.cc:458] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:09.345256 11784 sys_catalog.cc:458] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:09.348064 11788 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:19:09.352610 11788 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:19:09.357650  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:19:09.361054 11788 catalog_manager.cc:1349] Generated new cluster ID: 692b3186939a4ad8976f7eacfdc0d1e2
I20250901 14:19:09.361297 11788 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:19:09.387498 11788 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:19:09.388723 11788 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:19:09.405992 11788 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c: Generated new TSK 0
I20250901 14:19:09.406561 11788 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:19:09.424454  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:09.430050 11801 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:09.431236 11802 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:09.432439 11804 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:09.433823  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:09.434592  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:09.434772  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:09.434927  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736349434910 us; error 0 us; skew 500 ppm
I20250901 14:19:09.435424  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:09.438357  5023 webserver.cc:480] Webserver started at http://127.4.231.193:43789/ using document root <none> and password file <none>
I20250901 14:19:09.438812  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:09.438983  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:09.439232  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:09.440238  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.IntColEncodings_IntEncodingNullPredicatesTest.TestIntEncodings_2.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "c57df4fd0db2463694802379df6bda33"
format_stamp: "Formatted at 2025-09-01 14:19:09 on dist-test-slave-9gf0"
I20250901 14:19:09.444530  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.006s	sys 0.000s
I20250901 14:19:09.447902 11809 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:09.448616  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.000s
I20250901 14:19:09.448885  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.IntColEncodings_IntEncodingNullPredicatesTest.TestIntEncodings_2.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "c57df4fd0db2463694802379df6bda33"
format_stamp: "Formatted at 2025-09-01 14:19:09 on dist-test-slave-9gf0"
I20250901 14:19:09.449131  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.IntColEncodings_IntEncodingNullPredicatesTest.TestIntEncodings_2.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.IntColEncodings_IntEncodingNullPredicatesTest.TestIntEncodings_2.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.IntColEncodings_IntEncodingNullPredicatesTest.TestIntEncodings_2.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:09.463157  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:09.464174  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:09.469235  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:09.469552  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:09.469841  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:09.470048  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:09.518527  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:43811
I20250901 14:19:09.518620 11879 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:43811 every 8 connection(s)
I20250901 14:19:09.523351 11880 heartbeater.cc:344] Connected to a master server at 127.4.231.254:45303
I20250901 14:19:09.523763 11880 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:09.524546 11880 heartbeater.cc:507] Master 127.4.231.254:45303 requested a full tablet report, sending...
I20250901 14:19:09.526433 11735 ts_manager.cc:194] Registered new tserver with Master: c57df4fd0db2463694802379df6bda33 (127.4.231.193:43811)
I20250901 14:19:09.526547  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004989334s
I20250901 14:19:09.528116 11735 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:47840
I20250901 14:19:09.540604 11880 heartbeater.cc:499] Master 127.4.231.254:45303 was elected leader, sending a full tablet report...
I20250901 14:19:09.547425 11734 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:47858:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:19:09.580816 11845 tablet_service.cc:1468] Processing CreateTablet for tablet 98699d1f7bc6488781cf7f3c368ee5b7 (DEFAULT_TABLE table=client-testtb [id=a8e4bf2ab79f497fab11e23aa589744e]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:19:09.581171 11844 tablet_service.cc:1468] Processing CreateTablet for tablet 34e8a6a96e0e4bfca44e1767f1a946f5 (DEFAULT_TABLE table=client-testtb [id=a8e4bf2ab79f497fab11e23aa589744e]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:19:09.582078 11845 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 98699d1f7bc6488781cf7f3c368ee5b7. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:09.582717 11844 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 34e8a6a96e0e4bfca44e1767f1a946f5. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:09.594616 11890 tablet_bootstrap.cc:492] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33: Bootstrap starting.
I20250901 14:19:09.599270 11890 tablet_bootstrap.cc:654] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:09.603169 11890 tablet_bootstrap.cc:492] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33: No bootstrap required, opened a new log
I20250901 14:19:09.603591 11890 ts_tablet_manager.cc:1397] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33: Time spent bootstrapping tablet: real 0.009s	user 0.004s	sys 0.003s
I20250901 14:19:09.605898 11890 raft_consensus.cc:357] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.606297 11890 raft_consensus.cc:383] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:09.606534 11890 raft_consensus.cc:738] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: c57df4fd0db2463694802379df6bda33, State: Initialized, Role: FOLLOWER
I20250901 14:19:09.607100 11890 consensus_queue.cc:260] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.607731 11890 raft_consensus.cc:397] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:09.608054 11890 raft_consensus.cc:491] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:09.608348 11890 raft_consensus.cc:3058] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:09.613773 11890 raft_consensus.cc:513] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.614462 11890 leader_election.cc:304] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: c57df4fd0db2463694802379df6bda33; no voters: 
I20250901 14:19:09.615808 11890 leader_election.cc:290] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:09.616063 11892 raft_consensus.cc:2802] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:09.617442 11892 raft_consensus.cc:695] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 1 LEADER]: Becoming Leader. State: Replica: c57df4fd0db2463694802379df6bda33, State: Running, Role: LEADER
I20250901 14:19:09.618273 11890 ts_tablet_manager.cc:1428] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33: Time spent starting tablet: real 0.014s	user 0.010s	sys 0.006s
I20250901 14:19:09.618160 11892 consensus_queue.cc:237] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.619020 11890 tablet_bootstrap.cc:492] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33: Bootstrap starting.
I20250901 14:19:09.624691 11890 tablet_bootstrap.cc:654] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:09.625801 11734 catalog_manager.cc:5582] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 reported cstate change: term changed from 0 to 1, leader changed from <none> to c57df4fd0db2463694802379df6bda33 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "c57df4fd0db2463694802379df6bda33" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:09.629364 11890 tablet_bootstrap.cc:492] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33: No bootstrap required, opened a new log
I20250901 14:19:09.629848 11890 ts_tablet_manager.cc:1397] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33: Time spent bootstrapping tablet: real 0.011s	user 0.009s	sys 0.001s
I20250901 14:19:09.631888 11890 raft_consensus.cc:357] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.632308 11890 raft_consensus.cc:383] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:09.632537 11890 raft_consensus.cc:738] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: c57df4fd0db2463694802379df6bda33, State: Initialized, Role: FOLLOWER
I20250901 14:19:09.633157 11890 consensus_queue.cc:260] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.633842 11890 raft_consensus.cc:397] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:09.634111 11890 raft_consensus.cc:491] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:09.634402 11890 raft_consensus.cc:3058] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:09.640905 11890 raft_consensus.cc:513] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.641623 11890 leader_election.cc:304] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: c57df4fd0db2463694802379df6bda33; no voters: 
I20250901 14:19:09.642133 11890 leader_election.cc:290] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:09.642274 11892 raft_consensus.cc:2802] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:09.642752 11892 raft_consensus.cc:695] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 1 LEADER]: Becoming Leader. State: Replica: c57df4fd0db2463694802379df6bda33, State: Running, Role: LEADER
I20250901 14:19:09.643384 11892 consensus_queue.cc:237] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.643596 11890 ts_tablet_manager.cc:1428] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33: Time spent starting tablet: real 0.013s	user 0.013s	sys 0.000s
I20250901 14:19:09.648305 11734 catalog_manager.cc:5582] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 reported cstate change: term changed from 0 to 1, leader changed from <none> to c57df4fd0db2463694802379df6bda33 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "c57df4fd0db2463694802379df6bda33" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:09.663507 11734 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:47858:
name: "IntEncodingNullPredicatesTestTable"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: true
    encoding: RLE
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
}
num_replicas: 1
split_rows_range_bounds {
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:19:09.680665 11845 tablet_service.cc:1468] Processing CreateTablet for tablet dcdd9e0854a34aa899d2177623072225 (DEFAULT_TABLE table=IntEncodingNullPredicatesTestTable [id=33648ec6897d4e158d168b435c394f1c]), partition=RANGE (key) PARTITION UNBOUNDED
I20250901 14:19:09.681740 11845 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet dcdd9e0854a34aa899d2177623072225. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:09.694362 11890 tablet_bootstrap.cc:492] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33: Bootstrap starting.
I20250901 14:19:09.699023 11890 tablet_bootstrap.cc:654] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:09.702912 11890 tablet_bootstrap.cc:492] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33: No bootstrap required, opened a new log
I20250901 14:19:09.703359 11890 ts_tablet_manager.cc:1397] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33: Time spent bootstrapping tablet: real 0.009s	user 0.007s	sys 0.001s
I20250901 14:19:09.705147 11890 raft_consensus.cc:357] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.705611 11890 raft_consensus.cc:383] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:09.705844 11890 raft_consensus.cc:738] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: c57df4fd0db2463694802379df6bda33, State: Initialized, Role: FOLLOWER
I20250901 14:19:09.706350 11890 consensus_queue.cc:260] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.706804 11890 raft_consensus.cc:397] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:09.707020 11890 raft_consensus.cc:491] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:09.707273 11890 raft_consensus.cc:3058] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:09.712795 11890 raft_consensus.cc:513] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.713335 11890 leader_election.cc:304] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: c57df4fd0db2463694802379df6bda33; no voters: 
I20250901 14:19:09.713922 11890 leader_election.cc:290] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:09.714071 11892 raft_consensus.cc:2802] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:09.714591 11892 raft_consensus.cc:695] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 1 LEADER]: Becoming Leader. State: Replica: c57df4fd0db2463694802379df6bda33, State: Running, Role: LEADER
I20250901 14:19:09.715212 11892 consensus_queue.cc:237] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } }
I20250901 14:19:09.715461 11890 ts_tablet_manager.cc:1428] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33: Time spent starting tablet: real 0.012s	user 0.012s	sys 0.000s
I20250901 14:19:09.720778 11734 catalog_manager.cc:5582] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 reported cstate change: term changed from 0 to 1, leader changed from <none> to c57df4fd0db2463694802379df6bda33 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "c57df4fd0db2463694802379df6bda33" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "c57df4fd0db2463694802379df6bda33" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 43811 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:10.194836  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:19:10.228138  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:10.229143  5023 tablet_replica.cc:331] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33: stopping tablet replica
I20250901 14:19:10.230868  5023 raft_consensus.cc:2241] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:10.231528  5023 raft_consensus.cc:2270] T dcdd9e0854a34aa899d2177623072225 P c57df4fd0db2463694802379df6bda33 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:10.234854  5023 tablet_replica.cc:331] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33: stopping tablet replica
I20250901 14:19:10.235329  5023 raft_consensus.cc:2241] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:10.235766  5023 raft_consensus.cc:2270] T 34e8a6a96e0e4bfca44e1767f1a946f5 P c57df4fd0db2463694802379df6bda33 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:10.237831  5023 tablet_replica.cc:331] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33: stopping tablet replica
I20250901 14:19:10.238260  5023 raft_consensus.cc:2241] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:10.238631  5023 raft_consensus.cc:2270] T 98699d1f7bc6488781cf7f3c368ee5b7 P c57df4fd0db2463694802379df6bda33 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:10.262137  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:19:10.273854  5023 master.cc:561] Master@127.4.231.254:45303 shutting down...
I20250901 14:19:10.296504  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:10.297144  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:10.297596  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 0ba351a000e14f8886567d2c322bbe0c: stopping tablet replica
I20250901 14:19:10.318037  5023 master.cc:583] Master@127.4.231.254:45303 shutdown complete.
[       OK ] IntColEncodings/IntEncodingNullPredicatesTest.TestIntEncodings/2 (1135 ms)
[----------] 1 test from IntColEncodings/IntEncodingNullPredicatesTest (1136 ms total)

[----------] 2 tests from Params/ClientTestImmutableColumn
[ RUN      ] Params/ClientTestImmutableColumn.TestUpdate/2
I20250901 14:19:10.341521  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:40105
I20250901 14:19:10.342597  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:10.347590 11904 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:10.348661 11905 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:10.351385 11907 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:10.352576  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:10.353485  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:10.353755  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:10.353931  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736350353917 us; error 0 us; skew 500 ppm
I20250901 14:19:10.354589  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:10.357208  5023 webserver.cc:480] Webserver started at http://127.4.231.254:36839/ using document root <none> and password file <none>
I20250901 14:19:10.357843  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:10.358071  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:10.358372  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:10.359771  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpdate_2.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "fc344738408f44f190af16fd500b0d13"
format_stamp: "Formatted at 2025-09-01 14:19:10 on dist-test-slave-9gf0"
I20250901 14:19:10.365398  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.005s	user 0.007s	sys 0.000s
I20250901 14:19:10.369632 11912 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:10.370483  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.004s	sys 0.000s
I20250901 14:19:10.370820  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpdate_2.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "fc344738408f44f190af16fd500b0d13"
format_stamp: "Formatted at 2025-09-01 14:19:10 on dist-test-slave-9gf0"
I20250901 14:19:10.371163  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpdate_2.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpdate_2.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpdate_2.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:10.397346  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:10.398695  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:10.450306  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:40105
I20250901 14:19:10.450397 11973 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:40105 every 8 connection(s)
I20250901 14:19:10.455040 11974 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:10.465613 11974 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13: Bootstrap starting.
I20250901 14:19:10.469833 11974 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:10.474014 11974 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13: No bootstrap required, opened a new log
I20250901 14:19:10.475899 11974 raft_consensus.cc:357] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "fc344738408f44f190af16fd500b0d13" member_type: VOTER }
I20250901 14:19:10.476305 11974 raft_consensus.cc:383] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:10.476526 11974 raft_consensus.cc:738] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: fc344738408f44f190af16fd500b0d13, State: Initialized, Role: FOLLOWER
I20250901 14:19:10.477025 11974 consensus_queue.cc:260] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "fc344738408f44f190af16fd500b0d13" member_type: VOTER }
I20250901 14:19:10.477453 11974 raft_consensus.cc:397] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:10.477700 11974 raft_consensus.cc:491] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:10.477942 11974 raft_consensus.cc:3058] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:10.482697 11974 raft_consensus.cc:513] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "fc344738408f44f190af16fd500b0d13" member_type: VOTER }
I20250901 14:19:10.483198 11974 leader_election.cc:304] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: fc344738408f44f190af16fd500b0d13; no voters: 
I20250901 14:19:10.484580 11974 leader_election.cc:290] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:10.484915 11977 raft_consensus.cc:2802] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:10.486759 11977 raft_consensus.cc:695] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 1 LEADER]: Becoming Leader. State: Replica: fc344738408f44f190af16fd500b0d13, State: Running, Role: LEADER
I20250901 14:19:10.487478 11977 consensus_queue.cc:237] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "fc344738408f44f190af16fd500b0d13" member_type: VOTER }
I20250901 14:19:10.488240 11974 sys_catalog.cc:564] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:19:10.490610 11978 sys_catalog.cc:455] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "fc344738408f44f190af16fd500b0d13" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "fc344738408f44f190af16fd500b0d13" member_type: VOTER } }
I20250901 14:19:10.490705 11979 sys_catalog.cc:455] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [sys.catalog]: SysCatalogTable state changed. Reason: New leader fc344738408f44f190af16fd500b0d13. Latest consensus state: current_term: 1 leader_uuid: "fc344738408f44f190af16fd500b0d13" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "fc344738408f44f190af16fd500b0d13" member_type: VOTER } }
I20250901 14:19:10.491230 11978 sys_catalog.cc:458] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:10.491313 11979 sys_catalog.cc:458] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:10.500231 11982 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:19:10.506971 11982 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:19:10.509820  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:19:10.517654 11982 catalog_manager.cc:1349] Generated new cluster ID: aa44913cf2f347fc9d21bb9cad2fe17b
I20250901 14:19:10.517958 11982 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:19:10.536074 11982 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:19:10.537968 11982 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:19:10.552806 11982 catalog_manager.cc:5955] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13: Generated new TSK 0
I20250901 14:19:10.553485 11982 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:19:10.576323  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:10.583281 11996 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:10.583523 11995 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:10.586211 11998 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:10.586925  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:10.587777  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:10.587965  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:10.588129  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736350588108 us; error 0 us; skew 500 ppm
I20250901 14:19:10.588635  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:10.591439  5023 webserver.cc:480] Webserver started at http://127.4.231.193:44763/ using document root <none> and password file <none>
I20250901 14:19:10.591893  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:10.592072  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:10.592315  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:10.593366  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpdate_2.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "3a0b51b9a2344bfcba735b2e8946e66c"
format_stamp: "Formatted at 2025-09-01 14:19:10 on dist-test-slave-9gf0"
I20250901 14:19:10.598119  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.003s	sys 0.001s
I20250901 14:19:10.601617 12003 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:10.602522  5023 fs_manager.cc:730] Time spent opening block manager: real 0.002s	user 0.002s	sys 0.001s
I20250901 14:19:10.602826  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpdate_2.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "3a0b51b9a2344bfcba735b2e8946e66c"
format_stamp: "Formatted at 2025-09-01 14:19:10 on dist-test-slave-9gf0"
I20250901 14:19:10.603108  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpdate_2.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpdate_2.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpdate_2.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:10.628295  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:10.629381  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:10.637661  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:10.638021  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:10.638314  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:10.638478  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:10.700964  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:37539
I20250901 14:19:10.701105 12073 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:37539 every 8 connection(s)
I20250901 14:19:10.706202 12074 heartbeater.cc:344] Connected to a master server at 127.4.231.254:40105
I20250901 14:19:10.706593 12074 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:10.707317 12074 heartbeater.cc:507] Master 127.4.231.254:40105 requested a full tablet report, sending...
I20250901 14:19:10.709193 11929 ts_manager.cc:194] Registered new tserver with Master: 3a0b51b9a2344bfcba735b2e8946e66c (127.4.231.193:37539)
I20250901 14:19:10.709280  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.004944485s
I20250901 14:19:10.710942 11929 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:56734
I20250901 14:19:10.723484 12074 heartbeater.cc:499] Master 127.4.231.254:40105 was elected leader, sending a full tablet report...
I20250901 14:19:10.740082 11928 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:56758:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "imm_val"
    type: INT32
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: true
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000"
  indirect_data: ""
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:19:10.776332 12038 tablet_service.cc:1468] Processing CreateTablet for tablet cd0f408895a24210835d3bd3915764fa (DEFAULT_TABLE table=client-testtb [id=23140c57465c4e67a6fcfa23deddd5c0]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:19:10.776652 12039 tablet_service.cc:1468] Processing CreateTablet for tablet 7ec289082f734fb9b25aa73d2975515c (DEFAULT_TABLE table=client-testtb [id=23140c57465c4e67a6fcfa23deddd5c0]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:19:10.777416 12038 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet cd0f408895a24210835d3bd3915764fa. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:10.778053 12039 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 7ec289082f734fb9b25aa73d2975515c. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:10.795414 12084 tablet_bootstrap.cc:492] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c: Bootstrap starting.
I20250901 14:19:10.799995 12084 tablet_bootstrap.cc:654] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:10.804832 12084 tablet_bootstrap.cc:492] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c: No bootstrap required, opened a new log
I20250901 14:19:10.805310 12084 ts_tablet_manager.cc:1397] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c: Time spent bootstrapping tablet: real 0.013s	user 0.008s	sys 0.000s
I20250901 14:19:10.807142 12084 raft_consensus.cc:357] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37539 } }
I20250901 14:19:10.807571 12084 raft_consensus.cc:383] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:10.807862 12084 raft_consensus.cc:738] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 3a0b51b9a2344bfcba735b2e8946e66c, State: Initialized, Role: FOLLOWER
I20250901 14:19:10.808459 12084 consensus_queue.cc:260] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37539 } }
I20250901 14:19:10.809028 12084 raft_consensus.cc:397] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:10.809259 12084 raft_consensus.cc:491] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:10.809553 12084 raft_consensus.cc:3058] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:10.815294 12084 raft_consensus.cc:513] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37539 } }
I20250901 14:19:10.815861 12084 leader_election.cc:304] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 3a0b51b9a2344bfcba735b2e8946e66c; no voters: 
I20250901 14:19:10.817241 12084 leader_election.cc:290] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:10.817592 12086 raft_consensus.cc:2802] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:10.820142 12084 ts_tablet_manager.cc:1428] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c: Time spent starting tablet: real 0.015s	user 0.015s	sys 0.000s
I20250901 14:19:10.820107 12086 raft_consensus.cc:695] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 1 LEADER]: Becoming Leader. State: Replica: 3a0b51b9a2344bfcba735b2e8946e66c, State: Running, Role: LEADER
I20250901 14:19:10.821110 12084 tablet_bootstrap.cc:492] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c: Bootstrap starting.
I20250901 14:19:10.820905 12086 consensus_queue.cc:237] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37539 } }
I20250901 14:19:10.826874 12084 tablet_bootstrap.cc:654] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:10.827329 11929 catalog_manager.cc:5582] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c reported cstate change: term changed from 0 to 1, leader changed from <none> to 3a0b51b9a2344bfcba735b2e8946e66c (127.4.231.193). New cstate: current_term: 1 leader_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37539 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:10.838621 12084 tablet_bootstrap.cc:492] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c: No bootstrap required, opened a new log
I20250901 14:19:10.839174 12084 ts_tablet_manager.cc:1397] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c: Time spent bootstrapping tablet: real 0.018s	user 0.014s	sys 0.001s
I20250901 14:19:10.841742 12084 raft_consensus.cc:357] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37539 } }
I20250901 14:19:10.842283 12084 raft_consensus.cc:383] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:10.842582 12084 raft_consensus.cc:738] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 3a0b51b9a2344bfcba735b2e8946e66c, State: Initialized, Role: FOLLOWER
I20250901 14:19:10.843174 12084 consensus_queue.cc:260] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37539 } }
I20250901 14:19:10.843849 12084 raft_consensus.cc:397] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:10.844136 12084 raft_consensus.cc:491] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:10.844497 12084 raft_consensus.cc:3058] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:10.849411 12084 raft_consensus.cc:513] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37539 } }
I20250901 14:19:10.850107 12084 leader_election.cc:304] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 3a0b51b9a2344bfcba735b2e8946e66c; no voters: 
I20250901 14:19:10.850605 12084 leader_election.cc:290] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:10.850768 12086 raft_consensus.cc:2802] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:10.851399 12086 raft_consensus.cc:695] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 1 LEADER]: Becoming Leader. State: Replica: 3a0b51b9a2344bfcba735b2e8946e66c, State: Running, Role: LEADER
I20250901 14:19:10.852126 12086 consensus_queue.cc:237] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37539 } }
I20250901 14:19:10.852506 12084 ts_tablet_manager.cc:1428] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c: Time spent starting tablet: real 0.013s	user 0.009s	sys 0.001s
I20250901 14:19:10.858142 11929 catalog_manager.cc:5582] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c reported cstate change: term changed from 0 to 1, leader changed from <none> to 3a0b51b9a2344bfcba735b2e8946e66c (127.4.231.193). New cstate: current_term: 1 leader_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "3a0b51b9a2344bfcba735b2e8946e66c" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 37539 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:10.983376  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:19:11.004724  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:11.005507  5023 tablet_replica.cc:331] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c: stopping tablet replica
I20250901 14:19:11.006081  5023 raft_consensus.cc:2241] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:11.006548  5023 raft_consensus.cc:2270] T 7ec289082f734fb9b25aa73d2975515c P 3a0b51b9a2344bfcba735b2e8946e66c [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:11.008811  5023 tablet_replica.cc:331] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c: stopping tablet replica
I20250901 14:19:11.009253  5023 raft_consensus.cc:2241] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:11.009694  5023 raft_consensus.cc:2270] T cd0f408895a24210835d3bd3915764fa P 3a0b51b9a2344bfcba735b2e8946e66c [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:11.022529  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:19:11.034978  5023 master.cc:561] Master@127.4.231.254:40105 shutting down...
I20250901 14:19:11.056552  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:11.057211  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:11.057642  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P fc344738408f44f190af16fd500b0d13: stopping tablet replica
I20250901 14:19:11.077445  5023 master.cc:583] Master@127.4.231.254:40105 shutdown complete.
[       OK ] Params/ClientTestImmutableColumn.TestUpdate/2 (758 ms)
[ RUN      ] Params/ClientTestImmutableColumn.TestUpsertIgnore/2
I20250901 14:19:11.100240  5023 internal_mini_cluster.cc:156] Creating distributed mini masters. Addrs: 127.4.231.254:40513
I20250901 14:19:11.101233  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:11.106263 12096 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:11.106338 12095 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:11.109675 12098 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:11.110083  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:11.111120  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:11.111327  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:11.111488  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736351111470 us; error 0 us; skew 500 ppm
I20250901 14:19:11.111996  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:11.114742  5023 webserver.cc:480] Webserver started at http://127.4.231.254:34247/ using document root <none> and password file <none>
I20250901 14:19:11.115213  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:11.115397  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:11.115680  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:11.116760  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpsertIgnore_2.1756736290962133-5023-0/minicluster-data/master-0-root/instance:
uuid: "5f0b10a62656428ca24164b4a2eed20a"
format_stamp: "Formatted at 2025-09-01 14:19:11 on dist-test-slave-9gf0"
I20250901 14:19:11.121480  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.004s	user 0.006s	sys 0.000s
I20250901 14:19:11.125174 12103 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:11.126111  5023 fs_manager.cc:730] Time spent opening block manager: real 0.003s	user 0.003s	sys 0.000s
I20250901 14:19:11.126451  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpsertIgnore_2.1756736290962133-5023-0/minicluster-data/master-0-root
uuid: "5f0b10a62656428ca24164b4a2eed20a"
format_stamp: "Formatted at 2025-09-01 14:19:11 on dist-test-slave-9gf0"
I20250901 14:19:11.126797  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpsertIgnore_2.1756736290962133-5023-0/minicluster-data/master-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpsertIgnore_2.1756736290962133-5023-0/minicluster-data/master-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpsertIgnore_2.1756736290962133-5023-0/minicluster-data/master-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:11.143898  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:11.145277  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:11.196125  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.254:40513
I20250901 14:19:11.196200 12164 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.254:40513 every 8 connection(s)
I20250901 14:19:11.200770 12165 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 00000000000000000000000000000000. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:11.211416 12165 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a: Bootstrap starting.
I20250901 14:19:11.215587 12165 tablet_bootstrap.cc:654] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:11.219661 12165 tablet_bootstrap.cc:492] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a: No bootstrap required, opened a new log
I20250901 14:19:11.221704 12165 raft_consensus.cc:357] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "5f0b10a62656428ca24164b4a2eed20a" member_type: VOTER }
I20250901 14:19:11.222110 12165 raft_consensus.cc:383] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:11.222342 12165 raft_consensus.cc:738] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: 5f0b10a62656428ca24164b4a2eed20a, State: Initialized, Role: FOLLOWER
I20250901 14:19:11.222855 12165 consensus_queue.cc:260] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "5f0b10a62656428ca24164b4a2eed20a" member_type: VOTER }
I20250901 14:19:11.223337 12165 raft_consensus.cc:397] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:11.223558 12165 raft_consensus.cc:491] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:11.223806 12165 raft_consensus.cc:3058] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:11.228838 12165 raft_consensus.cc:513] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "5f0b10a62656428ca24164b4a2eed20a" member_type: VOTER }
I20250901 14:19:11.229562 12165 leader_election.cc:304] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: 5f0b10a62656428ca24164b4a2eed20a; no voters: 
I20250901 14:19:11.230881 12165 leader_election.cc:290] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:11.231201 12168 raft_consensus.cc:2802] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:11.232802 12168 raft_consensus.cc:695] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 1 LEADER]: Becoming Leader. State: Replica: 5f0b10a62656428ca24164b4a2eed20a, State: Running, Role: LEADER
I20250901 14:19:11.233565 12168 consensus_queue.cc:237] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "5f0b10a62656428ca24164b4a2eed20a" member_type: VOTER }
I20250901 14:19:11.234421 12165 sys_catalog.cc:564] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [sys.catalog]: configured and running, proceeding with master startup.
I20250901 14:19:11.236377 12169 sys_catalog.cc:455] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [sys.catalog]: SysCatalogTable state changed. Reason: RaftConsensus started. Latest consensus state: current_term: 1 leader_uuid: "5f0b10a62656428ca24164b4a2eed20a" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "5f0b10a62656428ca24164b4a2eed20a" member_type: VOTER } }
I20250901 14:19:11.236944 12169 sys_catalog.cc:458] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:11.245517 12174 catalog_manager.cc:1477] Loading table and tablet metadata into memory...
I20250901 14:19:11.252215 12170 sys_catalog.cc:455] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [sys.catalog]: SysCatalogTable state changed. Reason: New leader 5f0b10a62656428ca24164b4a2eed20a. Latest consensus state: current_term: 1 leader_uuid: "5f0b10a62656428ca24164b4a2eed20a" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "5f0b10a62656428ca24164b4a2eed20a" member_type: VOTER } }
I20250901 14:19:11.252974 12170 sys_catalog.cc:458] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [sys.catalog]: This master's current role is: LEADER
I20250901 14:19:11.254383 12174 catalog_manager.cc:1486] Initializing Kudu cluster ID...
I20250901 14:19:11.265648 12174 catalog_manager.cc:1349] Generated new cluster ID: 84743e06dd9b42a890a4900e3912eb99
I20250901 14:19:11.266011 12174 catalog_manager.cc:1497] Initializing Kudu internal certificate authority...
I20250901 14:19:11.266037  5023 internal_mini_cluster.cc:184] Waiting to initialize catalog manager on master 0
I20250901 14:19:11.285777 12174 catalog_manager.cc:1372] Generated new certificate authority record
I20250901 14:19:11.287075 12174 catalog_manager.cc:1506] Loading token signing keys...
I20250901 14:19:11.301659 12174 catalog_manager.cc:5955] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a: Generated new TSK 0
I20250901 14:19:11.302383 12174 catalog_manager.cc:1516] Initializing in-progress tserver states...
I20250901 14:19:11.332811  5023 file_cache.cc:492] Constructed file cache file cache with capacity 419430
W20250901 14:19:11.340078 12187 instance_detector.cc:116] could not retrieve AWS instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:11.340168 12188 instance_detector.cc:116] could not retrieve Azure instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
W20250901 14:19:11.344025 12190 instance_detector.cc:116] could not retrieve OpenStack instance metadata: Network error: curl error: HTTP response code said error: The requested URL returned error: 404
I20250901 14:19:11.344379  5023 server_base.cc:1047] running on GCE node
I20250901 14:19:11.345443  5023 hybrid_clock.cc:584] initializing the hybrid clock with 'system_unsync' time source
W20250901 14:19:11.345687  5023 system_unsync_time.cc:38] NTP support is disabled. Clock error bounds will not be accurate. This configuration is not suitable for distributed clusters.
I20250901 14:19:11.345844  5023 hybrid_clock.cc:648] HybridClock initialized: now 1756736351345827 us; error 0 us; skew 500 ppm
I20250901 14:19:11.346380  5023 server_base.cc:847] Flag tcmalloc_max_total_thread_cache_bytes is not working since tcmalloc is not enabled.
I20250901 14:19:11.348970  5023 webserver.cc:480] Webserver started at http://127.4.231.193:38725/ using document root <none> and password file <none>
I20250901 14:19:11.349591  5023 fs_manager.cc:362] Metadata directory not provided
I20250901 14:19:11.349797  5023 fs_manager.cc:368] Using write-ahead log directory (fs_wal_dir) as metadata directory
I20250901 14:19:11.350042  5023 server_base.cc:895] This appears to be a new deployment of Kudu; creating new FS layout
I20250901 14:19:11.351135  5023 fs_manager.cc:1068] Generated new instance metadata in path /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpsertIgnore_2.1756736290962133-5023-0/minicluster-data/ts-0-root/instance:
uuid: "a4cecb446c2a4bf888983fe97fa2c012"
format_stamp: "Formatted at 2025-09-01 14:19:11 on dist-test-slave-9gf0"
I20250901 14:19:11.381038  5023 fs_manager.cc:696] Time spent creating directory manager: real 0.028s	user 0.006s	sys 0.000s
I20250901 14:19:11.387567 12196 log_block_manager.cc:3788] Time spent loading block containers with low live blocks: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:11.388435  5023 fs_manager.cc:730] Time spent opening block manager: real 0.003s	user 0.005s	sys 0.000s
I20250901 14:19:11.388724  5023 fs_manager.cc:647] Opened local filesystem: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpsertIgnore_2.1756736290962133-5023-0/minicluster-data/ts-0-root
uuid: "a4cecb446c2a4bf888983fe97fa2c012"
format_stamp: "Formatted at 2025-09-01 14:19:11 on dist-test-slave-9gf0"
I20250901 14:19:11.389191  5023 fs_report.cc:389] FS layout report
--------------------
wal directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpsertIgnore_2.1756736290962133-5023-0/minicluster-data/ts-0-root
metadata directory: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpsertIgnore_2.1756736290962133-5023-0/minicluster-data/ts-0-root
1 data directories: /tmp/dist-test-task5DXX1o/test-tmp/client-test.2.Params_ClientTestImmutableColumn.TestUpsertIgnore_2.1756736290962133-5023-0/minicluster-data/ts-0-root/data
Total live blocks: 0
Total live bytes: 0
Total live bytes (after alignment): 0
Total number of LBM containers: 0 (0 full)
Did not check for missing blocks
Did not check for orphaned blocks
Total full LBM containers with extra space: 0 (0 repaired)
Total full LBM container extra space in bytes: 0 (0 repaired)
Total incomplete LBM containers: 0 (0 repaired)
Total LBM partial records: 0 (0 repaired)
Total corrupted LBM metadata records in RocksDB: 0 (0 repaired)
I20250901 14:19:11.412434  5023 rpc_server.cc:225] running with OpenSSL 1.1.1  11 Sep 2018
I20250901 14:19:11.414049  5023 kserver.cc:163] Server-wide thread pool size limit: 3276
I20250901 14:19:11.430576  5023 ts_tablet_manager.cc:579] Loaded tablet metadata (0 total tablets, 0 live tablets)
I20250901 14:19:11.430980  5023 ts_tablet_manager.cc:525] Time spent load tablet metadata: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:11.431339  5023 ts_tablet_manager.cc:610] Registered 0 tablets
I20250901 14:19:11.431555  5023 ts_tablet_manager.cc:589] Time spent register tablets: real 0.000s	user 0.000s	sys 0.000s
I20250901 14:19:11.510246  5023 rpc_server.cc:307] RPC server started. Bound to: 127.4.231.193:38767
I20250901 14:19:11.510340 12266 acceptor_pool.cc:272] collecting diagnostics on the listening RPC socket 127.4.231.193:38767 every 8 connection(s)
I20250901 14:19:11.518512 12267 heartbeater.cc:344] Connected to a master server at 127.4.231.254:40513
I20250901 14:19:11.519074 12267 heartbeater.cc:461] Registering TS with master...
I20250901 14:19:11.520211 12267 heartbeater.cc:507] Master 127.4.231.254:40513 requested a full tablet report, sending...
I20250901 14:19:11.522842 12120 ts_manager.cc:194] Registered new tserver with Master: a4cecb446c2a4bf888983fe97fa2c012 (127.4.231.193:38767)
I20250901 14:19:11.523660  5023 internal_mini_cluster.cc:371] 1 TS(s) registered with all masters after 0.007380047s
I20250901 14:19:11.525300 12120 master_service.cc:496] Signed X509 certificate for tserver {username='slave'} at 127.0.0.1:41708
I20250901 14:19:11.539106 12267 heartbeater.cc:499] Master 127.4.231.254:40513 was elected leader, sending a full tablet report...
I20250901 14:19:11.553304 12120 catalog_manager.cc:2232] Servicing CreateTable request from {username='slave'} at 127.0.0.1:41714:
name: "client-testtb"
schema {
  columns {
    name: "key"
    type: INT32
    is_key: true
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "int_val"
    type: INT32
    is_key: false
    is_nullable: false
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "string_val"
    type: STRING
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "non_null_with_default"
    type: INT32
    is_key: false
    is_nullable: false
    read_default_value: "90\000\000"
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: false
  }
  columns {
    name: "imm_val"
    type: INT32
    is_key: false
    is_nullable: true
    encoding: AUTO_ENCODING
    compression: DEFAULT_COMPRESSION
    cfile_block_size: 0
    immutable: true
  }
}
num_replicas: 1
split_rows_range_bounds {
  rows: "\004\001\000\t\000\000\000""\004\001\000\t\000\000\000"
  indirect_data: """"
}
partition_schema {
  range_schema {
    columns {
      name: "key"
    }
  }
}
I20250901 14:19:11.601478 12232 tablet_service.cc:1468] Processing CreateTablet for tablet 522d2d9ecee944448a875c38e4d63a7f (DEFAULT_TABLE table=client-testtb [id=f9d489205a364e4a9b34716cdea88915]), partition=RANGE (key) PARTITION VALUES < 9
I20250901 14:19:11.601783 12231 tablet_service.cc:1468] Processing CreateTablet for tablet 761119eab3ff4f18afce2cea8b7fa0f5 (DEFAULT_TABLE table=client-testtb [id=f9d489205a364e4a9b34716cdea88915]), partition=RANGE (key) PARTITION 9 <= VALUES
I20250901 14:19:11.603404 12231 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 761119eab3ff4f18afce2cea8b7fa0f5. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:11.604184 12232 data_dirs.cc:400] Could only allocate 1 dirs of requested 3 for tablet 522d2d9ecee944448a875c38e4d63a7f. 1 dirs total, 0 dirs full, 0 dirs failed
I20250901 14:19:11.634933 12277 tablet_bootstrap.cc:492] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012: Bootstrap starting.
I20250901 14:19:11.641861 12277 tablet_bootstrap.cc:654] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:11.647244 12277 tablet_bootstrap.cc:492] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012: No bootstrap required, opened a new log
I20250901 14:19:11.647678 12277 ts_tablet_manager.cc:1397] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012: Time spent bootstrapping tablet: real 0.013s	user 0.010s	sys 0.000s
I20250901 14:19:11.649900 12277 raft_consensus.cc:357] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4cecb446c2a4bf888983fe97fa2c012" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38767 } }
I20250901 14:19:11.650700 12277 raft_consensus.cc:383] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:11.651005 12277 raft_consensus.cc:738] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: a4cecb446c2a4bf888983fe97fa2c012, State: Initialized, Role: FOLLOWER
I20250901 14:19:11.651870 12277 consensus_queue.cc:260] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4cecb446c2a4bf888983fe97fa2c012" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38767 } }
I20250901 14:19:11.652522 12277 raft_consensus.cc:397] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:11.652848 12277 raft_consensus.cc:491] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:11.653206 12277 raft_consensus.cc:3058] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:11.659654 12277 raft_consensus.cc:513] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4cecb446c2a4bf888983fe97fa2c012" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38767 } }
I20250901 14:19:11.660320 12277 leader_election.cc:304] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: a4cecb446c2a4bf888983fe97fa2c012; no voters: 
I20250901 14:19:11.662020 12277 leader_election.cc:290] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:11.662595 12279 raft_consensus.cc:2802] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:11.665478 12277 ts_tablet_manager.cc:1428] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012: Time spent starting tablet: real 0.018s	user 0.009s	sys 0.010s
I20250901 14:19:11.665467 12279 raft_consensus.cc:695] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 1 LEADER]: Becoming Leader. State: Replica: a4cecb446c2a4bf888983fe97fa2c012, State: Running, Role: LEADER
I20250901 14:19:11.666837 12277 tablet_bootstrap.cc:492] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012: Bootstrap starting.
I20250901 14:19:11.666577 12279 consensus_queue.cc:237] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4cecb446c2a4bf888983fe97fa2c012" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38767 } }
I20250901 14:19:11.674077 12277 tablet_bootstrap.cc:654] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012: Neither blocks nor log segments found. Creating new log.
I20250901 14:19:11.675757 12119 catalog_manager.cc:5582] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 reported cstate change: term changed from 0 to 1, leader changed from <none> to a4cecb446c2a4bf888983fe97fa2c012 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "a4cecb446c2a4bf888983fe97fa2c012" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4cecb446c2a4bf888983fe97fa2c012" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38767 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:11.688212 12277 tablet_bootstrap.cc:492] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012: No bootstrap required, opened a new log
I20250901 14:19:11.688797 12277 ts_tablet_manager.cc:1397] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012: Time spent bootstrapping tablet: real 0.022s	user 0.014s	sys 0.003s
I20250901 14:19:11.691812 12277 raft_consensus.cc:357] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Replica starting. Triggering 0 pending ops. Active config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4cecb446c2a4bf888983fe97fa2c012" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38767 } }
I20250901 14:19:11.692418 12277 raft_consensus.cc:383] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Consensus starting up: Expiring failure detector timer to make a prompt election more likely
I20250901 14:19:11.692734 12277 raft_consensus.cc:738] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Becoming Follower/Learner. State: Replica: a4cecb446c2a4bf888983fe97fa2c012, State: Initialized, Role: FOLLOWER
I20250901 14:19:11.693519 12277 consensus_queue.cc:260] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [NON_LEADER]: Queue going to NON_LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 0, Majority size: -1, State: 0, Mode: NON_LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4cecb446c2a4bf888983fe97fa2c012" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38767 } }
I20250901 14:19:11.694211 12277 raft_consensus.cc:397] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Only one voter in the Raft config. Triggering election immediately
I20250901 14:19:11.694538 12277 raft_consensus.cc:491] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Starting leader election (initial election of a single-replica configuration)
I20250901 14:19:11.694911 12277 raft_consensus.cc:3058] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 0 FOLLOWER]: Advancing to term 1
I20250901 14:19:11.700930 12277 raft_consensus.cc:513] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 1 FOLLOWER]: Starting leader election with config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4cecb446c2a4bf888983fe97fa2c012" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38767 } }
I20250901 14:19:11.701577 12277 leader_election.cc:304] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [CANDIDATE]: Term 1 election: Election decided. Result: candidate won. Election summary: received 1 responses out of 1 voters: 1 yes votes; 0 no votes. yes voters: a4cecb446c2a4bf888983fe97fa2c012; no voters: 
I20250901 14:19:11.702114 12277 leader_election.cc:290] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [CANDIDATE]: Term 1 election: Requested vote from peers 
I20250901 14:19:11.702328 12279 raft_consensus.cc:2802] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 1 FOLLOWER]: Leader election won for term 1
I20250901 14:19:11.702903 12279 raft_consensus.cc:695] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 1 LEADER]: Becoming Leader. State: Replica: a4cecb446c2a4bf888983fe97fa2c012, State: Running, Role: LEADER
I20250901 14:19:11.703857 12277 ts_tablet_manager.cc:1428] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012: Time spent starting tablet: real 0.015s	user 0.014s	sys 0.000s
I20250901 14:19:11.703610 12279 consensus_queue.cc:237] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [LEADER]: Queue going to LEADER mode. State: All replicated index: 0, Majority replicated index: 0, Committed index: 0, Last appended: 0.0, Last appended by leader: 0, Current term: 1, Majority size: 1, State: 0, Mode: LEADER, active raft config: opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4cecb446c2a4bf888983fe97fa2c012" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38767 } }
I20250901 14:19:11.711094 12120 catalog_manager.cc:5582] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 reported cstate change: term changed from 0 to 1, leader changed from <none> to a4cecb446c2a4bf888983fe97fa2c012 (127.4.231.193). New cstate: current_term: 1 leader_uuid: "a4cecb446c2a4bf888983fe97fa2c012" committed_config { opid_index: -1 OBSOLETE_local: true peers { permanent_uuid: "a4cecb446c2a4bf888983fe97fa2c012" member_type: VOTER last_known_addr { host: "127.4.231.193" port: 38767 } health_report { overall_health: HEALTHY } } }
I20250901 14:19:11.844386  5023 tablet_server.cc:178] TabletServer@127.4.231.193:0 shutting down...
I20250901 14:19:11.871704  5023 ts_tablet_manager.cc:1500] Shutting down tablet manager...
I20250901 14:19:11.872488  5023 tablet_replica.cc:331] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012: stopping tablet replica
I20250901 14:19:11.873075  5023 raft_consensus.cc:2241] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:11.873591  5023 raft_consensus.cc:2270] T 522d2d9ecee944448a875c38e4d63a7f P a4cecb446c2a4bf888983fe97fa2c012 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:11.875954  5023 tablet_replica.cc:331] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012: stopping tablet replica
I20250901 14:19:11.876406  5023 raft_consensus.cc:2241] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:11.876791  5023 raft_consensus.cc:2270] T 761119eab3ff4f18afce2cea8b7fa0f5 P a4cecb446c2a4bf888983fe97fa2c012 [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:11.900012  5023 tablet_server.cc:195] TabletServer@127.4.231.193:0 shutdown complete.
I20250901 14:19:11.913682  5023 master.cc:561] Master@127.4.231.254:40513 shutting down...
I20250901 14:19:11.949090  5023 raft_consensus.cc:2241] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 1 LEADER]: Raft consensus shutting down.
I20250901 14:19:11.949796  5023 raft_consensus.cc:2270] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a [term 1 FOLLOWER]: Raft consensus is shut down!
I20250901 14:19:11.950201  5023 tablet_replica.cc:331] T 00000000000000000000000000000000 P 5f0b10a62656428ca24164b4a2eed20a: stopping tablet replica
I20250901 14:19:11.971853  5023 master.cc:583] Master@127.4.231.254:40513 shutdown complete.
[       OK ] Params/ClientTestImmutableColumn.TestUpsertIgnore/2 (906 ms)
[----------] 2 tests from Params/ClientTestImmutableColumn (1665 ms total)

[----------] Global test environment tear-down
[==========] 28 tests from 7 test suites ran. (60870 ms total)
[  PASSED  ] 27 tests.
[  SKIPPED ] 1 test, listed below:
[  SKIPPED ] KeepAlivePeriodically/KeepAlivePeriodicallyTest.TestStopKeepAlivePeriodically/1
I20250901 14:19:12.088478  5023 logging.cc:424] LogThrottler /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/tablet/tablet.cc:2378: suppressed but not reported on 14 messages since previous log ~9 seconds ago
I20250901 14:19:12.088886  5023 logging.cc:424] LogThrottler /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/rpc/proxy.cc:239: suppressed but not reported on 13 messages since previous log ~41 seconds ago
I20250901 14:19:12.089075  5023 logging.cc:424] LogThrottler /home/jenkins-slave/workspace/build_and_test_flaky/src/kudu/codegen/compilation_manager.cc:203: suppressed but not reported on 46 messages since previous log ~52 seconds ago
ThreadSanitizer: reported 7 warnings