From 969c840db52da796c319f84c9a9a20b1de902ccf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?L=C5=91rinc?=
Date: Fri, 31 Oct 2025 15:41:35 +0100
Subject: [PATCH] log,blocks: avoid `ComputeTotalSize` and `GetHash` work when
 logging is disabled

`PartiallyDownloadedBlock::FillBlock()` computed the block header hash
and summed missing transaction sizes for debug logging unconditionally,
including when cmpctblock debug logging is disabled. Guard the
debug-only hash and size computations with `LogAcceptCategory`.

Since `txn_available` is invalidated after the first loop (needed for
efficient moving), we compute `tx_missing_size` by iterating
`vtx_missing` directly. This is safe because the later
`tx_missing_offset` check guarantees `vtx_missing` was fully consumed
during reconstruction.

Use `block.GetHash()` instead of `header.GetHash()`, since `header` is
cleared before logging.

No behavior change when debug logging is enabled: the reported counts,
hashes, and byte totals remain the same.
---
 src/blockencodings.cpp | 28 ++++++++++++++++------------
 1 file changed, 16 insertions(+), 12 deletions(-)

diff --git a/src/blockencodings.cpp b/src/blockencodings.cpp
index eebf7bf466a..fd528309902 100644
--- a/src/blockencodings.cpp
+++ b/src/blockencodings.cpp
@@ -192,40 +192,44 @@ ReadStatus PartiallyDownloadedBlock::FillBlock(CBlock& block, const std::vector<
 {
     if (header.IsNull()) return READ_STATUS_INVALID;
 
-    uint256 hash = header.GetHash();
     block = header;
     block.vtx.resize(txn_available.size());
 
-    unsigned int tx_missing_size = 0;
     size_t tx_missing_offset = 0;
     for (size_t i = 0; i < txn_available.size(); i++) {
         if (!txn_available[i]) {
-            if (vtx_missing.size() <= tx_missing_offset)
+            if (tx_missing_offset >= vtx_missing.size()) {
                 return READ_STATUS_INVALID;
+            }
             block.vtx[i] = vtx_missing[tx_missing_offset++];
-            tx_missing_size += block.vtx[i]->ComputeTotalSize();
-        } else
+        } else {
             block.vtx[i] = std::move(txn_available[i]);
+        }
     }
 
     // Make sure we can't call FillBlock again.
     header.SetNull();
     txn_available.clear();
 
-    if (vtx_missing.size() != tx_missing_offset)
+    if (vtx_missing.size() != tx_missing_offset) {
         return READ_STATUS_INVALID;
+    }
 
     // Check for possible mutations early now that we have a seemingly good block
     IsBlockMutatedFn check_mutated{m_check_block_mutated_mock ? m_check_block_mutated_mock : IsBlockMutated};
-    if (check_mutated(/*block=*/block,
-                      /*check_witness_root=*/segwit_active)) {
+    if (check_mutated(/*block=*/block, /*check_witness_root=*/segwit_active)) {
         return READ_STATUS_FAILED; // Possible Short ID collision
     }
 
-    LogDebug(BCLog::CMPCTBLOCK, "Successfully reconstructed block %s with %u txn prefilled, %u txn from mempool (incl at least %u from extra pool) and %u txn (%u bytes) requested\n", hash.ToString(), prefilled_count, mempool_count, extra_count, vtx_missing.size(), tx_missing_size);
-    if (vtx_missing.size() < 5) {
-        for (const auto& tx : vtx_missing) {
-            LogDebug(BCLog::CMPCTBLOCK, "Reconstructed block %s required tx %s\n", hash.ToString(), tx->GetHash().ToString());
+    if (LogAcceptCategory(BCLog::CMPCTBLOCK, BCLog::Level::Debug)) {
+        const uint256 hash{block.GetHash()};
+        uint32_t tx_missing_size{0};
+        for (const auto& tx : vtx_missing) tx_missing_size += tx->ComputeTotalSize();
+        LogDebug(BCLog::CMPCTBLOCK, "Successfully reconstructed block %s with %u txn prefilled, %u txn from mempool (incl at least %u from extra pool) and %u txn (%u bytes) requested\n", hash.ToString(), prefilled_count, mempool_count, extra_count, vtx_missing.size(), tx_missing_size);
+        if (vtx_missing.size() < 5) {
+            for (const auto& tx : vtx_missing) {
+                LogDebug(BCLog::CMPCTBLOCK, "Reconstructed block %s required tx %s\n", hash.ToString(), tx->GetHash().ToString());
+            }
         }
     }
 
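
The change is an instance of the usual "check the log category before doing
log-only work" pattern: `block.GetHash()` and the `ComputeTotalSize()` sum are
now evaluated only when cmpctblock debug logging is accepted, and summing over
`vtx_missing` gives the same total as the old per-slot accumulation because
the `tx_missing_offset` check guarantees every missing transaction was placed
into the block. Below is a minimal standalone C++ sketch of that pattern;
`LogCategoryEnabled`, `Tx` and `LogReconstruction` are hypothetical stand-ins
for illustration, not Bitcoin Core's API.

// Sketch of guarding log-only work behind a category check.
#include <cstdint>
#include <cstdio>
#include <vector>

static bool g_cmpctblock_debug{false}; // set from configuration in a real node

// Stand-in for LogAcceptCategory(BCLog::CMPCTBLOCK, BCLog::Level::Debug).
static bool LogCategoryEnabled() { return g_cmpctblock_debug; }

struct Tx { uint32_t total_size; }; // stand-in for CTransaction::ComputeTotalSize()

static void LogReconstruction(const std::vector<Tx>& vtx_missing)
{
    // Skip the summation entirely when the message would be discarded anyway.
    if (!LogCategoryEnabled()) return;

    uint32_t tx_missing_size{0};
    for (const auto& tx : vtx_missing) tx_missing_size += tx.total_size;
    std::printf("requested %zu txn (%u bytes)\n", vtx_missing.size(), tx_missing_size);
}

int main()
{
    const std::vector<Tx> missing{{250}, {400}};
    LogReconstruction(missing); // category disabled: no summing, no output
    g_cmpctblock_debug = true;
    LogReconstruction(missing); // prints: requested 2 txn (650 bytes)
}

When the guard is false the size total and the hash are never computed; when
it is true the logged values match the previous output, as stated in the
commit message.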