Merge bitcoin/bitcoin#33192: refactor: unify container presence checks

d9319b06cf82664d55f255387a348135fd7f91c7 refactor: unify container presence checks - non-trivial counts (Lőrinc)
039307554eb311ce41648d1f9a12b543f480f871 refactor: unify container presence checks - trivial counts (Lőrinc)
8bb9219b6301215f53e43967d17445aaf1b81090 refactor: unify container presence checks - find (Lőrinc)

Pull request description:

  ### Summary
  Instead of counting occurrences in sets and maps, the C++20 `::contains` method expresses the intent unambiguously and can return as soon as a match is found.
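
  For illustration only, here is a minimal sketch of the before/after idiom (the helper and its container are invented for this example, not taken from the diff):

  ```cpp
  #include <set>
  #include <string>

  // Hypothetical helper used only to illustrate the refactor.
  bool IsKnownPeer(const std::set<std::string>& peers, const std::string& addr)
  {
      // Before: counting matches reads like arithmetic and obscures the question.
      // return peers.count(addr) > 0;

      // After (C++20): asks the question directly and stops at the first match.
      return peers.contains(addr);
  }
  ```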

  ### Context
  Applied clang-tidy's [readability-container-contains](https://clang.llvm.org/extra/clang-tidy/checks/readability/container-contains.html) check, though many cases required manual changes since tidy couldn't fix them automatically.

  ### Changes
  The changes made here were:

  | From                   | To               |
  |------------------------|------------------|
  | `m.find(k) == m.end()` | `!m.contains(k)` |
  | `m.find(k) != m.end()` | `m.contains(k)`  |
  | `m.count(k)`           | `m.contains(k)`  |
  | `!m.count(k)`          | `!m.contains(k)` |
  | `m.count(k) == 0`      | `!m.contains(k)` |
  | `m.count(k) != 1`      | `!m.contains(k)` |
  | `m.count(k) == 1`      | `m.contains(k)`  |
  | `m.count(k) < 1`       | `!m.contains(k)` |
  | `m.count(k) > 0`       | `m.contains(k)`  |
  | `m.count(k) != 0`      | `m.contains(k)`  |

  > Note that `== 1`/`!= 1`/`< 1` only apply to unique-key [maps](https://en.cppreference.com/w/cpp/container/map/contains)/[sets](https://en.cppreference.com/w/cpp/container/set/contains) (not their `multi` variants, where a key can occur more than once) and had to be changed manually.
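
  As a sketch of why the `== 1`/`!= 1` forms cannot be applied blindly (the container below is invented for illustration): on a `multiset` or `multimap` a present key may occur more than once, so `count(k) == 1` and `contains(k)` answer different questions:

  ```cpp
  #include <cassert>
  #include <set>

  int main()
  {
      std::multiset<int> bucket{7, 7}; // duplicate keys are allowed

      assert(bucket.count(7) == 2); // `count(7) == 1` would be false here...
      assert(bucket.contains(7));   // ...even though the key is present (C++20)
  }
  ```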

  There are many other cases that could have been changed, but we've reverted most of those to reduce conflicts with other open PRs.

  -----

  <details>
  <summary>clang-tidy command on Mac</summary>

  ```bash
  rm -rfd build && \
  cmake -B build \
    -DCMAKE_C_COMPILER="$(brew --prefix llvm)/bin/clang" \
    -DCMAKE_CXX_COMPILER="$(brew --prefix llvm)/bin/clang++" \
    -DCMAKE_OSX_SYSROOT="$(xcrun --show-sdk-path)" \
    -DCMAKE_C_FLAGS="-target arm64-apple-macos11" \
    -DCMAKE_CXX_FLAGS="-target arm64-apple-macos11" \
    -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DBUILD_BENCH=ON -DBUILD_FUZZ_BINARY=ON -DBUILD_FOR_FUZZING=ON

   "$(brew --prefix llvm)/bin/run-clang-tidy" -quiet -p build -j$(nproc) -checks='-*,readability-container-contains' | grep -v 'clang-tidy'
  ```

  </details>

  Note: this is take 2 of https://github.com/bitcoin/bitcoin/pull/33094, with fewer contentious changes.

ACKs for top commit:
  optout21:
    reACK d9319b06cf82664d55f255387a348135fd7f91c7
  sedited:
    ACK d9319b06cf82664d55f255387a348135fd7f91c7
  janb84:
    re ACK d9319b06cf82664d55f255387a348135fd7f91c7
  pablomartin4btc:
    re-ACK d9319b06cf82664d55f255387a348135fd7f91c7
  ryanofsky:
    Code review ACK d9319b06cf82664d55f255387a348135fd7f91c7. I manually reviewed the full change, and it seems there are a lot of positive comments about this and no more very significant conflicts, so I will merge it shortly.

Tree-SHA512: e4415221676cfb88413ccc446e5f4369df7a55b6642347277667b973f515c3c8ee5bfa9ee0022479c8de945c89fbc9ff61bd8ba086e70f30298cbc1762610fe1
Committed by Ryan Ofsky on 2025-12-17 16:17:09 -05:00 (commit ab513103df)
63 changed files with 184 additions and 184 deletions


@ -457,7 +457,7 @@ void AddrManImpl::Delete(nid_type nId)
{
AssertLockHeld(cs);
assert(mapInfo.count(nId) != 0);
assert(mapInfo.contains(nId));
AddrInfo& info = mapInfo[nId];
assert(!info.fInTried);
assert(info.nRefCount == 0);
@ -516,7 +516,7 @@ void AddrManImpl::MakeTried(AddrInfo& info, nid_type nId)
if (vvTried[nKBucket][nKBucketPos] != -1) {
// find an item to evict
nid_type nIdEvict = vvTried[nKBucket][nKBucketPos];
assert(mapInfo.count(nIdEvict) == 1);
assert(mapInfo.contains(nIdEvict));
AddrInfo& infoOld = mapInfo[nIdEvict];
// Remove the to-be-evicted item from the tried set.
@ -919,7 +919,7 @@ void AddrManImpl::ResolveCollisions_()
bool erase_collision = false;
// If id_new not found in mapInfo remove it from m_tried_collisions
if (mapInfo.count(id_new) != 1) {
if (!mapInfo.contains(id_new)) {
erase_collision = true;
} else {
AddrInfo& info_new = mapInfo[id_new];
@ -985,7 +985,7 @@ std::pair<CAddress, NodeSeconds> AddrManImpl::SelectTriedCollision_()
nid_type id_new = *it;
// If id_new not found in mapInfo remove it from m_tried_collisions
if (mapInfo.count(id_new) != 1) {
if (!mapInfo.contains(id_new)) {
m_tried_collisions.erase(it);
return {};
}
@ -1115,7 +1115,7 @@ int AddrManImpl::CheckAddrman() const
for (int n = 0; n < ADDRMAN_TRIED_BUCKET_COUNT; n++) {
for (int i = 0; i < ADDRMAN_BUCKET_SIZE; i++) {
if (vvTried[n][i] != -1) {
if (!setTried.count(vvTried[n][i]))
if (!setTried.contains(vvTried[n][i]))
return -11;
const auto it{mapInfo.find(vvTried[n][i])};
if (it == mapInfo.end() || it->second.GetTriedBucket(nKey, m_netgroupman) != n) {
@ -1132,7 +1132,7 @@ int AddrManImpl::CheckAddrman() const
for (int n = 0; n < ADDRMAN_NEW_BUCKET_COUNT; n++) {
for (int i = 0; i < ADDRMAN_BUCKET_SIZE; i++) {
if (vvNew[n][i] != -1) {
if (!mapNew.count(vvNew[n][i]))
if (!mapNew.contains(vvNew[n][i]))
return -12;
const auto it{mapInfo.find(vvNew[n][i])};
if (it == mapInfo.end() || it->second.GetBucketPosition(nKey, true, n) != i) {


@ -1128,7 +1128,7 @@ static void ParseGetInfoResult(UniValue& result)
const std::string proxy = network["proxy"].getValStr();
if (proxy.empty()) continue;
// Add proxy to ordered_proxy if has not been processed
if (proxy_networks.find(proxy) == proxy_networks.end()) ordered_proxies.push_back(proxy);
if (!proxy_networks.contains(proxy)) ordered_proxies.push_back(proxy);
proxy_networks[proxy].push_back(network["name"].getValStr());
}


@ -588,7 +588,7 @@ static void MutateTxSign(CMutableTransaction& tx, const std::string& flagStr)
CCoinsView viewDummy;
CCoinsViewCache view(&viewDummy);
if (!registers.count("privatekeys"))
if (!registers.contains("privatekeys"))
throw std::runtime_error("privatekeys register variable must be set.");
FillableSigningProvider tempKeystore;
UniValue keysObj = registers["privatekeys"];
@ -604,7 +604,7 @@ static void MutateTxSign(CMutableTransaction& tx, const std::string& flagStr)
}
// Add previous txouts given in the RPC call:
if (!registers.count("prevtxs"))
if (!registers.contains("prevtxs"))
throw std::runtime_error("prevtxs register variable must be set.");
UniValue prevtxsObj = registers["prevtxs"];
{


@ -164,7 +164,7 @@ std::list<SectionInfo> ArgsManager::GetUnrecognizedSections() const
LOCK(cs_args);
std::list<SectionInfo> unrecognized = m_config_sections;
unrecognized.remove_if([](const SectionInfo& appeared){ return available_sections.find(appeared.m_name) != available_sections.end(); });
unrecognized.remove_if([](const SectionInfo& appeared){ return available_sections.contains(appeared.m_name); });
return unrecognized;
}
@ -832,7 +832,7 @@ std::variant<ChainType, std::string> ArgsManager::GetChainArg() const
bool ArgsManager::UseDefaultSection(const std::string& arg) const
{
return m_network == ChainTypeToString(ChainType::MAIN) || m_network_only_args.count(arg) == 0;
return m_network == ChainTypeToString(ChainType::MAIN) || !m_network_only_args.contains(arg);
}
common::SettingsValue ArgsManager::GetSetting(const std::string& arg) const


@ -142,7 +142,7 @@ static bool HTTPReq_JSONRPC(const std::any& context, HTTPRequest* req)
jreq.URI = req->GetURI();
UniValue reply;
bool user_has_whitelist = g_rpc_whitelist.count(jreq.authUser);
bool user_has_whitelist = g_rpc_whitelist.contains(jreq.authUser);
if (!user_has_whitelist && g_rpc_whitelist_default) {
LogWarning("RPC User %s not allowed to call any methods", jreq.authUser);
req->WriteReply(HTTP_FORBIDDEN);
@ -151,7 +151,7 @@ static bool HTTPReq_JSONRPC(const std::any& context, HTTPRequest* req)
// singleton request
} else if (valRequest.isObject()) {
jreq.parse(valRequest);
if (user_has_whitelist && !g_rpc_whitelist[jreq.authUser].count(jreq.strMethod)) {
if (user_has_whitelist && !g_rpc_whitelist[jreq.authUser].contains(jreq.strMethod)) {
LogWarning("RPC User %s not allowed to call method %s", jreq.authUser, jreq.strMethod);
req->WriteReply(HTTP_FORBIDDEN);
return false;
@ -181,7 +181,7 @@ static bool HTTPReq_JSONRPC(const std::any& context, HTTPRequest* req)
const UniValue& request = valRequest[reqIdx].get_obj();
// Parse method
std::string strMethod = request.find_value("method").get_str();
if (!g_rpc_whitelist[jreq.authUser].count(strMethod)) {
if (!g_rpc_whitelist[jreq.authUser].contains(strMethod)) {
LogWarning("RPC User %s not allowed to call method %s", jreq.authUser, strMethod);
req->WriteReply(HTTP_FORBIDDEN);
return false;
@ -307,7 +307,7 @@ static bool InitRPCAuthentication()
for (const std::string& strRPCWhitelist : gArgs.GetArgs("-rpcwhitelist")) {
auto pos = strRPCWhitelist.find(':');
std::string strUser = strRPCWhitelist.substr(0, pos);
bool intersect = g_rpc_whitelist.count(strUser);
bool intersect = g_rpc_whitelist.contains(strUser);
std::set<std::string>& whitelist = g_rpc_whitelist[strUser];
if (pos != std::string::npos) {
std::string strWhitelist = strRPCWhitelist.substr(pos + 1);


@ -968,7 +968,7 @@ bool AppInitParameterInteraction(const ArgsManager& args)
// Signal NODE_COMPACT_FILTERS if peerblockfilters and basic filters index are both enabled.
if (args.GetBoolArg("-peerblockfilters", DEFAULT_PEERBLOCKFILTERS)) {
if (g_enabled_filter_types.count(BlockFilterType::BASIC) != 1) {
if (!g_enabled_filter_types.contains(BlockFilterType::BASIC)) {
return InitError(_("Cannot set -peerblockfilters without -blockfilterindex."));
}


@ -40,7 +40,7 @@ CMerkleBlock::CMerkleBlock(const CBlock& block, CBloomFilter* filter, const std:
for (unsigned int i = 0; i < block.vtx.size(); i++)
{
const Txid& hash{block.vtx[i]->GetHash()};
if (txids && txids->count(hash)) {
if (txids && txids->contains(hash)) {
vMatch.push_back(true);
} else if (filter && filter->IsRelevantAndUpdate(*block.vtx[i])) {
vMatch.push_back(true);


@ -329,7 +329,7 @@ bool SeenLocal(const CService& addr)
bool IsLocal(const CService& addr)
{
LOCK(g_maplocalhost_mutex);
return mapLocalHost.count(addr) > 0;
return mapLocalHost.contains(addr);
}
bool CConnman::AlreadyConnectedToHost(std::string_view host) const
@ -2629,7 +2629,7 @@ void CConnman::ThreadOpenConnections(const std::vector<std::string> connect, std
// (e.g. in case of -onlynet changes by the user), fixed seeds will
// be loaded only for networks for which we have no addresses.
seed_addrs.erase(std::remove_if(seed_addrs.begin(), seed_addrs.end(),
[&fixed_seed_networks](const CAddress& addr) { return fixed_seed_networks.count(addr.GetNetwork()) == 0; }),
[&fixed_seed_networks](const CAddress& addr) { return !fixed_seed_networks.contains(addr.GetNetwork()); }),
seed_addrs.end());
CNetAddr local;
local.SetInternal("fixedseeds");
@ -2776,7 +2776,7 @@ void CConnman::ThreadOpenConnections(const std::vector<std::string> connect, std
m_anchors.pop_back();
if (!addr.IsValid() || IsLocal(addr) || !g_reachable_nets.Contains(addr) ||
!m_msgproc->HasAllDesirableServiceFlags(addr.nServices) ||
outbound_ipv46_peer_netgroups.count(m_netgroupman.GetGroup(addr))) continue;
outbound_ipv46_peer_netgroups.contains(m_netgroupman.GetGroup(addr))) continue;
addrConnect = addr;
LogDebug(BCLog::NET, "Trying to make an anchor connection to %s\n", addrConnect.ToStringAddrPort());
break;
@ -2822,7 +2822,7 @@ void CConnman::ThreadOpenConnections(const std::vector<std::string> connect, std
}
// Require outbound IPv4/IPv6 connections, other than feelers, to be to distinct network groups
if (!fFeeler && outbound_ipv46_peer_netgroups.count(m_netgroupman.GetGroup(addr))) {
if (!fFeeler && outbound_ipv46_peer_netgroups.contains(m_netgroupman.GetGroup(addr))) {
continue;
}


@ -1159,7 +1159,7 @@ std::chrono::microseconds PeerManagerImpl::NextInvToInbounds(std::chrono::micros
bool PeerManagerImpl::IsBlockRequested(const uint256& hash)
{
return mapBlocksInFlight.count(hash);
return mapBlocksInFlight.contains(hash);
}
bool PeerManagerImpl::IsBlockRequestedFromOutbound(const uint256& hash)


@ -132,7 +132,7 @@ public:
{
AssertLockNotHeld(m_mutex);
LOCK(m_mutex);
return m_reachable.count(net) > 0;
return m_reachable.contains(net);
}
[[nodiscard]] bool Contains(const CNetAddr& addr) const EXCLUSIVE_LOCKS_REQUIRED(!m_mutex)


@ -73,7 +73,7 @@ MiniMiner::MiniMiner(const CTxMemPool& mempool, const std::vector<COutPoint>& ou
// Add every entry to m_entries_by_txid and m_entries, except the ones that will be replaced.
for (const auto& txiter : cluster) {
if (!m_to_be_replaced.count(txiter->GetTx().GetHash())) {
if (!m_to_be_replaced.contains(txiter->GetTx().GetHash())) {
auto [ancestor_count, ancestor_size, ancestor_fee] = mempool.CalculateAncestorData(*txiter);
auto [mapiter, success] = m_entries_by_txid.emplace(txiter->GetTx().GetHash(),
MiniMinerMempoolEntry{/*tx_in=*/txiter->GetSharedTx(),
@ -101,13 +101,13 @@ MiniMiner::MiniMiner(const CTxMemPool& mempool, const std::vector<COutPoint>& ou
// Cache descendants for future use. Unlike the real mempool, a descendant MiniMinerMempoolEntry
// will not exist without its ancestor MiniMinerMempoolEntry, so these sets won't be invalidated.
std::vector<MockEntryMap::iterator> cached_descendants;
const bool remove{m_to_be_replaced.count(txid) > 0};
const bool remove{m_to_be_replaced.contains(txid)};
CTxMemPool::setEntries descendants;
mempool.CalculateDescendants(txiter, descendants);
Assume(descendants.count(txiter) > 0);
Assume(descendants.contains(txiter));
for (const auto& desc_txiter : descendants) {
const auto txid_desc = desc_txiter->GetTx().GetHash();
const bool remove_desc{m_to_be_replaced.count(txid_desc) > 0};
const bool remove_desc{m_to_be_replaced.contains(txid_desc)};
auto desc_it{m_entries_by_txid.find(txid_desc)};
Assume((desc_it == m_entries_by_txid.end()) == remove_desc);
if (remove) Assume(remove_desc);
@ -136,7 +136,7 @@ MiniMiner::MiniMiner(const std::vector<MiniMinerMempoolEntry>& manual_entries,
for (const auto& entry : manual_entries) {
const auto& txid = entry.GetTx().GetHash();
// We need to know the descendant set of every transaction.
if (!Assume(descendant_caches.count(txid) > 0)) {
if (!Assume(descendant_caches.contains(txid))) {
m_ready_to_calculate = false;
return;
}
@ -201,7 +201,7 @@ void MiniMiner::DeleteAncestorPackage(const std::set<MockEntryMap::iterator, Ite
Assume(ancestors.size() >= 1);
// "Mine" all transactions in this ancestor set.
for (auto& anc : ancestors) {
Assume(m_in_block.count(anc->first) == 0);
Assume(!m_in_block.contains(anc->first));
m_in_block.insert(anc->first);
m_total_fees += anc->second.GetModifiedFee();
m_total_vsize += anc->second.GetTxSize();
@ -240,7 +240,7 @@ void MiniMiner::SanityCheck() const
entry->second.GetModFeesWithAncestors() >= entry->second.GetModifiedFee();}));
// None of the entries should be to-be-replaced transactions
Assume(std::all_of(m_to_be_replaced.begin(), m_to_be_replaced.end(),
[&](const auto& txid){return m_entries_by_txid.find(txid) == m_entries_by_txid.end();}));
[&](const auto& txid){ return !m_entries_by_txid.contains(txid); }));
}
void MiniMiner::BuildMockTemplate(std::optional<CFeeRate> target_feerate)
@ -274,7 +274,7 @@ void MiniMiner::BuildMockTemplate(std::optional<CFeeRate> target_feerate)
ancestors.insert(*iter);
for (const auto& input : (*iter)->second.GetTx().vin) {
if (auto parent_it{m_entries_by_txid.find(input.prevout.hash)}; parent_it != m_entries_by_txid.end()) {
if (ancestors.count(parent_it) == 0) {
if (!ancestors.contains(parent_it)) {
to_process.insert(parent_it);
}
}
@ -400,7 +400,7 @@ std::optional<CAmount> MiniMiner::CalculateTotalBumpFees(const CFeeRate& target_
for (const auto& [txid, outpoints] : m_requested_outpoints_by_txid) {
// Skip any ancestors that already have a miner score higher than the target feerate
// (already "made it" into the block)
if (m_in_block.count(txid)) continue;
if (m_in_block.contains(txid)) continue;
auto iter = m_entries_by_txid.find(txid);
if (iter == m_entries_by_txid.end()) continue;
to_process.insert(iter);
@ -413,7 +413,7 @@ std::optional<CAmount> MiniMiner::CalculateTotalBumpFees(const CFeeRate& target_
const CTransaction& tx = (*iter)->second.GetTx();
for (const auto& input : tx.vin) {
if (auto parent_it{m_entries_by_txid.find(input.prevout.hash)}; parent_it != m_entries_by_txid.end()) {
if (!has_been_processed.count(input.prevout.hash)) {
if (!has_been_processed.contains(input.prevout.hash)) {
to_process.insert(parent_it);
}
ancestors.insert(parent_it);


@ -77,7 +77,7 @@ public:
// Read the version
uint16_t version;
s >> version;
if (m_supported_versions.find(version) == m_supported_versions.end()) {
if (!m_supported_versions.contains(version)) {
throw std::ios_base::failure(strprintf("Version of snapshot %s does not match any of the supported versions.", version));
}


@ -598,7 +598,7 @@ void CBlockPolicyEstimator::processTransaction(const NewMempoolTransactionInfo&
LOCK(m_cs_fee_estimator);
const unsigned int txHeight = tx.info.txHeight;
const auto& hash = tx.info.m_tx->GetHash();
if (mapMemPoolTxs.count(hash)) {
if (mapMemPoolTxs.contains(hash)) {
LogDebug(BCLog::ESTIMATEFEE, "Blockpolicy error mempool tx %s already being tracked\n",
hash.ToString());
return;


@ -26,7 +26,7 @@ bool IsTopoSortedPackage(const Package& txns, std::unordered_set<Txid, SaltedTxi
// than its child.
for (const auto& tx : txns) {
for (const auto& input : tx->vin) {
if (later_txids.find(input.prevout.hash) != later_txids.end()) {
if (later_txids.contains(input.prevout.hash)) {
// The parent is a subsequent transaction in the package.
return false;
}
@ -62,7 +62,7 @@ bool IsConsistentPackage(const Package& txns)
return false;
}
for (const auto& input : tx->vin) {
if (inputs_seen.find(input.prevout) != inputs_seen.end()) {
if (inputs_seen.contains(input.prevout)) {
// This input is also present in another tx in the package.
return false;
}
@ -130,7 +130,7 @@ bool IsChildWithParents(const Package& package)
// Every transaction must be a parent of the last transaction in the package.
return std::all_of(package.cbegin(), package.cend() - 1,
[&input_txids](const auto& ptx) { return input_txids.count(ptx->GetHash()) > 0; });
[&input_txids](const auto& ptx) { return input_txids.contains(ptx->GetHash()); });
}
bool IsChildWithParentsTree(const Package& package)
@ -142,7 +142,7 @@ bool IsChildWithParentsTree(const Package& package)
// Each parent must not have an input who is one of the other parents.
return std::all_of(package.cbegin(), package.cend() - 1, [&](const auto& ptx) {
for (const auto& input : ptx->vin) {
if (parent_txids.count(input.prevout.hash) > 0) return false;
if (parent_txids.contains(input.prevout.hash)) return false;
}
return true;
});


@ -88,7 +88,7 @@ std::optional<std::string> EntriesAndTxidsDisjoint(const CTxMemPool::setEntries&
{
for (CTxMemPool::txiter ancestorIt : ancestors) {
const Txid& hashAncestor = ancestorIt->GetTx().GetHash();
if (direct_conflicts.count(hashAncestor)) {
if (direct_conflicts.contains(hashAncestor)) {
return strprintf("%s spends conflicting transaction %s",
txid.ToString(),
hashAncestor.ToString());


@ -30,7 +30,7 @@ std::vector<size_t> FindInPackageParents(const Package& package, const CTransact
// We assume the package is sorted, so that we don't need to continue
// looking past the transaction itself.
if (&(*tx) == &(*ptx)) break;
if (possible_parents.count(tx->GetHash())) {
if (possible_parents.contains(tx->GetHash())) {
in_package_parents.push_back(i);
}
}
@ -240,7 +240,7 @@ std::optional<std::pair<std::string, CTransactionRef>> SingleTRUCChecks(const CT
// TRUC transaction can only have 1 descendant.
const bool child_will_be_replaced = !descendants.empty() &&
std::any_of(descendants.cbegin(), descendants.cend(),
[&direct_conflicts](const CTxMemPool::txiter& child){return direct_conflicts.count(child->GetTx().GetHash()) > 0;});
[&direct_conflicts](const CTxMemPool::txiter& child){return direct_conflicts.contains(child->GetTx().GetHash());});
if (pool.GetDescendantCount(parent_entry) + 1 > TRUC_DESCENDANT_LIMIT && !child_will_be_replaced) {
// Allow sibling eviction for TRUC transaction: if another child already exists, even if
// we don't conflict inputs with it, consider evicting it under RBF rules. We rely on TRUC rules


@ -38,7 +38,7 @@ bool PartiallySignedTransaction::Merge(const PartiallySignedTransaction& psbt)
outputs[i].Merge(psbt.outputs[i]);
}
for (auto& xpub_pair : psbt.m_xpubs) {
if (m_xpubs.count(xpub_pair.first) == 0) {
if (!m_xpubs.contains(xpub_pair.first)) {
m_xpubs[xpub_pair.first] = xpub_pair.second;
} else {
m_xpubs[xpub_pair.first].insert(xpub_pair.second.begin(), xpub_pair.second.end());


@ -158,7 +158,7 @@ void DeserializeHDKeypaths(Stream& s, const std::vector<unsigned char>& key, std
if (!pubkey.IsFullyValid()) {
throw std::ios_base::failure("Invalid pubkey");
}
if (hd_keypaths.count(pubkey) > 0) {
if (hd_keypaths.contains(pubkey)) {
throw std::ios_base::failure("Duplicate Key, pubkey derivation path already provided");
}
@ -518,7 +518,7 @@ struct PSBTInput
if (!pubkey.IsFullyValid()) {
throw std::ios_base::failure("Invalid pubkey");
}
if (partial_sigs.count(pubkey.GetID()) > 0) {
if (partial_sigs.contains(pubkey.GetID())) {
throw std::ios_base::failure("Duplicate Key, input partial signature for pubkey already provided");
}
@ -599,7 +599,7 @@ struct PSBTInput
// Read in the hash from key
std::vector<unsigned char> hash_vec(key.begin() + 1, key.end());
uint160 hash(hash_vec);
if (ripemd160_preimages.count(hash) > 0) {
if (ripemd160_preimages.contains(hash)) {
throw std::ios_base::failure("Duplicate Key, input ripemd160 preimage already provided");
}
@ -620,7 +620,7 @@ struct PSBTInput
// Read in the hash from key
std::vector<unsigned char> hash_vec(key.begin() + 1, key.end());
uint256 hash(hash_vec);
if (sha256_preimages.count(hash) > 0) {
if (sha256_preimages.contains(hash)) {
throw std::ios_base::failure("Duplicate Key, input sha256 preimage already provided");
}
@ -641,7 +641,7 @@ struct PSBTInput
// Read in the hash from key
std::vector<unsigned char> hash_vec(key.begin() + 1, key.end());
uint160 hash(hash_vec);
if (hash160_preimages.count(hash) > 0) {
if (hash160_preimages.contains(hash)) {
throw std::ios_base::failure("Duplicate Key, input hash160 preimage already provided");
}
@ -662,7 +662,7 @@ struct PSBTInput
// Read in the hash from key
std::vector<unsigned char> hash_vec(key.begin() + 1, key.end());
uint256 hash(hash_vec);
if (hash256_preimages.count(hash) > 0) {
if (hash256_preimages.contains(hash)) {
throw std::ios_base::failure("Duplicate Key, input hash256 preimage already provided");
}
@ -828,7 +828,7 @@ struct PSBTInput
this_prop.subtype = ReadCompactSize(skey);
this_prop.key = key;
if (m_proprietary.count(this_prop) > 0) {
if (m_proprietary.contains(this_prop)) {
throw std::ios_base::failure("Duplicate Key, proprietary key already found");
}
s >> this_prop.value;
@ -837,7 +837,7 @@ struct PSBTInput
}
// Unknown stuff
default:
if (unknown.count(key) > 0) {
if (unknown.contains(key)) {
throw std::ios_base::failure("Duplicate Key, key for unknown value already provided");
}
// Read in the value
@ -1088,7 +1088,7 @@ struct PSBTOutput
this_prop.subtype = ReadCompactSize(skey);
this_prop.key = key;
if (m_proprietary.count(this_prop) > 0) {
if (m_proprietary.contains(this_prop)) {
throw std::ios_base::failure("Duplicate Key, proprietary key already found");
}
s >> this_prop.value;
@ -1097,7 +1097,7 @@ struct PSBTOutput
}
// Unknown stuff
default: {
if (unknown.count(key) > 0) {
if (unknown.contains(key)) {
throw std::ios_base::failure("Duplicate Key, key for unknown value already provided");
}
// Read in the value
@ -1276,7 +1276,7 @@ struct PartiallySignedTransaction
if (!xpub.pubkey.IsFullyValid()) {
throw std::ios_base::failure("Invalid pubkey");
}
if (global_xpubs.count(xpub) > 0) {
if (global_xpubs.contains(xpub)) {
throw std::ios_base::failure("Duplicate key, global xpub already provided");
}
global_xpubs.insert(xpub);
@ -1286,7 +1286,7 @@ struct PartiallySignedTransaction
// Note that we store these swapped to make searches faster.
// Serialization uses xpub -> keypath to enqure key uniqueness
if (m_xpubs.count(keypath) == 0) {
if (!m_xpubs.contains(keypath)) {
// Make a new set to put the xpub in
m_xpubs[keypath] = {xpub};
} else {
@ -1317,7 +1317,7 @@ struct PartiallySignedTransaction
this_prop.subtype = ReadCompactSize(skey);
this_prop.key = key;
if (m_proprietary.count(this_prop) > 0) {
if (m_proprietary.contains(this_prop)) {
throw std::ios_base::failure("Duplicate Key, proprietary key already found");
}
s >> this_prop.value;
@ -1326,7 +1326,7 @@ struct PartiallySignedTransaction
}
// Unknown stuff
default: {
if (unknown.count(key) > 0) {
if (unknown.contains(key)) {
throw std::ios_base::failure("Duplicate Key, key for unknown value already provided");
}
// Read in the value


@ -70,7 +70,7 @@ bool WalletFrame::addView(WalletView* walletView)
{
if (!clientModel) return false;
if (mapWalletViews.count(walletView->getWalletModel()) > 0) return false;
if (mapWalletViews.contains(walletView->getWalletModel())) return false;
walletView->setClientModel(clientModel);
walletView->showOutOfSyncWarning(bOutOfSync);
@ -90,7 +90,7 @@ bool WalletFrame::addView(WalletView* walletView)
void WalletFrame::setCurrentWallet(WalletModel* wallet_model)
{
if (mapWalletViews.count(wallet_model) == 0) return;
if (!mapWalletViews.contains(wallet_model)) return;
// Stop the effect of hidden widgets on the size hint of the shown one in QStackedWidget.
WalletView* view_about_to_hide = currentWalletView();
@ -116,7 +116,7 @@ void WalletFrame::setCurrentWallet(WalletModel* wallet_model)
void WalletFrame::removeWallet(WalletModel* wallet_model)
{
if (mapWalletViews.count(wallet_model) == 0) return;
if (!mapWalletViews.contains(wallet_model)) return;
WalletView *walletView = mapWalletViews.take(wallet_model);
walletStack->removeWidget(walletView);


@ -1896,7 +1896,7 @@ static inline bool SetHasKeys(const std::set<T>& set) {return false;}
template<typename T, typename Tk, typename... Args>
static inline bool SetHasKeys(const std::set<T>& set, const Tk& key, const Args&... args)
{
return (set.count(key) != 0) || SetHasKeys(set, args...);
return (set.contains(key)) || SetHasKeys(set, args...);
}
// outpoint (needed for the utxo index) + nHeight + fCoinBase
@ -1987,12 +1987,12 @@ static RPCHelpMan getblockstats()
const CBlockUndo& blockUndo = GetUndoChecked(chainman.m_blockman, pindex);
const bool do_all = stats.size() == 0; // Calculate everything if nothing selected (default)
const bool do_mediantxsize = do_all || stats.count("mediantxsize") != 0;
const bool do_medianfee = do_all || stats.count("medianfee") != 0;
const bool do_feerate_percentiles = do_all || stats.count("feerate_percentiles") != 0;
const bool do_mediantxsize = do_all || stats.contains("mediantxsize");
const bool do_medianfee = do_all || stats.contains("medianfee");
const bool do_feerate_percentiles = do_all || stats.contains("feerate_percentiles");
const bool loop_inputs = do_all || do_medianfee || do_feerate_percentiles ||
SetHasKeys(stats, "utxo_increase", "utxo_increase_actual", "utxo_size_inc", "utxo_size_inc_actual", "totalfee", "avgfee", "avgfeerate", "minfee", "maxfee", "minfeerate", "maxfeerate");
const bool loop_outputs = do_all || loop_inputs || stats.count("total_out");
const bool loop_outputs = do_all || loop_inputs || stats.contains("total_out");
const bool do_calculate_size = do_mediantxsize ||
SetHasKeys(stats, "total_size", "avgtxsize", "mintxsize", "maxtxsize", "swtotal_size");
const bool do_calculate_weight = do_all || SetHasKeys(stats, "total_weight", "avgfeerate", "swtotal_weight", "avgfeerate", "feerate_percentiles", "minfeerate", "maxfeerate");
@ -2185,7 +2185,7 @@ bool FindScriptPubKey(std::atomic<int>& scan_progress, const std::atomic<bool>&
uint32_t high = 0x100 * *UCharCast(key.hash.begin()) + *(UCharCast(key.hash.begin()) + 1);
scan_progress = (int)(high * 100.0 / 65536.0 + 0.5);
}
if (needles.count(coin.out.scriptPubKey)) {
if (needles.contains(coin.out.scriptPubKey)) {
out_results.emplace(key, coin);
}
cursor->Next();
@ -2460,7 +2460,7 @@ static bool CheckBlockFilterMatches(BlockManager& blockman, const CBlockIndex& b
// Check if any of the outputs match the scriptPubKey
for (const auto& tx : block.vtx) {
if (std::any_of(tx->vout.cbegin(), tx->vout.cend(), [&](const auto& txout) {
return needles.count(std::vector<unsigned char>(txout.scriptPubKey.begin(), txout.scriptPubKey.end())) != 0;
return needles.contains(std::vector<unsigned char>(txout.scriptPubKey.begin(), txout.scriptPubKey.end()));
})) {
return true;
}
@ -2468,7 +2468,7 @@ static bool CheckBlockFilterMatches(BlockManager& blockman, const CBlockIndex& b
// Check if any of the inputs match the scriptPubKey
for (const auto& txundo : block_undo.vtxundo) {
if (std::any_of(txundo.vprevout.cbegin(), txundo.vprevout.cend(), [&](const auto& coin) {
return needles.count(std::vector<unsigned char>(coin.out.scriptPubKey.begin(), coin.out.scriptPubKey.end())) != 0;
return needles.contains(std::vector<unsigned char>(coin.out.scriptPubKey.begin(), coin.out.scriptPubKey.end()));
})) {
return true;
}


@ -913,7 +913,7 @@ static RPCHelpMan getblocktemplate()
UniValue deps(UniValue::VARR);
for (const CTxIn &in : tx.vin)
{
if (setTxIndex.count(in.prevout.hash))
if (setTxIndex.contains(in.prevout.hash))
deps.push_back(setTxIndex[in.prevout.hash]);
}
entry.pushKV("depends", std::move(deps));
@ -957,7 +957,7 @@ static RPCHelpMan getblocktemplate()
for (const auto& [name, info] : gbtstatus.signalling) {
vbavailable.pushKV(gbt_rule_value(name, info.gbt_optional_rule), info.bit);
if (!info.gbt_optional_rule && !setClientRules.count(name)) {
if (!info.gbt_optional_rule && !setClientRules.contains(name)) {
// If the client doesn't support this, don't indicate it in the [default] version
block.nVersion &= ~info.mask;
}
@ -966,7 +966,7 @@ static RPCHelpMan getblocktemplate()
for (const auto& [name, info] : gbtstatus.locked_in) {
block.nVersion |= info.mask;
vbavailable.pushKV(gbt_rule_value(name, info.gbt_optional_rule), info.bit);
if (!info.gbt_optional_rule && !setClientRules.count(name)) {
if (!info.gbt_optional_rule && !setClientRules.contains(name)) {
// If the client doesn't support this, don't indicate it in the [default] version
block.nVersion &= ~info.mask;
}
@ -974,7 +974,7 @@ static RPCHelpMan getblocktemplate()
for (const auto& [name, info] : gbtstatus.active) {
aRules.push_back(gbt_rule_value(name, info.gbt_optional_rule));
if (!info.gbt_optional_rule && !setClientRules.count(name)) {
if (!info.gbt_optional_rule && !setClientRules.contains(name)) {
// Not supported by the client; make sure it's safe to proceed
throw JSONRPCError(RPC_INVALID_PARAMETER, strprintf("Support for '%s' rule requires explicit client support", name));
}


@ -1841,7 +1841,7 @@ static RPCHelpMan joinpsbts()
merged_psbt.AddOutput(psbt.tx->vout[i], psbt.outputs[i]);
}
for (auto& xpub_pair : psbt.m_xpubs) {
if (merged_psbt.m_xpubs.count(xpub_pair.first) == 0) {
if (!merged_psbt.m_xpubs.contains(xpub_pair.first)) {
merged_psbt.m_xpubs[xpub_pair.first] = xpub_pair.second;
} else {
merged_psbt.m_xpubs[xpub_pair.first].insert(xpub_pair.second.begin(), xpub_pair.second.end());


@ -109,7 +109,7 @@ static RPCHelpMan gettxoutproof()
unsigned int ntxFound = 0;
for (const auto& tx : block.vtx) {
if (setTxids.count(tx->GetHash())) {
if (setTxids.contains(tx->GetHash())) {
ntxFound++;
}
}


@ -71,7 +71,7 @@ void RPCTypeCheckObj(const UniValue& o,
{
for (const std::string& k : o.getKeys())
{
if (typesExpected.count(k) == 0)
if (!typesExpected.contains(k))
{
std::string err = strprintf("Unexpected key %s", k);
throw JSONRPCError(RPC_TYPE_ERROR, err);
@ -1172,7 +1172,7 @@ UniValue RPCResult::MatchesType(const UniValue& result) const
std::map<std::string, UniValue> result_obj;
result.getObjMap(result_obj);
for (const auto& result_entry : result_obj) {
if (doc_keys.find(result_entry.first) == doc_keys.end()) {
if (!doc_keys.contains(result_entry.first)) {
errors.pushKV(result_entry.first, "key returned that was not in doc");
}
}


@ -890,7 +890,7 @@ SignatureData DataFromTransaction(const CMutableTransaction& tx, unsigned int nI
for (unsigned int i = last_success_key; i < num_pubkeys; ++i) {
const valtype& pubkey = solutions[i+1];
// We either have a signature for this pubkey, or we have found a signature and it is valid
if (data.signatures.count(CPubKey(pubkey).GetID()) || extractor_checker.CheckECDSASignature(sig, pubkey, next_script, sigversion)) {
if (data.signatures.contains(CPubKey(pubkey).GetID()) || extractor_checker.CheckECDSASignature(sig, pubkey, next_script, sigversion)) {
last_success_key = i + 1;
break;
}


@ -196,7 +196,7 @@ bool FillableSigningProvider::AddKeyPubKey(const CKey& key, const CPubKey &pubke
bool FillableSigningProvider::HaveKey(const CKeyID &address) const
{
LOCK(cs_KeyStore);
return mapKeys.count(address) > 0;
return mapKeys.contains(address);
}
std::set<CKeyID> FillableSigningProvider::GetKeys() const
@ -235,7 +235,7 @@ bool FillableSigningProvider::AddCScript(const CScript& redeemScript)
bool FillableSigningProvider::HaveCScript(const CScriptID& hash) const
{
LOCK(cs_KeyStore);
return mapScripts.count(hash) > 0;
return mapScripts.contains(hash);
}
std::set<CScriptID> FillableSigningProvider::GetCScripts() const


@ -173,11 +173,11 @@ static void push_lock(MutexType* c, const CLockLocation& locklocation)
}
const LockPair p1 = std::make_pair(i.first, c);
if (lockdata.lockorders.count(p1))
if (lockdata.lockorders.contains(p1))
continue;
const LockPair p2 = std::make_pair(c, i.first);
if (lockdata.lockorders.count(p2)) {
if (lockdata.lockorders.contains(p2)) {
auto lock_stack_copy = lock_stack;
lock_stack.pop_back();
potential_deadlock_detected(p1, lockdata.lockorders[p2], lock_stack_copy);


@ -232,8 +232,8 @@ BOOST_AUTO_TEST_CASE(util_ParseParameters)
BOOST_CHECK(testArgs.m_settings.command_line_options.size() == 3 && testArgs.m_settings.ro_config.empty());
BOOST_CHECK(testArgs.IsArgSet("-a") && testArgs.IsArgSet("-b") && testArgs.IsArgSet("-ccc")
&& !testArgs.IsArgSet("f") && !testArgs.IsArgSet("-d"));
BOOST_CHECK(testArgs.m_settings.command_line_options.count("a") && testArgs.m_settings.command_line_options.count("b") && testArgs.m_settings.command_line_options.count("ccc")
&& !testArgs.m_settings.command_line_options.count("f") && !testArgs.m_settings.command_line_options.count("d"));
BOOST_CHECK(testArgs.m_settings.command_line_options.contains("a") && testArgs.m_settings.command_line_options.contains("b") && testArgs.m_settings.command_line_options.contains("ccc")
&& !testArgs.m_settings.command_line_options.contains("f") && !testArgs.m_settings.command_line_options.contains("d"));
BOOST_CHECK(testArgs.m_settings.command_line_options["a"].size() == 1);
BOOST_CHECK(testArgs.m_settings.command_line_options["a"].front().get_str() == "");
@ -460,18 +460,18 @@ BOOST_AUTO_TEST_CASE(util_ReadConfigStream)
BOOST_CHECK(test_args.m_settings.ro_config["sec1"].size() == 3);
BOOST_CHECK(test_args.m_settings.ro_config["sec2"].size() == 2);
BOOST_CHECK(test_args.m_settings.ro_config[""].count("a"));
BOOST_CHECK(test_args.m_settings.ro_config[""].count("b"));
BOOST_CHECK(test_args.m_settings.ro_config[""].count("ccc"));
BOOST_CHECK(test_args.m_settings.ro_config[""].count("d"));
BOOST_CHECK(test_args.m_settings.ro_config[""].count("fff"));
BOOST_CHECK(test_args.m_settings.ro_config[""].count("ggg"));
BOOST_CHECK(test_args.m_settings.ro_config[""].count("h"));
BOOST_CHECK(test_args.m_settings.ro_config[""].count("i"));
BOOST_CHECK(test_args.m_settings.ro_config["sec1"].count("ccc"));
BOOST_CHECK(test_args.m_settings.ro_config["sec1"].count("h"));
BOOST_CHECK(test_args.m_settings.ro_config["sec2"].count("ccc"));
BOOST_CHECK(test_args.m_settings.ro_config["sec2"].count("iii"));
BOOST_CHECK(test_args.m_settings.ro_config[""].contains("a"));
BOOST_CHECK(test_args.m_settings.ro_config[""].contains("b"));
BOOST_CHECK(test_args.m_settings.ro_config[""].contains("ccc"));
BOOST_CHECK(test_args.m_settings.ro_config[""].contains("d"));
BOOST_CHECK(test_args.m_settings.ro_config[""].contains("fff"));
BOOST_CHECK(test_args.m_settings.ro_config[""].contains("ggg"));
BOOST_CHECK(test_args.m_settings.ro_config[""].contains("h"));
BOOST_CHECK(test_args.m_settings.ro_config[""].contains("i"));
BOOST_CHECK(test_args.m_settings.ro_config["sec1"].contains("ccc"));
BOOST_CHECK(test_args.m_settings.ro_config["sec1"].contains("h"));
BOOST_CHECK(test_args.m_settings.ro_config["sec2"].contains("ccc"));
BOOST_CHECK(test_args.m_settings.ro_config["sec2"].contains("iii"));
BOOST_CHECK(test_args.IsArgSet("-a"));
BOOST_CHECK(test_args.IsArgSet("-b"));


@ -386,15 +386,15 @@ BOOST_FIXTURE_TEST_CASE(updatecoins_simulation_test, UpdateTest)
auto utxod = FindRandomFrom(disconnected_coins);
tx = CMutableTransaction{std::get<0>(utxod->second)};
prevout = tx.vin[0].prevout;
if (!CTransaction(tx).IsCoinBase() && !utxoset.count(prevout)) {
if (!CTransaction(tx).IsCoinBase() && !utxoset.contains(prevout)) {
disconnected_coins.erase(utxod->first);
continue;
}
// If this tx is already IN the UTXO, then it must be a coinbase, and it must be a duplicate
if (utxoset.count(utxod->first)) {
if (utxoset.contains(utxod->first)) {
assert(CTransaction(tx).IsCoinBase());
assert(duplicate_coins.count(utxod->first));
assert(duplicate_coins.contains(utxod->first));
}
disconnected_coins.erase(utxod->first);
}
@ -417,7 +417,7 @@ BOOST_FIXTURE_TEST_CASE(updatecoins_simulation_test, UpdateTest)
// The test is designed to ensure spending a duplicate coinbase will work properly
// if that ever happens and not resurrect the previously overwritten coinbase
if (duplicate_coins.count(prevout)) {
if (duplicate_coins.contains(prevout)) {
spent_a_duplicate_coinbase = true;
}


@ -456,7 +456,7 @@ void DoCheck(std::string prv, std::string pub, const std::string& norm_pub, int
// Test whether the observed key path is present in the 'paths' variable (which contains expected, unobserved paths),
// and then remove it from that set.
for (const auto& origin : script_provider.origins) {
BOOST_CHECK_MESSAGE(paths.count(origin.second.second.path), "Unexpected key path: " + prv);
BOOST_CHECK_MESSAGE(paths.contains(origin.second.second.path), "Unexpected key path: " + prv);
left_paths.erase(origin.second.second.path);
}
}


@ -155,7 +155,7 @@ public:
bool HaveCoin(const COutPoint& outpoint) const final
{
return m_data.count(outpoint);
return m_data.contains(outpoint);
}
uint256 GetBestBlock() const final { return {}; }


@ -687,7 +687,7 @@ struct SmartInfo
while (true) {
size_t set_size = useful_types.size();
for (const auto& [type, recipes] : table) {
if (useful_types.count(type) != 0) {
if (useful_types.contains(type)) {
for (const auto& [_, subtypes] : recipes) {
for (auto subtype : subtypes) useful_types.insert(subtype);
}
@ -697,7 +697,7 @@ struct SmartInfo
}
// Remove all rules that construct uninteresting types.
for (auto type_it = table.begin(); type_it != table.end();) {
if (useful_types.count(type_it->first) == 0) {
if (!useful_types.contains(type_it->first)) {
type_it = table.erase(type_it);
} else {
++type_it;
@ -710,7 +710,7 @@ struct SmartInfo
* because they can only be constructed using recipes that involve otherwise
* non-constructible types, or because they require infinite recursion. */
std::set<Type> constructible_types{};
auto known_constructible = [&](Type type) { return constructible_types.count(type) != 0; };
auto known_constructible = [&](Type type) { return constructible_types.contains(type); };
// Find the transitive closure by adding types until the set of types does not change.
while (true) {
size_t set_size = constructible_types.size();
@ -1177,13 +1177,13 @@ void TestNode(const MsCtx script_ctx, const NodeRef& node, FuzzedDataProvider& p
case Fragment::AFTER:
return node.k & 1;
case Fragment::SHA256:
return TEST_DATA.sha256_preimages.count(node.data);
return TEST_DATA.sha256_preimages.contains(node.data);
case Fragment::HASH256:
return TEST_DATA.hash256_preimages.count(node.data);
return TEST_DATA.hash256_preimages.contains(node.data);
case Fragment::RIPEMD160:
return TEST_DATA.ripemd160_preimages.count(node.data);
return TEST_DATA.ripemd160_preimages.contains(node.data);
case Fragment::HASH160:
return TEST_DATA.hash160_preimages.count(node.data);
return TEST_DATA.hash160_preimages.contains(node.data);
default:
assert(false);
}


@ -95,12 +95,12 @@ FUZZ_TARGET(partially_downloaded_block, .init = initialize_pdb)
for (size_t i = 0; i < cmpctblock.BlockTxCount(); i++) {
// If init_status == READ_STATUS_OK then a available transaction in the
// compact block (i.e. IsTxAvailable(i) == true) implies that we marked
// that transaction as available above (i.e. available.count(i) > 0).
// that transaction as available above (i.e. available.contains(i)).
// The reverse is not true, due to possible compact block short id
// collisions (i.e. available.count(i) > 0 does not imply
// collisions (i.e. available.contains(i) does not imply
// IsTxAvailable(i) == true).
if (init_status == READ_STATUS_OK) {
assert(!pdb.IsTxAvailable(i) || available.count(i) > 0);
assert(!pdb.IsTxAvailable(i) || available.contains(i));
}
bool skip{fuzzed_data_provider.ConsumeBool()};


@ -339,7 +339,7 @@ FUZZ_TARGET(txorphan_protected, .init = initialize_orphanage)
}
},
[&] { // EraseTx
if (protected_wtxids.count(tx->GetWitnessHash())) {
if (protected_wtxids.contains(tx->GetWitnessHash())) {
protected_wtxids.erase(wtxid);
}
orphanage->EraseTx(wtxid);
@ -616,7 +616,7 @@ FUZZ_TARGET(txorphanage_sim)
real->EraseForBlock(block);
std::erase_if(sim_announcements, [&](auto& ann) {
for (auto& txin : txn[ann.tx]->vin) {
if (spent.count(txin.prevout)) return true;
if (spent.contains(txin.prevout)) return true;
}
return false;
});


@ -103,7 +103,7 @@ BOOST_FIXTURE_TEST_CASE(miniminer_negative, TestChain100Setup)
mini_miner_no_target.BuildMockTemplate(std::nullopt);
const auto template_txids{mini_miner_no_target.GetMockTemplateTxids()};
BOOST_CHECK_EQUAL(template_txids.size(), 1);
BOOST_CHECK(template_txids.count(tx_mod_negative->GetHash()) > 0);
BOOST_CHECK(template_txids.contains(tx_mod_negative->GetHash()));
}
BOOST_FIXTURE_TEST_CASE(miniminer_1p1c, TestChain100Setup)


@ -207,17 +207,17 @@ struct Satisfier : public KeyConverter {
//! Implement simplified CLTV logic: stack value must exactly match an entry in `supported`.
bool CheckAfter(uint32_t value) const {
return supported.count(Challenge(ChallengeType::AFTER, value));
return supported.contains(Challenge(ChallengeType::AFTER, value));
}
//! Implement simplified CSV logic: stack value must exactly match an entry in `supported`.
bool CheckOlder(uint32_t value) const {
return supported.count(Challenge(ChallengeType::OLDER, value));
return supported.contains(Challenge(ChallengeType::OLDER, value));
}
//! Produce a signature for the given key.
miniscript::Availability Sign(const CPubKey& key, std::vector<unsigned char>& sig) const {
if (supported.count(Challenge(ChallengeType::PK, ChallengeNumber(key)))) {
if (supported.contains(Challenge(ChallengeType::PK, ChallengeNumber(key)))) {
if (!miniscript::IsTapscript(m_script_ctx)) {
auto it = g_testdata->signatures.find(key);
if (it == g_testdata->signatures.end()) return miniscript::Availability::NO;
@ -234,7 +234,7 @@ struct Satisfier : public KeyConverter {
//! Helper function for the various hash based satisfactions.
miniscript::Availability SatHash(const std::vector<unsigned char>& hash, std::vector<unsigned char>& preimage, ChallengeType chtype) const {
if (!supported.count(Challenge(chtype, ChallengeNumber(hash)))) return miniscript::Availability::NO;
if (!supported.contains(Challenge(chtype, ChallengeNumber(hash)))) return miniscript::Availability::NO;
const auto& m =
chtype == ChallengeType::SHA256 ? g_testdata->sha256_preimages :
chtype == ChallengeType::HASH256 ? g_testdata->hash256_preimages :


@ -40,12 +40,12 @@ bool IsProtected(int num_peers,
size_t unprotected_count{0};
for (const NodeEvictionCandidate& candidate : candidates) {
if (protected_peer_ids.count(candidate.id)) {
if (protected_peer_ids.contains(candidate.id)) {
// this peer should have been removed from the eviction candidates
BOOST_TEST_MESSAGE(strprintf("expected candidate to be protected: %d", candidate.id));
return false;
}
if (unprotected_peer_ids.count(candidate.id)) {
if (unprotected_peer_ids.contains(candidate.id)) {
// this peer remains in the eviction candidates, as expected
++unprotected_count;
}
@ -577,7 +577,7 @@ bool IsEvicted(std::vector<NodeEvictionCandidate> candidates, const std::unorder
if (!evicted_node_id) {
return false;
}
return node_ids.count(*evicted_node_id);
return node_ids.contains(*evicted_node_id);
}
// Create number_of_nodes random nodes, apply setup function candidate_setup_fn,


@ -892,7 +892,7 @@ BOOST_AUTO_TEST_CASE(script_build)
#ifdef UPDATE_JSON_TESTS
strGen += str + ",\n";
#else
if (tests_set.count(str) == 0) {
if (!tests_set.contains(str)) {
BOOST_CHECK_MESSAGE(false, "Missing auto script_valid test: " + test.GetComment());
}
#endif


@ -60,7 +60,7 @@ std::optional<std::string> CheckPackageMempoolAcceptResult(const Package& txns,
}
for (const auto& tx : txns) {
const auto& wtxid = tx->GetWitnessHash();
if (result.m_tx_results.count(wtxid) == 0) {
if (!result.m_tx_results.contains(wtxid)) {
return strprintf("result not found for tx %s", wtxid.ToString());
}


@ -58,7 +58,7 @@ BOOST_AUTO_TEST_CASE(util_threadnames_test_rename_threaded)
// Names "test_thread.[n]" should exist for n = [0, 99]
for (int i = 0; i < 100; ++i) {
BOOST_CHECK(names.find(TEST_THREAD_NAME_BASE + ToString(i)) != names.end());
BOOST_CHECK(names.contains(TEST_THREAD_NAME_BASE + ToString(i)));
}
}


@ -586,17 +586,17 @@ void TorController::protocolinfo_cb(TorControlConnection& _conn, const TorContro
*/
std::string torpassword = gArgs.GetArg("-torpassword", "");
if (!torpassword.empty()) {
if (methods.count("HASHEDPASSWORD")) {
if (methods.contains("HASHEDPASSWORD")) {
LogDebug(BCLog::TOR, "Using HASHEDPASSWORD authentication\n");
ReplaceAll(torpassword, "\"", "\\\"");
_conn.Command("AUTHENTICATE \"" + torpassword + "\"", std::bind(&TorController::auth_cb, this, std::placeholders::_1, std::placeholders::_2));
} else {
LogWarning("tor: Password provided with -torpassword, but HASHEDPASSWORD authentication is not available");
}
} else if (methods.count("NULL")) {
} else if (methods.contains("NULL")) {
LogDebug(BCLog::TOR, "Using NULL authentication\n");
_conn.Command("AUTHENTICATE", std::bind(&TorController::auth_cb, this, std::placeholders::_1, std::placeholders::_2));
} else if (methods.count("SAFECOOKIE")) {
} else if (methods.contains("SAFECOOKIE")) {
// Cookie: hexdump -e '32/1 "%02x""\n"' ~/.tor/control_auth_cookie
LogDebug(BCLog::TOR, "Using SAFECOOKIE authentication, reading cookie authentication from %s\n", cookiefile);
std::pair<bool,std::string> status_cookie = ReadBinaryFile(fs::PathFromString(cookiefile), TOR_COOKIE_SIZE);
@ -613,7 +613,7 @@ void TorController::protocolinfo_cb(TorControlConnection& _conn, const TorContro
LogWarning("tor: Authentication cookie %s could not be opened (check permissions)", cookiefile);
}
}
} else if (methods.count("HASHEDPASSWORD")) {
} else if (methods.contains("HASHEDPASSWORD")) {
LogWarning("tor: The only supported authentication mechanism left is password, but no password provided with -torpassword");
} else {
LogWarning("tor: No supported authentication method");


@ -2917,7 +2917,7 @@ void TxGraphImpl::SanityCheck() const
// Check the sequence number.
assert(cluster.m_sequence < m_next_sequence_counter);
assert(sequences.count(cluster.m_sequence) == 0);
assert(!sequences.contains(cluster.m_sequence));
sequences.insert(cluster.m_sequence);
// Remember we saw this Cluster (only if it is non-empty; empty Clusters aren't
// expected to be referenced by the Entry vector).


@ -988,7 +988,7 @@ util::Result<std::pair<std::vector<FeeFrac>, std::vector<FeeFrac>>> CTxMemPool::
CTxMemPool::ChangeSet::TxHandle CTxMemPool::ChangeSet::StageAddition(const CTransactionRef& tx, const CAmount fee, int64_t time, unsigned int entry_height, uint64_t entry_sequence, bool spends_coinbase, int64_t sigops_cost, LockPoints lp)
{
LOCK(m_pool->cs);
Assume(m_to_add.find(tx->GetHash()) == m_to_add.end());
Assume(!m_to_add.contains(tx->GetHash()));
Assume(!m_dependencies_processed);
// We need to process dependencies after adding a new transaction.


@ -566,7 +566,7 @@ public:
bool IsUnbroadcastTx(const Txid& txid) const EXCLUSIVE_LOCKS_REQUIRED(cs)
{
AssertLockHeld(cs);
return m_unbroadcast_txids.count(txid) != 0;
return m_unbroadcast_txids.contains(txid);
}
/** Guards this internal counter for external reporting */


@ -580,7 +580,7 @@ public:
// Bail out if we already have a CANDIDATE_BEST announcement for this (txhash, peer) combination. The case
// where there is a non-CANDIDATE_BEST announcement already will be caught by the uniqueness property of the
// ByPeer index when we try to emplace the new object below.
if (m_index.get<ByPeer>().count(ByPeerView{peer, true, gtxid.ToUint256()})) return;
if (m_index.get<ByPeer>().contains(ByPeerView{peer, true, gtxid.ToUint256()})) return;
// Try creating the announcement with CANDIDATE_DELAYED state (which will fail due to the uniqueness
// of the ByPeer index if a non-CANDIDATE_BEST announcement already exists with the same txhash and peer).


@ -50,7 +50,7 @@ LockResult LockDirectory(const fs::path& directory, const fs::path& lockfile_nam
fs::path pathLockFile = directory / lockfile_name;
// If a lock for this directory already exists in the map, don't try to re-lock it
if (dir_locks.count(fs::PathToString(pathLockFile))) {
if (dir_locks.contains(fs::PathToString(pathLockFile))) {
return LockResult::Success;
}


@ -1748,9 +1748,9 @@ PackageMempoolAcceptResult MemPoolAccept::AcceptPackage(const Package& package,
for (const auto& tx : package) {
const auto& wtxid = tx->GetWitnessHash();
if (multi_submission_result.m_tx_results.count(wtxid) > 0) {
if (multi_submission_result.m_tx_results.contains(wtxid)) {
// We shouldn't have re-submitted if the tx result was already in results_final.
Assume(results_final.count(wtxid) == 0);
Assume(!results_final.contains(wtxid));
// If it was submitted, check to see if the tx is still in the mempool. It could have
// been evicted due to LimitMempoolSize() above.
const auto& txresult = multi_submission_result.m_tx_results.at(wtxid);
@ -1766,7 +1766,7 @@ PackageMempoolAcceptResult MemPoolAccept::AcceptPackage(const Package& package,
// Already-in-mempool transaction. Check to see if it's still there, as it could have
// been evicted when LimitMempoolSize() was called.
Assume(it->second.m_result_type != MempoolAcceptResult::ResultType::INVALID);
Assume(individual_results_nonfinal.count(wtxid) == 0);
Assume(!individual_results_nonfinal.contains(wtxid));
// Query by txid to include the same-txid-different-witness ones.
if (!m_pool.exists(tx->GetHash())) {
package_state_final.Invalid(PackageValidationResult::PCKG_TX, "transaction failed");
@ -4874,14 +4874,14 @@ bool Chainstate::ReplayBlocks()
const CBlockIndex* pindexNew; // New tip during the interrupted flush.
const CBlockIndex* pindexFork = nullptr; // Latest block common to both the old and the new tip.
if (m_blockman.m_block_index.count(hashHeads[0]) == 0) {
if (!m_blockman.m_block_index.contains(hashHeads[0])) {
LogError("ReplayBlocks(): reorganization to unknown block requested\n");
return false;
}
pindexNew = &(m_blockman.m_block_index[hashHeads[0]]);
if (!hashHeads[1].IsNull()) { // The old tip is allowed to be 0, indicating it's the first flush.
if (m_blockman.m_block_index.count(hashHeads[1]) == 0) {
if (!m_blockman.m_block_index.contains(hashHeads[1])) {
LogError("ReplayBlocks(): reorganization from unknown block requested\n");
return false;
}
@ -5013,7 +5013,7 @@ bool Chainstate::LoadGenesisBlock()
// m_blockman.m_block_index. Note that we can't use m_chain here, since it is
// set based on the coins db, not the block index db, which is the only
// thing loaded at this point.
if (m_blockman.m_block_index.count(params.GenesisBlock().GetHash()))
if (m_blockman.m_block_index.contains(params.GenesisBlock().GetHash()))
return true;
try {


@ -48,7 +48,7 @@ ThresholdState AbstractThresholdConditionChecker::GetStateFor(const CBlockIndex*
// Walk backwards in steps of nPeriod to find a pindexPrev whose information is known
std::vector<const CBlockIndex*> vToCompute;
while (cache.count(pindexPrev) == 0) {
while (!cache.contains(pindexPrev)) {
if (pindexPrev == nullptr) {
// The genesis block is by definition defined.
cache[pindexPrev] = ThresholdState::DEFINED;
@ -64,7 +64,7 @@ ThresholdState AbstractThresholdConditionChecker::GetStateFor(const CBlockIndex*
}
// At this point, cache[pindexPrev] is known
assert(cache.count(pindexPrev));
assert(cache.contains(pindexPrev));
ThresholdState state = cache[pindexPrev];
// Now walk forward and compute the state of descendants of pindexPrev


@ -19,7 +19,7 @@ bool CCoinControl::HasSelected() const
bool CCoinControl::IsSelected(const COutPoint& outpoint) const
{
return m_selected.count(outpoint) > 0;
return m_selected.contains(outpoint);
}
bool CCoinControl::IsExternalSelected(const COutPoint& outpoint) const


@ -39,7 +39,7 @@ static feebumper::Result PreconditionChecks(const CWallet& wallet, const CWallet
return feebumper::Result::WALLET_ERROR;
}
if (wtx.mapValue.count("replaced_by_txid")) {
if (wtx.mapValue.contains("replaced_by_txid")) {
errors.push_back(Untranslated(strprintf("Cannot bump transaction %s which was already bumped by transaction %s", wtx.GetHash().ToString(), wtx.mapValue.at("replaced_by_txid"))));
return feebumper::Result::WALLET_ERROR;
}


@ -747,7 +747,7 @@ bool BerkeleyROBatch::ReadKey(DataStream&& key, DataStream& value)
bool BerkeleyROBatch::HasKey(DataStream&& key)
{
SerializeData key_data{key.begin(), key.end()};
return m_database.m_records.count(key_data) > 0;
return m_database.m_records.contains(key_data);
}
BerkeleyROCursor::BerkeleyROCursor(const BerkeleyRODatabase& database, std::span<const std::byte> prefix)


@ -227,7 +227,7 @@ bool CachedTxIsTrusted(const CWallet& wallet, const CWalletTx& wtx, std::set<Txi
// Check that this specific input being spent is trusted
if (!wallet.IsMine(parentOut)) return false;
// If we've already trusted this parent, continue
if (trusted_parents.count(parent->GetHash())) continue;
if (trusted_parents.contains(parent->GetHash())) continue;
// Recurse to check that the parent is also trusted
if (!CachedTxIsTrusted(wallet, *parent, trusted_parents)) return false;
trusted_parents.insert(parent->GetHash());


@ -526,7 +526,7 @@ RPCHelpMan listdescriptors()
wallet_descriptors.push_back({
descriptor,
wallet_descriptor.creation_time,
active_spk_mans.count(desc_spk_man) != 0,
active_spk_mans.contains(desc_spk_man),
wallet->IsInternalScriptPubKeyMan(desc_spk_man),
is_range ? std::optional(std::make_pair(wallet_descriptor.range_start, wallet_descriptor.range_end)) : std::nullopt,
wallet_descriptor.next_index


@ -67,7 +67,7 @@ static CAmount GetReceived(const CWallet& wallet, const UniValue& params, bool b
}
for (const CTxOut& txout : wtx.tx->vout) {
if (output_scripts.count(txout.scriptPubKey) > 0) {
if (output_scripts.contains(txout.scriptPubKey)) {
amount += txout.nValue;
}
}
@ -612,7 +612,7 @@ RPCHelpMan listunspent()
bool fValidAddress = ExtractDestination(scriptPubKey, address);
bool reused = avoid_reuse && pwallet->IsSpentKey(scriptPubKey);
if (destinations.size() && (!fValidAddress || !destinations.count(address)))
if (destinations.size() && (!fValidAddress || !destinations.contains(address)))
continue;
UniValue entry(UniValue::VOBJ);


@ -1588,7 +1588,7 @@ RPCHelpMan sendall()
CTxDestination dest;
ExtractDestination(out.scriptPubKey, dest);
std::string addr{EncodeDestination(dest)};
if (addresses_without_amount.count(addr) > 0) {
if (addresses_without_amount.contains(addr)) {
out.nValue = per_output_without_amount;
if (!gave_remaining_to_first) {
out.nValue += remainder % addresses_without_amount.size();


@ -802,7 +802,7 @@ RPCHelpMan abandontransaction()
Txid hash{Txid::FromUint256(ParseHashV(request.params[0], "txid"))};
if (!pwallet->mapWallet.count(hash)) {
if (!pwallet->mapWallet.contains(hash)) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid or non-wallet transaction id");
}
if (!pwallet->AbandonTransaction(hash)) {


@ -311,7 +311,7 @@ static RPCHelpMan setwalletflag()
std::string flag_str = request.params[0].get_str();
bool value = request.params[1].isNull() || request.params[1].get_bool();
if (!STRING_TO_WALLET_FLAG.count(flag_str)) {
if (!STRING_TO_WALLET_FLAG.contains(flag_str)) {
throw JSONRPCError(RPC_INVALID_PARAMETER, strprintf("Unknown wallet flag: %s", flag_str));
}
@ -336,7 +336,7 @@ static RPCHelpMan setwalletflag()
pwallet->UnsetWalletFlag(flag);
}
if (flag && value && WALLET_FLAG_CAVEATS.count(flag)) {
if (flag && value && WALLET_FLAG_CAVEATS.contains(flag)) {
res.pushKV("warnings", WALLET_FLAG_CAVEATS.at(flag));
}
@ -548,10 +548,10 @@ RPCHelpMan simulaterawtransaction()
// broadcast, we will lose everything in these
for (const auto& txin : mtx.vin) {
const auto& outpoint = txin.prevout;
if (spent.count(outpoint)) {
if (spent.contains(outpoint)) {
throw JSONRPCError(RPC_INVALID_PARAMETER, "Transaction(s) are spending the same output more than once");
}
if (new_utxos.count(outpoint)) {
if (new_utxos.contains(outpoint)) {
changes -= new_utxos.at(outpoint);
new_utxos.erase(outpoint);
} else {


@ -339,7 +339,7 @@ bool LegacyDataSPKM::AddCryptedKeyInner(const CPubKey &vchPubKey, const std::vec
bool LegacyDataSPKM::HaveWatchOnly(const CScript &dest) const
{
LOCK(cs_KeyStore);
return setWatchOnly.count(dest) > 0;
return setWatchOnly.contains(dest);
}
bool LegacyDataSPKM::LoadWatchOnly(const CScript &dest)
@ -385,7 +385,7 @@ bool LegacyDataSPKM::HaveKey(const CKeyID &address) const
if (!m_storage.HasEncryptionKeys()) {
return FillableSigningProvider::HaveKey(address);
}
return mapCryptedKeys.count(address) > 0;
return mapCryptedKeys.contains(address);
}
bool LegacyDataSPKM::GetKey(const CKeyID &address, CKey& keyOut) const
@ -560,7 +560,7 @@ std::optional<MigrationData> LegacyDataSPKM::MigrateToDescriptor()
keyid_it++;
continue;
}
if (!meta.hd_seed_id.IsNull() && (m_hd_chain.seed_id == meta.hd_seed_id || m_inactive_hd_chains.count(meta.hd_seed_id) > 0)) {
if (!meta.hd_seed_id.IsNull() && (m_hd_chain.seed_id == meta.hd_seed_id || m_inactive_hd_chains.contains(meta.hd_seed_id))) {
keyid_it = keyids.erase(keyid_it);
continue;
}
@ -1039,7 +1039,7 @@ bool DescriptorScriptPubKeyMan::TopUpWithDB(WalletBatch& batch, unsigned int siz
}
for (const auto& pk_pair : out_keys.pubkeys) {
const CPubKey& pubkey = pk_pair.second;
if (m_map_pubkeys.count(pubkey) != 0) {
if (m_map_pubkeys.contains(pubkey)) {
// We don't need to give an error here.
// It doesn't matter which of many valid indexes the pubkey has, we just need an index where we can derive it and its private key
continue;
@ -1107,8 +1107,8 @@ bool DescriptorScriptPubKeyMan::AddDescriptorKeyWithDB(WalletBatch& batch, const
assert(!m_storage.IsWalletFlagSet(WALLET_FLAG_DISABLE_PRIVATE_KEYS));
// Check if provided key already exists
if (m_map_keys.find(pubkey.GetID()) != m_map_keys.end() ||
m_map_crypted_keys.find(pubkey.GetID()) != m_map_crypted_keys.end()) {
if (m_map_keys.contains(pubkey.GetID()) ||
m_map_crypted_keys.contains(pubkey.GetID())) {
return true;
}
@ -1441,14 +1441,14 @@ void DescriptorScriptPubKeyMan::SetCache(const DescriptorCache& cache)
// Add all of the scriptPubKeys to the scriptPubKey set
new_spks.insert(scripts_temp.begin(), scripts_temp.end());
for (const CScript& script : scripts_temp) {
if (m_map_script_pub_keys.count(script) != 0) {
if (m_map_script_pub_keys.contains(script)) {
throw std::runtime_error(strprintf("Error: Already loaded script at index %d as being at index %d", i, m_map_script_pub_keys[script]));
}
m_map_script_pub_keys[script] = i;
}
for (const auto& pk_pair : out_keys.pubkeys) {
const CPubKey& pubkey = pk_pair.second;
if (m_map_pubkeys.count(pubkey) != 0) {
if (m_map_pubkeys.contains(pubkey)) {
// We don't need to give an error here.
// It doesn't matter which of many valid indexes the pubkey has, we just need an index where we can derive it and its private key
continue;
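
When the iterator returned by `find()` is never dereferenced, comparing it against `end()` is pure presence-checking, which is exactly what `contains()` spells out in the `AddDescriptorKeyWithDB` hunk above. A standalone sketch with stand-in types (the `KeyID` alias, members, and value types below are illustrative, not the wallet's real definitions):

```cpp
// Sketch of the find()-vs-contains() rewrite, assuming hypothetical key-store members.
#include <map>
#include <string>
#include <vector>

using KeyID = std::string; // stand-in for CKeyID

struct KeyStoreSketch {
    std::map<KeyID, int> m_map_keys;                                 // plaintext keys (stand-in)
    std::map<KeyID, std::vector<unsigned char>> m_map_crypted_keys;  // encrypted keys (stand-in)

    // Pre-C++20 spelling: the iterators are never used, only compared to end().
    bool HasKeyOld(const KeyID& id) const
    {
        return m_map_keys.find(id) != m_map_keys.end() ||
               m_map_crypted_keys.find(id) != m_map_crypted_keys.end();
    }

    // C++20 spelling: contains() states the presence check directly.
    bool HasKeyNew(const KeyID& id) const
    {
        return m_map_keys.contains(id) || m_map_crypted_keys.contains(id);
    }
};
```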


@ -219,7 +219,7 @@ void CoinsResult::Erase(const std::unordered_set<COutPoint, SaltedOutpointHasher
for (auto& [type, vec] : coins) {
auto remove_it = std::remove_if(vec.begin(), vec.end(), [&](const COutput& coin) {
// remove it if it's on the set
if (coins_to_remove.count(coin.outpoint) == 0) return false;
if (!coins_to_remove.contains(coin.outpoint)) return false;
// update cached amounts
total_amount -= coin.txout.nValue;
@ -382,7 +382,7 @@ CoinsResult AvailableCoins(const CWallet& wallet,
// be a 1-block reorg away from the chain where transactions A and C
// were accepted to another chain where B, B', and C were all
// accepted.
if (nDepth == 0 && wtx.mapValue.count("replaces_txid")) {
if (nDepth == 0 && wtx.mapValue.contains("replaces_txid")) {
safeTx = false;
}
@ -394,7 +394,7 @@ CoinsResult AvailableCoins(const CWallet& wallet,
// intending to replace A', but potentially resulting in a scenario
// where A, A', and D could all be accepted (instead of just B and
// D, or just A and A' like the user would want).
if (nDepth == 0 && wtx.mapValue.count("replaced_by_txid")) {
if (nDepth == 0 && wtx.mapValue.contains("replaced_by_txid")) {
safeTx = false;
}


@ -163,7 +163,7 @@ bool MockableBatch::HasKey(DataStream&& key)
return false;
}
SerializeData key_data{key.begin(), key.end()};
return m_records.count(key_data) > 0;
return m_records.contains(key_data);
}
bool MockableBatch::ErasePrefix(std::span<const std::byte> prefix)


@ -631,8 +631,8 @@ BOOST_FIXTURE_TEST_CASE(CreateWallet, TestChain100Setup)
BOOST_CHECK_EQUAL(addtx_count, 3);
{
LOCK(wallet->cs_wallet);
BOOST_CHECK_EQUAL(wallet->mapWallet.count(block_tx.GetHash()), 1U);
BOOST_CHECK_EQUAL(wallet->mapWallet.count(mempool_tx.GetHash()), 1U);
BOOST_CHECK(wallet->mapWallet.contains(block_tx.GetHash()));
BOOST_CHECK(wallet->mapWallet.contains(mempool_tx.GetHash()));
}
@ -670,8 +670,8 @@ BOOST_FIXTURE_TEST_CASE(CreateWallet, TestChain100Setup)
BOOST_CHECK_EQUAL(addtx_count, 2 + 2);
{
LOCK(wallet->cs_wallet);
BOOST_CHECK_EQUAL(wallet->mapWallet.count(block_tx.GetHash()), 1U);
BOOST_CHECK_EQUAL(wallet->mapWallet.count(mempool_tx.GetHash()), 1U);
BOOST_CHECK(wallet->mapWallet.contains(block_tx.GetHash()));
BOOST_CHECK(wallet->mapWallet.contains(mempool_tx.GetHash()));
}
@ -710,13 +710,13 @@ BOOST_FIXTURE_TEST_CASE(RemoveTxs, TestChain100Setup)
LOCK(wallet->cs_wallet);
BOOST_CHECK(wallet->HasWalletSpend(prev_tx));
BOOST_CHECK_EQUAL(wallet->mapWallet.count(block_hash), 1u);
BOOST_CHECK(wallet->mapWallet.contains(block_hash));
std::vector<Txid> vHashIn{ block_hash };
BOOST_CHECK(wallet->RemoveTxs(vHashIn));
BOOST_CHECK(!wallet->HasWalletSpend(prev_tx));
BOOST_CHECK_EQUAL(wallet->mapWallet.count(block_hash), 0u);
BOOST_CHECK(!wallet->mapWallet.contains(block_hash));
}
TestUnloadWallet(std::move(wallet));
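
In the test assertions above, the old form compares a count against `1U` while the new form asserts presence directly, which is also what a failure should describe. A small Boost.Test sketch of the same rewrite (hypothetical test case and keys, not `wallet_tests.cpp`):

```cpp
// Standalone Boost.Test sketch of asserting map membership (C++20).
#define BOOST_TEST_MODULE contains_example
#include <boost/test/included/unit_test.hpp>

#include <map>
#include <string>

BOOST_AUTO_TEST_CASE(map_presence_checks)
{
    std::map<std::string, int> map_wallet{{"txid_a", 1}};

    // Old style: a failure is reported as a numeric mismatch,
    // even though only presence is being tested.
    BOOST_CHECK_EQUAL(map_wallet.count("txid_a"), 1U);

    // New style: assert presence (or absence) directly.
    BOOST_CHECK(map_wallet.contains("txid_a"));
    BOOST_CHECK(!map_wallet.contains("txid_b"));
}
```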


@ -266,7 +266,7 @@ void WaitForDeleteWallet(std::shared_ptr<CWallet>&& wallet)
wallet.reset();
{
WAIT_LOCK(g_wallet_release_mutex, lock);
while (g_unloading_wallet_set.count(name) == 1) {
while (g_unloading_wallet_set.contains(name)) {
g_wallet_release_cv.wait(lock);
}
}
@ -925,7 +925,7 @@ bool CWallet::MarkReplaced(const Txid& originalHash, const Txid& newHash)
CWalletTx& wtx = (*mi).second;
// Ensure for now that we're not overwriting data
assert(wtx.mapValue.count("replaced_by_txid") == 0);
assert(!wtx.mapValue.contains("replaced_by_txid"));
wtx.mapValue["replaced_by_txid"] = newHash.ToString();
@ -1161,7 +1161,7 @@ bool CWallet::AddToWalletIfInvolvingMe(const CTransactionRef& ptx, const SyncTxS
}
}
bool fExisted = mapWallet.count(tx.GetHash()) != 0;
bool fExisted = mapWallet.contains(tx.GetHash());
if (fExisted && !fUpdate) return false;
if (fExisted || IsMine(tx) || IsFromMe(tx))
{
@ -1337,7 +1337,7 @@ void CWallet::RecursiveUpdateTxState(WalletBatch* batch, const Txid& tx_hash, co
for (unsigned int i = 0; i < wtx.tx->vout.size(); ++i) {
std::pair<TxSpends::const_iterator, TxSpends::const_iterator> range = mapTxSpends.equal_range(COutPoint(now, i));
for (TxSpends::const_iterator iter = range.first; iter != range.second; ++iter) {
if (!done.count(iter->second)) {
if (!done.contains(iter->second)) {
todo.insert(iter->second);
}
}
@ -1526,7 +1526,7 @@ void CWallet::blockDisconnected(const interfaces::BlockInfo& block)
for (const CTxIn& tx_in : ptx->vin) {
// No other wallet transactions conflicted with this transaction
if (mapTxSpends.count(tx_in.prevout) < 1) continue;
if (!mapTxSpends.contains(tx_in.prevout)) continue;
std::pair<TxSpends::const_iterator, TxSpends::const_iterator> range = mapTxSpends.equal_range(tx_in.prevout);
@ -2535,7 +2535,7 @@ void CWallet::MarkDestinationsDirty(const std::set<CTxDestination>& destinations
if (wtx.m_is_cache_empty) continue;
for (unsigned int i = 0; i < wtx.tx->vout.size(); i++) {
CTxDestination dst;
if (ExtractDestination(wtx.tx->vout[i].scriptPubKey, dst) && destinations.count(dst)) {
if (ExtractDestination(wtx.tx->vout[i].scriptPubKey, dst) && destinations.contains(dst)) {
wtx.MarkDirty();
break;
}
@ -2679,7 +2679,7 @@ bool CWallet::UnlockAllCoins()
bool CWallet::IsLockedCoin(const COutPoint& output) const
{
AssertLockHeld(cs_wallet);
return m_locked_coins.count(output) > 0;
return m_locked_coins.contains(output);
}
void CWallet::ListLockedCoins(std::vector<COutPoint>& vOutpts) const
@ -3398,7 +3398,7 @@ std::set<ScriptPubKeyMan*> CWallet::GetScriptPubKeyMans(const CScript& script) c
ScriptPubKeyMan* CWallet::GetScriptPubKeyMan(const uint256& id) const
{
if (m_spk_managers.count(id) > 0) {
if (m_spk_managers.contains(id)) {
return m_spk_managers.at(id).get();
}
return nullptr;
@ -3683,7 +3683,7 @@ DescriptorScriptPubKeyMan* CWallet::GetDescriptorScriptPubKeyMan(const WalletDes
std::optional<bool> CWallet::IsInternalScriptPubKeyMan(ScriptPubKeyMan* spk_man) const
{
// only active ScriptPubKeyMan can be internal
if (!GetActiveScriptPubKeyMans().count(spk_man)) {
if (!GetActiveScriptPubKeyMans().contains(spk_man)) {
return std::nullopt;
}
@ -3878,7 +3878,7 @@ util::Result<void> CWallet::ApplyMigrationData(WalletBatch& local_wallet_batch,
if (!data.solvable_descs.empty()) Assume(!data.solvable_wallet->m_cached_spks.empty());
for (auto& desc_spkm : data.desc_spkms) {
if (m_spk_managers.count(desc_spkm->GetID()) > 0) {
if (m_spk_managers.contains(desc_spkm->GetID())) {
return util::Error{_("Error: Duplicate descriptors created during migration. Your wallet may be corrupted.")};
}
uint256 id = desc_spkm->GetID();
@ -4026,7 +4026,7 @@ util::Result<void> CWallet::ApplyMigrationData(WalletBatch& local_wallet_batch,
if (require_transfer && !copied) {
// Skip invalid/non-watched scripts that will not be migrated
if (not_migrated_dests.count(dest) > 0) {
if (not_migrated_dests.contains(dest)) {
dests_to_delete.push_back(dest);
continue;
}
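
`mapTxSpends` in the hunk above is a multi-key container (it is queried with `equal_range`), so its `count()` can exceed 1; the `< 1` comparison is still safe to rewrite because only zero-vs-non-zero checks are equivalent to `contains()` on such containers, whereas `== 1`-style checks are not. A standalone sketch of that distinction (illustrative values only, not wallet code):

```cpp
// Sketch of when count() comparisons can and cannot be replaced by contains().
#include <cassert>
#include <set>

int main()
{
    std::multiset<int> spends{7, 7};   // duplicate keys are allowed

    assert(spends.contains(7));        // true: at least one matching element
    assert(spends.count(7) == 2);      // count() reports the multiplicity
    assert(!(spends.count(7) == 1));   // so "== 1" is NOT the same as contains()

    // The zero/non-zero forms remain equivalent and safe to rewrite:
    assert((spends.count(7) > 0) == spends.contains(7));
    assert((spends.count(8) < 1) == !spends.contains(8));
}
```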


@ -409,7 +409,7 @@ bool LoadEncryptionKey(CWallet* pwallet, DataStream& ssKey, DataStream& ssValue,
ssKey >> nID;
CMasterKey kMasterKey;
ssValue >> kMasterKey;
if(pwallet->mapMasterKeys.count(nID) != 0)
if(pwallet->mapMasterKeys.contains(nID))
{
strErr = strprintf("Error reading wallet database: duplicate CMasterKey id %u", nID);
return false;