fix(rpc): pass blockhash into TxToJSON so that getspecialtxes could show correct `instantlock`/`chainlock` values (dashpay#5774)

## Issue being fixed or feature implemented
`instantlock` and `chainlock` are broken in `getspecialtxes`: they are always reported as `false`.

kudos to @thephez for finding the issue

## What was done?
Pass the actual block hash into `TxToJSON` instead of an empty `uint256()`, and rename the local variable from `hash` to the self-describing `blockhash`.

## How Has This Been Tested?
Run `getspecialtxes` on a node with and without the patch; `feature_llmq_chainlocks.py` was also extended to assert the `instantlock`/`chainlock` values.
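
For reference, a minimal sketch of such a manual check, assuming a node reachable over RPC via `python-bitcoinrpc` (the credentials, host, and port below are placeholders, not part of this PR):

```python
from bitcoinrpc.authproxy import AuthServiceProxy

# Placeholder RPC credentials/port; adjust to match your dash.conf.
node = AuthServiceProxy("http://rpcuser:rpcpass@127.0.0.1:9998")

blockhash = node.getbestblockhash()
# type=5 (CbTx), count=1, skip=0, verbosity=2 (full transaction JSON)
cbtx = node.getspecialtxes(blockhash, 5, 1, 0, 2)[0]

# Unpatched nodes always report False here; patched nodes report the real status.
print(cbtx["instantlock"], cbtx["chainlock"])
```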

## Breaking Changes
`instantlock` and `chainlock` will now show actual values instead of `false` all the time (not sure if that qualifies as "breaking" though).

## Checklist:
- [x] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have added or updated relevant unit/integration/functional/e2e
tests
- [ ] I have made corresponding changes to the documentation
- [x] I have assigned this pull request to a milestone _(for repository
code-owners and collaborators only)_
UdjinM6 authored and ogabrielides committed Dec 22, 2023
1 parent 578f46b commit 8d3b00b
Showing 3 changed files with 19 additions and 4 deletions.
doc/release-notes-5774.md (4 additions, 0 deletions)
@@ -0,0 +1,4 @@
+RPC changes
+-----------
+
+In `getspecialtxes` `instantlock` and `chainlock` fields were always `false`. They should show actual values now.
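
As an illustration of the corrected behavior, a sketch of how a caller might consume these fields (`node` is an assumed RPC proxy object; the count/skip values are illustrative):

```python
# Sketch: report lock status for CbTx (special tx type 5) entries in a block.
def cbtx_lock_status(node, blockhash):
    # verbosity=2 returns full transaction JSON, including the lock fields
    txes = node.getspecialtxes(blockhash, 5, 10, 0, 2)
    return [(tx["txid"], tx["instantlock"], tx["chainlock"]) for tx in txes]
```
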
src/rpc/blockchain.cpp (3 additions, 3 deletions)
@@ -2462,7 +2462,7 @@ static UniValue getspecialtxes(const JSONRPCRequest& request)
CTxMemPool& mempool = EnsureMemPool(node);
LLMQContext& llmq_ctx = EnsureLLMQContext(node);

-uint256 hash(ParseHashV(request.params[0], "blockhash"));
+uint256 blockhash(ParseHashV(request.params[0], "blockhash"));

int nTxType = -1;
if (!request.params[1].isNull()) {
@@ -2491,7 +2491,7 @@ static UniValue getspecialtxes(const JSONRPCRequest& request)
}
}

-const CBlockIndex* pblockindex = chainman.m_blockman.LookupBlockIndex(hash);
+const CBlockIndex* pblockindex = chainman.m_blockman.LookupBlockIndex(blockhash);
if (!pblockindex) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Block not found");
}
@@ -2519,7 +2519,7 @@ static UniValue getspecialtxes(const JSONRPCRequest& request)
case 2 :
{
UniValue objTx(UniValue::VOBJ);
-TxToJSON(*tx, uint256(), mempool, chainman.ActiveChainstate(), *llmq_ctx.clhandler, *llmq_ctx.isman, objTx);
+TxToJSON(*tx, blockhash, mempool, chainman.ActiveChainstate(), *llmq_ctx.clhandler, *llmq_ctx.isman, objTx);
result.push_back(objTx);
break;
}
test/functional/feature_llmq_chainlocks.py (12 additions, 1 deletion)
@@ -40,8 +40,13 @@ def run_test(self):
self.test_coinbase_best_cl(self.nodes[0], expected_cl_in_cb=False)

# v20 is active, no quorums, no CLs - null CL in CbTx
-self.nodes[0].generate(1)
+nocl_block_hash = self.nodes[0].generate(1)[0]
self.test_coinbase_best_cl(self.nodes[0], expected_cl_in_cb=True, expected_null_cl=True)
+cbtx = self.nodes[0].getspecialtxes(nocl_block_hash, 5, 1, 0, 2)[0]
+assert_equal(cbtx["instantlock"], False)
+assert_equal(cbtx["instantlock_internal"], False)
+assert_equal(cbtx["chainlock"], False)
+

self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
@@ -55,6 +60,12 @@ def run_test(self):
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
self.test_coinbase_best_cl(self.nodes[0])

+# ChainLock locks all the blocks below it so nocl_block_hash should be locked too
+cbtx = self.nodes[0].getspecialtxes(nocl_block_hash, 5, 1, 0, 2)[0]
+assert_equal(cbtx["instantlock"], True)
+assert_equal(cbtx["instantlock_internal"], False)
+assert_equal(cbtx["chainlock"], True)
+
self.log.info("Mine many blocks, wait for chainlock")
self.nodes[0].generate(20)
# We need more time here due to 20 blocks being generated at once
