Merge branch 'beta' into mergemaster
# Conflicts: # src/main.cpp
This commit is contained in:
@@ -11,10 +11,15 @@ export BITCOIND=${REAL_BITCOIND}
|
||||
#Run the tests
|
||||
|
||||
testScripts=(
|
||||
'paymentdisclosure.py'
|
||||
'prioritisetransaction.py'
|
||||
'wallet_treestate.py'
|
||||
'wallet_anchorfork.py'
|
||||
'wallet_protectcoinbase.py'
|
||||
'wallet_shieldcoinbase.py'
|
||||
'wallet_mergetoaddress.py'
|
||||
'wallet.py'
|
||||
'wallet_overwintertx.py'
|
||||
'wallet_nullifiers.py'
|
||||
'wallet_1941.py'
|
||||
'listtransactions.py'
|
||||
@@ -25,7 +30,10 @@ testScripts=(
|
||||
'rawtransactions.py'
|
||||
'rest.py'
|
||||
'mempool_spendcoinbase.py'
|
||||
'mempool_coinbase_spends.py'
|
||||
'mempool_reorg.py'
|
||||
'mempool_tx_input_limit.py'
|
||||
'mempool_nu_activation.py'
|
||||
'mempool_tx_expiry.py'
|
||||
'httpbasics.py'
|
||||
'zapwallettxes.py'
|
||||
'proxy_test.py'
|
||||
@@ -33,17 +41,21 @@ testScripts=(
|
||||
'fundrawtransaction.py'
|
||||
'signrawtransactions.py'
|
||||
'walletbackup.py'
|
||||
'key_import_export.py'
|
||||
'nodehandling.py'
|
||||
'reindex.py'
|
||||
'decodescript.py'
|
||||
'disablewallet.py'
|
||||
'zcjoinsplit.py'
|
||||
'zcjoinsplitdoublespend.py'
|
||||
'zkey_import_export.py'
|
||||
'reorg_limit.py'
|
||||
'getblocktemplate.py'
|
||||
'bip65-cltv-p2p.py'
|
||||
'bipdersig-p2p.py'
|
||||
'overwinter_peer_management.py'
|
||||
);
|
||||
testScriptsExt=(
|
||||
'bipdersig-p2p.py'
|
||||
'bipdersig.py'
|
||||
'getblocktemplate_longpoll.py'
|
||||
'getblocktemplate_proposals.py'
|
||||
'pruning.py'
|
||||
@@ -65,6 +77,10 @@ if [ "x$ENABLE_ZMQ" = "x1" ]; then
|
||||
testScripts+=('zmq_test.py')
|
||||
fi
|
||||
|
||||
if [ "x$ENABLE_PROTON" = "x1" ]; then
|
||||
testScripts+=('proton_test.py')
|
||||
fi
|
||||
|
||||
extArg="-extended"
|
||||
passOn=${@#$extArg}
|
||||
|
||||
@@ -78,7 +94,7 @@ function runTestScript
|
||||
|
||||
echo -e "=== Running testscript ${testName} ==="
|
||||
|
||||
if eval "$@" | sed 's/^/ /'
|
||||
if eval "$@"
|
||||
then
|
||||
successCount=$(expr $successCount + 1)
|
||||
echo "--- Success: ${testName} ---"
|
||||
|
||||
@@ -11,6 +11,7 @@ EXEEXT="@EXEEXT@"
|
||||
@BUILD_BITCOIN_UTILS_TRUE@ENABLE_UTILS=1
|
||||
@BUILD_BITCOIND_TRUE@ENABLE_BITCOIND=1
|
||||
@ENABLE_ZMQ_TRUE@ENABLE_ZMQ=1
|
||||
@ENABLE_PROTON_TRUE@ENABLE_PROTON=1
|
||||
|
||||
REAL_BITCOIND="$BUILDDIR/src/zcashd${EXEEXT}"
|
||||
REAL_BITCOINCLI="$BUILDDIR/src/zcash-cli${EXEEXT}"
|
||||
|
||||
@@ -1,13 +1,8 @@
|
||||
Regression tests of RPC interface
|
||||
=================================
|
||||
|
||||
### [python-bitcoinrpc](https://github.com/jgarzik/python-bitcoinrpc)
|
||||
Git subtree of [https://github.com/jgarzik/python-bitcoinrpc](https://github.com/jgarzik/python-bitcoinrpc).
|
||||
Changes to python-bitcoinrpc should be made upstream, and then
|
||||
pulled here using git subtree.
|
||||
|
||||
### [test_framework/test_framework.py](test_framework/test_framework.py)
|
||||
Base class for new regression tests.
|
||||
Base class for RPC regression tests.
|
||||
|
||||
### [test_framework/util.py](test_framework/util.py)
|
||||
Generally useful functions.
|
||||
@@ -35,8 +30,8 @@ If you set the environment variable `PYTHON_DEBUG=1` you will get some debug out
|
||||
|
||||
A 200-block -regtest blockchain and wallets for four nodes
|
||||
is created the first time a regression test is run and
|
||||
is stored in the cache/ directory. Each node has 25 mature
|
||||
blocks (25*50=1250 BTC) in its wallet.
|
||||
is stored in the cache/ directory. Each node has the miner
|
||||
subsidy from 25 mature blocks (25*10=250 ZEC) in its wallet.
|
||||
|
||||
After the first run, the cache/ blockchain and wallets are
|
||||
copied into a temporary directory and used as the initial
|
||||
@@ -47,5 +42,5 @@ to recover with:
|
||||
|
||||
```bash
|
||||
rm -rf cache
|
||||
killall bitcoind
|
||||
killall zcashd
|
||||
```
|
||||
|
||||
@@ -5,35 +5,23 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import ComparisonTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import start_nodes
|
||||
from test_framework.mininode import CTransaction, NetworkThread
|
||||
from test_framework.blocktools import create_coinbase, create_block
|
||||
from test_framework.comptool import TestInstance, TestManager
|
||||
from test_framework.script import CScript, OP_1NEGATE, OP_NOP2, OP_DROP
|
||||
from binascii import hexlify, unhexlify
|
||||
from binascii import unhexlify
|
||||
import cStringIO
|
||||
import time
|
||||
|
||||
def cltv_invalidate(tx):
|
||||
'''Modify the signature in vin 0 of the tx to fail CLTV
|
||||
|
||||
Prepends -1 CLTV DROP in the scriptSig itself.
|
||||
'''
|
||||
tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP2, OP_DROP] +
|
||||
list(CScript(tx.vin[0].scriptSig)))
|
||||
|
||||
'''
|
||||
This test is meant to exercise BIP65 (CHECKLOCKTIMEVERIFY)
|
||||
This test is meant to exercise BIP65 (CHECKLOCKTIMEVERIFY).
|
||||
Connect to a single node.
|
||||
Mine 2 (version 3) blocks (save the coinbases for later).
|
||||
Generate 98 more version 3 blocks, verify the node accepts.
|
||||
Mine 749 version 4 blocks, verify the node accepts.
|
||||
Check that the new CLTV rules are not enforced on the 750th version 4 block.
|
||||
Check that the new CLTV rules are enforced on the 751st version 4 block.
|
||||
Mine 199 new version blocks.
|
||||
Mine 1 old-version block.
|
||||
Mine 1 new version block.
|
||||
Mine 1 old version block, see that the node rejects.
|
||||
Mine a coinbase block, and then ...
|
||||
Mine 1 version 4 block.
|
||||
Check that the CLTV rules are enforced.
|
||||
|
||||
TODO: factor out common code from {bipdersig-p2p,bip65-cltv-p2p}.py.
|
||||
'''
|
||||
|
||||
class BIP65Test(ComparisonTestFramework):
|
||||
@@ -42,10 +30,10 @@ class BIP65Test(ComparisonTestFramework):
|
||||
self.num_nodes = 1
|
||||
|
||||
def setup_network(self):
|
||||
# Must set the blockversion for this test
|
||||
self.nodes = start_nodes(1, self.options.tmpdir,
|
||||
extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=3']],
|
||||
extra_args=[['-debug', '-whitelist=127.0.0.1']],
|
||||
binary=[self.options.testbinary])
|
||||
self.is_network_split = False
|
||||
|
||||
def run_test(self):
|
||||
test = TestManager(self, self.options.tmpdir)
|
||||
@@ -64,112 +52,45 @@ class BIP65Test(ComparisonTestFramework):
|
||||
tx.deserialize(f)
|
||||
return tx
|
||||
|
||||
def get_tests(self):
|
||||
def invalidate_transaction(self, tx):
|
||||
'''
|
||||
Modify the signature in vin 0 of the tx to fail CLTV
|
||||
|
||||
self.coinbase_blocks = self.nodes[0].generate(2)
|
||||
Prepends -1 CLTV DROP in the scriptSig itself.
|
||||
'''
|
||||
tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP2, OP_DROP] +
|
||||
list(CScript(tx.vin[0].scriptSig)))
|
||||
|
||||
def get_tests(self):
|
||||
self.coinbase_blocks = self.nodes[0].generate(1)
|
||||
self.nodes[0].generate(100)
|
||||
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
|
||||
self.nodeaddress = self.nodes[0].getnewaddress()
|
||||
self.last_block_time = time.time()
|
||||
|
||||
''' 98 more version 3 blocks '''
|
||||
test_blocks = []
|
||||
for i in xrange(98):
|
||||
block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
|
||||
block.nVersion = 3
|
||||
block.rehash()
|
||||
block.solve()
|
||||
test_blocks.append([block, True])
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance(test_blocks, sync_every_block=False)
|
||||
'''Check that the rules are enforced.'''
|
||||
for valid in (True, False):
|
||||
spendtx = self.create_transaction(self.nodes[0],
|
||||
self.coinbase_blocks[0],
|
||||
self.nodeaddress, 1.0)
|
||||
if not valid:
|
||||
self.invalidate_transaction(spendtx)
|
||||
spendtx.rehash()
|
||||
|
||||
''' Mine 749 version 4 blocks '''
|
||||
test_blocks = []
|
||||
for i in xrange(749):
|
||||
block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
|
||||
gbt = self.nodes[0].getblocktemplate()
|
||||
self.block_time = gbt["mintime"] + 1
|
||||
self.block_bits = int("0x" + gbt["bits"], 0)
|
||||
|
||||
block = create_block(self.tip, create_coinbase(101),
|
||||
self.block_time, self.block_bits)
|
||||
block.nVersion = 4
|
||||
block.vtx.append(spendtx)
|
||||
block.hashMerkleRoot = block.calc_merkle_root()
|
||||
block.rehash()
|
||||
block.solve()
|
||||
test_blocks.append([block, True])
|
||||
self.last_block_time += 1
|
||||
self.block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance(test_blocks, sync_every_block=False)
|
||||
yield TestInstance([[block, valid]])
|
||||
|
||||
'''
|
||||
Check that the new CLTV rules are not enforced in the 750th
|
||||
version 3 block.
|
||||
'''
|
||||
spendtx = self.create_transaction(self.nodes[0],
|
||||
self.coinbase_blocks[0], self.nodeaddress, 1.0)
|
||||
cltv_invalidate(spendtx)
|
||||
spendtx.rehash()
|
||||
|
||||
block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
|
||||
block.nVersion = 4
|
||||
block.vtx.append(spendtx)
|
||||
block.hashMerkleRoot = block.calc_merkle_root()
|
||||
block.rehash()
|
||||
block.solve()
|
||||
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance([[block, True]])
|
||||
|
||||
'''
|
||||
Check that the new CLTV rules are enforced in the 751st version 4
|
||||
block.
|
||||
'''
|
||||
spendtx = self.create_transaction(self.nodes[0],
|
||||
self.coinbase_blocks[1], self.nodeaddress, 1.0)
|
||||
cltv_invalidate(spendtx)
|
||||
spendtx.rehash()
|
||||
|
||||
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
|
||||
block.nVersion = 4
|
||||
block.vtx.append(spendtx)
|
||||
block.hashMerkleRoot = block.calc_merkle_root()
|
||||
block.rehash()
|
||||
block.solve()
|
||||
self.last_block_time += 1
|
||||
yield TestInstance([[block, False]])
|
||||
|
||||
''' Mine 199 new version blocks on last valid tip '''
|
||||
test_blocks = []
|
||||
for i in xrange(199):
|
||||
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
|
||||
block.nVersion = 4
|
||||
block.rehash()
|
||||
block.solve()
|
||||
test_blocks.append([block, True])
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance(test_blocks, sync_every_block=False)
|
||||
|
||||
''' Mine 1 old version block '''
|
||||
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
|
||||
block.nVersion = 3
|
||||
block.rehash()
|
||||
block.solve()
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance([[block, True]])
|
||||
|
||||
''' Mine 1 new version block '''
|
||||
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
|
||||
block.nVersion = 4
|
||||
block.rehash()
|
||||
block.solve()
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance([[block, True]])
|
||||
|
||||
''' Mine 1 old version block, should be invalid '''
|
||||
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
|
||||
block.nVersion = 3
|
||||
block.rehash()
|
||||
block.solve()
|
||||
self.last_block_time += 1
|
||||
yield TestInstance([[block, False]])
|
||||
|
||||
if __name__ == '__main__':
|
||||
BIP65Test().main()
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2015 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
#
|
||||
# Test the CHECKLOCKTIMEVERIFY (BIP65) soft-fork logic
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
import os
|
||||
import shutil
|
||||
|
||||
class BIP65Test(BitcoinTestFramework):
|
||||
|
||||
def setup_network(self):
|
||||
self.nodes = []
|
||||
self.nodes.append(start_node(0, self.options.tmpdir, []))
|
||||
self.nodes.append(start_node(1, self.options.tmpdir, ["-blockversion=3"]))
|
||||
self.nodes.append(start_node(2, self.options.tmpdir, ["-blockversion=4"]))
|
||||
connect_nodes(self.nodes[1], 0)
|
||||
connect_nodes(self.nodes[2], 0)
|
||||
self.is_network_split = False
|
||||
self.sync_all()
|
||||
|
||||
def run_test(self):
|
||||
cnt = self.nodes[0].getblockcount()
|
||||
|
||||
# Mine some old-version blocks
|
||||
self.nodes[1].generate(100)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 100):
|
||||
raise AssertionError("Failed to mine 100 version=3 blocks")
|
||||
|
||||
# Mine 750 new-version blocks
|
||||
for i in xrange(15):
|
||||
self.nodes[2].generate(50)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 850):
|
||||
raise AssertionError("Failed to mine 750 version=4 blocks")
|
||||
|
||||
# TODO: check that new CHECKLOCKTIMEVERIFY rules are not enforced
|
||||
|
||||
# Mine 1 new-version block
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 851):
|
||||
raise AssertionFailure("Failed to mine a version=4 blocks")
|
||||
|
||||
# TODO: check that new CHECKLOCKTIMEVERIFY rules are enforced
|
||||
|
||||
# Mine 198 new-version blocks
|
||||
for i in xrange(2):
|
||||
self.nodes[2].generate(99)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 1049):
|
||||
raise AssertionError("Failed to mine 198 version=4 blocks")
|
||||
|
||||
# Mine 1 old-version block
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 1050):
|
||||
raise AssertionError("Failed to mine a version=3 block after 949 version=4 blocks")
|
||||
|
||||
# Mine 1 new-version blocks
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 1051):
|
||||
raise AssertionError("Failed to mine a version=3 block")
|
||||
|
||||
# Mine 1 old-version blocks
|
||||
try:
|
||||
self.nodes[1].generate(1)
|
||||
raise AssertionError("Succeeded to mine a version=3 block after 950 version=4 blocks")
|
||||
except JSONRPCException:
|
||||
pass
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 1051):
|
||||
raise AssertionError("Accepted a version=3 block after 950 version=4 blocks")
|
||||
|
||||
# Mine 1 new-version blocks
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 1052):
|
||||
raise AssertionError("Failed to mine a version=4 block")
|
||||
|
||||
if __name__ == '__main__':
|
||||
BIP65Test().main()
|
||||
@@ -5,55 +5,34 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import ComparisonTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import start_nodes
|
||||
from test_framework.mininode import CTransaction, NetworkThread
|
||||
from test_framework.blocktools import create_coinbase, create_block
|
||||
from test_framework.comptool import TestInstance, TestManager
|
||||
from test_framework.script import CScript
|
||||
from binascii import hexlify, unhexlify
|
||||
from binascii import unhexlify
|
||||
import cStringIO
|
||||
import time
|
||||
|
||||
# A canonical signature consists of:
|
||||
# <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype>
|
||||
def unDERify(tx):
|
||||
'''
|
||||
Make the signature in vin 0 of a tx non-DER-compliant,
|
||||
by adding padding after the S-value.
|
||||
'''
|
||||
scriptSig = CScript(tx.vin[0].scriptSig)
|
||||
newscript = []
|
||||
for i in scriptSig:
|
||||
if (len(newscript) == 0):
|
||||
newscript.append(i[0:-1] + '\0' + i[-1])
|
||||
else:
|
||||
newscript.append(i)
|
||||
tx.vin[0].scriptSig = CScript(newscript)
|
||||
|
||||
|
||||
'''
|
||||
This test is meant to exercise BIP66 (DER SIG).
|
||||
Connect to a single node.
|
||||
Mine 2 (version 2) blocks (save the coinbases for later).
|
||||
Generate 98 more version 2 blocks, verify the node accepts.
|
||||
Mine 749 version 3 blocks, verify the node accepts.
|
||||
Check that the new DERSIG rules are not enforced on the 750th version 3 block.
|
||||
Check that the new DERSIG rules are enforced on the 751st version 3 block.
|
||||
Mine 199 new version blocks.
|
||||
Mine 1 old-version block.
|
||||
Mine 1 new version block.
|
||||
Mine 1 old version block, see that the node rejects.
|
||||
Mine a coinbase block, and then ...
|
||||
Mine 1 version 4 block.
|
||||
Check that the DERSIG rules are enforced.
|
||||
|
||||
TODO: factor out common code from {bipdersig-p2p,bip65-cltv-p2p}.py.
|
||||
'''
|
||||
|
||||
class BIP66Test(ComparisonTestFramework):
|
||||
|
||||
def __init__(self):
|
||||
self.num_nodes = 1
|
||||
|
||||
def setup_network(self):
|
||||
# Must set the blockversion for this test
|
||||
self.nodes = start_nodes(1, self.options.tmpdir,
|
||||
extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=2']],
|
||||
self.nodes = start_nodes(1, self.options.tmpdir,
|
||||
extra_args=[['-debug', '-whitelist=127.0.0.1']],
|
||||
binary=[self.options.testbinary])
|
||||
self.is_network_split = False
|
||||
|
||||
def run_test(self):
|
||||
test = TestManager(self, self.options.tmpdir)
|
||||
@@ -72,112 +51,52 @@ class BIP66Test(ComparisonTestFramework):
|
||||
tx.deserialize(f)
|
||||
return tx
|
||||
|
||||
def get_tests(self):
|
||||
def invalidate_transaction(self, tx):
|
||||
'''
|
||||
Make the signature in vin 0 of a tx non-DER-compliant,
|
||||
by adding padding after the S-value.
|
||||
|
||||
self.coinbase_blocks = self.nodes[0].generate(2)
|
||||
A canonical signature consists of:
|
||||
<30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype>
|
||||
'''
|
||||
scriptSig = CScript(tx.vin[0].scriptSig)
|
||||
newscript = []
|
||||
for i in scriptSig:
|
||||
if (len(newscript) == 0):
|
||||
newscript.append(i[0:-1] + '\0' + i[-1])
|
||||
else:
|
||||
newscript.append(i)
|
||||
tx.vin[0].scriptSig = CScript(newscript)
|
||||
|
||||
def get_tests(self):
|
||||
self.coinbase_blocks = self.nodes[0].generate(1)
|
||||
self.nodes[0].generate(100)
|
||||
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
|
||||
self.nodeaddress = self.nodes[0].getnewaddress()
|
||||
self.last_block_time = time.time()
|
||||
|
||||
''' 98 more version 2 blocks '''
|
||||
test_blocks = []
|
||||
for i in xrange(98):
|
||||
block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
|
||||
block.nVersion = 2
|
||||
'''Check that the rules are enforced.'''
|
||||
for valid in (True, False):
|
||||
spendtx = self.create_transaction(self.nodes[0],
|
||||
self.coinbase_blocks[0],
|
||||
self.nodeaddress, 1.0)
|
||||
if not valid:
|
||||
self.invalidate_transaction(spendtx)
|
||||
spendtx.rehash()
|
||||
|
||||
gbt = self.nodes[0].getblocktemplate()
|
||||
self.block_time = gbt["mintime"] + 1
|
||||
self.block_bits = int("0x" + gbt["bits"], 0)
|
||||
|
||||
block = create_block(self.tip, create_coinbase(101),
|
||||
self.block_time, self.block_bits)
|
||||
block.nVersion = 4
|
||||
block.vtx.append(spendtx)
|
||||
block.hashMerkleRoot = block.calc_merkle_root()
|
||||
block.rehash()
|
||||
block.solve()
|
||||
test_blocks.append([block, True])
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance(test_blocks, sync_every_block=False)
|
||||
yield TestInstance([[block, valid]])
|
||||
|
||||
''' Mine 749 version 3 blocks '''
|
||||
test_blocks = []
|
||||
for i in xrange(749):
|
||||
block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
|
||||
block.nVersion = 3
|
||||
block.rehash()
|
||||
block.solve()
|
||||
test_blocks.append([block, True])
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance(test_blocks, sync_every_block=False)
|
||||
|
||||
'''
|
||||
Check that the new DERSIG rules are not enforced in the 750th
|
||||
version 3 block.
|
||||
'''
|
||||
spendtx = self.create_transaction(self.nodes[0],
|
||||
self.coinbase_blocks[0], self.nodeaddress, 1.0)
|
||||
unDERify(spendtx)
|
||||
spendtx.rehash()
|
||||
|
||||
block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
|
||||
block.nVersion = 3
|
||||
block.vtx.append(spendtx)
|
||||
block.hashMerkleRoot = block.calc_merkle_root()
|
||||
block.rehash()
|
||||
block.solve()
|
||||
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance([[block, True]])
|
||||
|
||||
'''
|
||||
Check that the new DERSIG rules are enforced in the 751st version 3
|
||||
block.
|
||||
'''
|
||||
spendtx = self.create_transaction(self.nodes[0],
|
||||
self.coinbase_blocks[1], self.nodeaddress, 1.0)
|
||||
unDERify(spendtx)
|
||||
spendtx.rehash()
|
||||
|
||||
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
|
||||
block.nVersion = 3
|
||||
block.vtx.append(spendtx)
|
||||
block.hashMerkleRoot = block.calc_merkle_root()
|
||||
block.rehash()
|
||||
block.solve()
|
||||
self.last_block_time += 1
|
||||
yield TestInstance([[block, False]])
|
||||
|
||||
''' Mine 199 new version blocks on last valid tip '''
|
||||
test_blocks = []
|
||||
for i in xrange(199):
|
||||
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
|
||||
block.nVersion = 3
|
||||
block.rehash()
|
||||
block.solve()
|
||||
test_blocks.append([block, True])
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance(test_blocks, sync_every_block=False)
|
||||
|
||||
''' Mine 1 old version block '''
|
||||
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
|
||||
block.nVersion = 2
|
||||
block.rehash()
|
||||
block.solve()
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance([[block, True]])
|
||||
|
||||
''' Mine 1 new version block '''
|
||||
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
|
||||
block.nVersion = 3
|
||||
block.rehash()
|
||||
block.solve()
|
||||
self.last_block_time += 1
|
||||
self.tip = block.sha256
|
||||
yield TestInstance([[block, True]])
|
||||
|
||||
''' Mine 1 old version block, should be invalid '''
|
||||
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
|
||||
block.nVersion = 2
|
||||
block.rehash()
|
||||
block.solve()
|
||||
self.last_block_time += 1
|
||||
yield TestInstance([[block, False]])
|
||||
|
||||
if __name__ == '__main__':
|
||||
BIP66Test().main()
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2014 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
#
|
||||
# Test the BIP66 changeover logic
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
import os
|
||||
import shutil
|
||||
|
||||
class BIP66Test(BitcoinTestFramework):
|
||||
|
||||
def setup_network(self):
|
||||
self.nodes = []
|
||||
self.nodes.append(start_node(0, self.options.tmpdir, []))
|
||||
self.nodes.append(start_node(1, self.options.tmpdir, ["-blockversion=2"]))
|
||||
self.nodes.append(start_node(2, self.options.tmpdir, ["-blockversion=3"]))
|
||||
connect_nodes(self.nodes[1], 0)
|
||||
connect_nodes(self.nodes[2], 0)
|
||||
self.is_network_split = False
|
||||
self.sync_all()
|
||||
|
||||
def run_test(self):
|
||||
cnt = self.nodes[0].getblockcount()
|
||||
|
||||
# Mine some old-version blocks
|
||||
self.nodes[1].generate(100)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 100):
|
||||
raise AssertionError("Failed to mine 100 version=2 blocks")
|
||||
|
||||
# Mine 750 new-version blocks
|
||||
for i in xrange(15):
|
||||
self.nodes[2].generate(50)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 850):
|
||||
raise AssertionError("Failed to mine 750 version=3 blocks")
|
||||
|
||||
# TODO: check that new DERSIG rules are not enforced
|
||||
|
||||
# Mine 1 new-version block
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 851):
|
||||
raise AssertionFailure("Failed to mine a version=3 blocks")
|
||||
|
||||
# TODO: check that new DERSIG rules are enforced
|
||||
|
||||
# Mine 198 new-version blocks
|
||||
for i in xrange(2):
|
||||
self.nodes[2].generate(99)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 1049):
|
||||
raise AssertionError("Failed to mine 198 version=3 blocks")
|
||||
|
||||
# Mine 1 old-version block
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 1050):
|
||||
raise AssertionError("Failed to mine a version=2 block after 949 version=3 blocks")
|
||||
|
||||
# Mine 1 new-version blocks
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 1051):
|
||||
raise AssertionError("Failed to mine a version=3 block")
|
||||
|
||||
# Mine 1 old-version blocks
|
||||
try:
|
||||
self.nodes[1].generate(1)
|
||||
raise AssertionError("Succeeded to mine a version=2 block after 950 version=3 blocks")
|
||||
except JSONRPCException:
|
||||
pass
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 1051):
|
||||
raise AssertionError("Accepted a version=2 block after 950 version=3 blocks")
|
||||
|
||||
# Mine 1 new-version blocks
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
if (self.nodes[0].getblockcount() != cnt + 1052):
|
||||
raise AssertionError("Failed to mine a version=3 block")
|
||||
|
||||
if __name__ == '__main__':
|
||||
BIP66Test().main()
|
||||
@@ -4,7 +4,9 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_nodes
|
||||
|
||||
|
||||
class DecodeScriptTest(BitcoinTestFramework):
|
||||
"""Tests decoding scripts via RPC command "decodescript"."""
|
||||
|
||||
@@ -8,7 +8,8 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import initialize_chain_clean, start_nodes
|
||||
|
||||
|
||||
class DisableWalletTest (BitcoinTestFramework):
|
||||
|
||||
|
||||
@@ -8,9 +8,9 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import start_node, connect_nodes
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
class ForkNotifyTest(BitcoinTestFramework):
|
||||
|
||||
@@ -19,7 +19,7 @@ class ForkNotifyTest(BitcoinTestFramework):
|
||||
def setup_network(self):
|
||||
self.nodes = []
|
||||
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
|
||||
with open(self.alert_filename, 'w') as f:
|
||||
with open(self.alert_filename, 'w'):
|
||||
pass # Just open then close to create zero-length file
|
||||
self.nodes.append(start_node(0, self.options.tmpdir,
|
||||
["-blockversion=2", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]))
|
||||
|
||||
@@ -4,9 +4,12 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from pprint import pprint
|
||||
from time import sleep
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, assert_greater_than, \
|
||||
initialize_chain_clean, start_nodes, connect_nodes_bi, stop_nodes, \
|
||||
wait_bitcoinds
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
# Create one-input, one-output, no-fee transaction:
|
||||
class RawTransactionsTest(BitcoinTestFramework):
|
||||
|
||||
@@ -4,7 +4,8 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import initialize_chain_clean, start_nodes, \
|
||||
connect_nodes_bi
|
||||
|
||||
|
||||
class GetBlockTemplateTest(BitcoinTestFramework):
|
||||
|
||||
@@ -4,8 +4,10 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.authproxy import AuthServiceProxy
|
||||
from test_framework.util import random_transaction
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
def check_array_result(object_array, to_match, expected):
|
||||
"""
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
|
||||
from binascii import a2b_hex, b2a_hex
|
||||
from hashlib import sha256
|
||||
|
||||
@@ -5,9 +5,10 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, start_node
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
class HardForkDetectionTest(BitcoinTestFramework):
|
||||
|
||||
@@ -16,7 +17,7 @@ class HardForkDetectionTest(BitcoinTestFramework):
|
||||
def setup_network(self):
|
||||
self.nodes = []
|
||||
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
|
||||
with open(self.alert_filename, 'w') as f:
|
||||
with open(self.alert_filename, 'w'):
|
||||
pass # Just open then close to create zero-length file
|
||||
self.nodes.append(start_node(0, self.options.tmpdir,
|
||||
["-blockversion=2", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]))
|
||||
|
||||
@@ -8,7 +8,8 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import assert_equal, start_nodes
|
||||
|
||||
import base64
|
||||
|
||||
try:
|
||||
@@ -36,45 +37,45 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
|
||||
out1 = conn.getresponse().read();
|
||||
out1 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True)
|
||||
assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open!
|
||||
assert_equal(conn.sock!=None, True) # according to http/1.1 connection must still be open!
|
||||
|
||||
#send 2nd request without closing connection
|
||||
# send 2nd request without closing connection
|
||||
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
|
||||
out2 = conn.getresponse().read();
|
||||
assert_equal('"error":null' in out1, True) #must also response with a correct json-rpc message
|
||||
assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open!
|
||||
out2 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out2, True) # must also response with a correct json-rpc message
|
||||
assert_equal(conn.sock!=None, True) # according to http/1.1 connection must still be open!
|
||||
conn.close()
|
||||
|
||||
#same should be if we add keep-alive because this should be the std. behaviour
|
||||
# same should be if we add keep-alive because this should be the std. behaviour
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpair), "Connection": "keep-alive"}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
|
||||
out1 = conn.getresponse().read();
|
||||
out1 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True)
|
||||
assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open!
|
||||
assert_equal(conn.sock!=None, True) # according to http/1.1 connection must still be open!
|
||||
|
||||
#send 2nd request without closing connection
|
||||
# send 2nd request without closing connection
|
||||
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
|
||||
out2 = conn.getresponse().read();
|
||||
assert_equal('"error":null' in out1, True) #must also response with a correct json-rpc message
|
||||
assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open!
|
||||
out2 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out2, True) # must also response with a correct json-rpc message
|
||||
assert_equal(conn.sock!=None, True) # according to http/1.1 connection must still be open!
|
||||
conn.close()
|
||||
|
||||
#now do the same with "Connection: close"
|
||||
# now do the same with "Connection: close"
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpair), "Connection":"close"}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
|
||||
out1 = conn.getresponse().read();
|
||||
out1 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True)
|
||||
assert_equal(conn.sock!=None, False) #now the connection must be closed after the response
|
||||
assert_equal(conn.sock!=None, False) # now the connection must be closed after the response
|
||||
|
||||
#node1 (2nd node) is running with disabled keep-alive option
|
||||
# node1 (2nd node) is running with disabled keep-alive option
|
||||
urlNode1 = urlparse.urlparse(self.nodes[1].url)
|
||||
authpair = urlNode1.username + ':' + urlNode1.password
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpair)}
|
||||
@@ -82,10 +83,10 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
conn = httplib.HTTPConnection(urlNode1.hostname, urlNode1.port)
|
||||
conn.connect()
|
||||
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
|
||||
out1 = conn.getresponse().read();
|
||||
out1 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True)
|
||||
|
||||
#node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
|
||||
# node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
|
||||
urlNode2 = urlparse.urlparse(self.nodes[2].url)
|
||||
authpair = urlNode2.username + ':' + urlNode2.password
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpair)}
|
||||
@@ -93,9 +94,9 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
conn = httplib.HTTPConnection(urlNode2.hostname, urlNode2.port)
|
||||
conn.connect()
|
||||
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
|
||||
out1 = conn.getresponse().read();
|
||||
out1 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True)
|
||||
assert_equal(conn.sock!=None, True) #connection must be closed because bitcoind should use keep-alive by default
|
||||
assert_equal(conn.sock!=None, True) # connection must be closed because bitcoind should use keep-alive by default
|
||||
|
||||
if __name__ == '__main__':
|
||||
HTTPBasicsTest ().main ()
|
||||
HTTPBasicsTest().main()
|
||||
|
||||
@@ -8,22 +8,23 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import initialize_chain_clean, start_node, \
|
||||
connect_nodes_bi, sync_blocks
|
||||
|
||||
import time
|
||||
|
||||
class InvalidateTest(BitcoinTestFramework):
|
||||
|
||||
|
||||
def setup_chain(self):
|
||||
print("Initializing test directory "+self.options.tmpdir)
|
||||
initialize_chain_clean(self.options.tmpdir, 3)
|
||||
|
||||
|
||||
def setup_network(self):
|
||||
self.nodes = []
|
||||
self.is_network_split = False
|
||||
self.is_network_split = False
|
||||
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"]))
|
||||
self.nodes.append(start_node(1, self.options.tmpdir, ["-debug"]))
|
||||
self.nodes.append(start_node(2, self.options.tmpdir, ["-debug"]))
|
||||
|
||||
|
||||
def run_test(self):
|
||||
print "Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:"
|
||||
print "Mine 4 blocks on Node 0"
|
||||
|
||||
@@ -5,11 +5,11 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import ComparisonTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import assert_equal
|
||||
from test_framework.comptool import TestManager, TestInstance
|
||||
from test_framework.mininode import *
|
||||
from test_framework.blocktools import *
|
||||
import logging
|
||||
from test_framework.mininode import NetworkThread
|
||||
from test_framework.blocktools import create_block, create_coinbase, create_transaction
|
||||
|
||||
import copy
|
||||
import time
|
||||
|
||||
@@ -25,7 +25,7 @@ re-requested.
|
||||
# Use the ComparisonTestFramework with 1 node: only use --testbinary.
|
||||
class InvalidBlockRequestTest(ComparisonTestFramework):
|
||||
|
||||
''' Can either run this test as 1 node with expected answers, or two and compare them.
|
||||
''' Can either run this test as 1 node with expected answers, or two and compare them.
|
||||
Change the "outcome" variable from each TestInstance object to only do the comparison. '''
|
||||
def __init__(self):
|
||||
self.num_nodes = 1
|
||||
|
||||
111
qa/rpc-tests/key_import_export.py
Executable file
111
qa/rpc-tests/key_import_export.py
Executable file
@@ -0,0 +1,111 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2017 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from decimal import Decimal
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import assert_equal, assert_greater_than, start_nodes, initialize_chain_clean, connect_nodes_bi
|
||||
|
||||
import logging
|
||||
|
||||
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
|
||||
|
||||
|
||||
class KeyImportExportTest (BitcoinTestFramework):
|
||||
|
||||
def setup_chain(self):
|
||||
print("Initializing test directory "+self.options.tmpdir)
|
||||
initialize_chain_clean(self.options.tmpdir, 4)
|
||||
|
||||
def setup_network(self, split=False):
|
||||
self.nodes = start_nodes(4, self.options.tmpdir )
|
||||
connect_nodes_bi(self.nodes,0,1)
|
||||
connect_nodes_bi(self.nodes,1,2)
|
||||
connect_nodes_bi(self.nodes,0,2)
|
||||
connect_nodes_bi(self.nodes,0,3)
|
||||
self.is_network_split=False
|
||||
self.sync_all()
|
||||
|
||||
def run_test(self):
|
||||
[alice, bob, charlie, miner] = self.nodes
|
||||
|
||||
def alice_to_bob(amount):
|
||||
alice.sendtoaddress(addr, Decimal(amount))
|
||||
self.sync_all()
|
||||
miner.generate(1)
|
||||
self.sync_all()
|
||||
|
||||
def verify_utxos(node, amounts):
|
||||
utxos = node.listunspent(1, 10**9, [addr])
|
||||
|
||||
def cmp_confirmations_high_to_low(a, b):
|
||||
return cmp(b["confirmations"], a["confirmations"])
|
||||
|
||||
utxos.sort(cmp_confirmations_high_to_low)
|
||||
|
||||
try:
|
||||
assert_equal(amounts, [utxo["amount"] for utxo in utxos])
|
||||
except AssertionError:
|
||||
logging.error(
|
||||
'Expected amounts: %r; utxos: %r',
|
||||
amounts, utxos)
|
||||
raise
|
||||
|
||||
# Seed Alice with some funds
|
||||
alice.generate(10)
|
||||
self.sync_all()
|
||||
miner.generate(100)
|
||||
self.sync_all()
|
||||
|
||||
# Now get a pristine address for receiving transfers:
|
||||
addr = bob.getnewaddress()
|
||||
verify_utxos(bob, [])
|
||||
verify_utxos(charlie, [])
|
||||
|
||||
# the amounts of each txn embodied which generates a single UTXO:
|
||||
amounts = map(Decimal, ['2.3', '3.7', '0.1', '0.5', '1.0', '0.19'])
|
||||
|
||||
# Internal test consistency assertion:
|
||||
assert_greater_than(
|
||||
alice.getbalance(),
|
||||
reduce(Decimal.__add__, amounts))
|
||||
|
||||
logging.info("Sending pre-export txns...")
|
||||
for amount in amounts[0:2]:
|
||||
alice_to_bob(amount)
|
||||
|
||||
logging.info("Exporting privkey from bob...")
|
||||
privkey = bob.dumpprivkey(addr)
|
||||
|
||||
logging.info("Sending post-export txns...")
|
||||
for amount in amounts[2:4]:
|
||||
alice_to_bob(amount)
|
||||
|
||||
verify_utxos(bob, amounts[:4])
|
||||
verify_utxos(charlie, [])
|
||||
|
||||
logging.info("Importing privkey into charlie...")
|
||||
ipkaddr = charlie.importprivkey(privkey, '', True)
|
||||
assert_equal(addr, ipkaddr)
|
||||
|
||||
# importprivkey should have rescanned, so this should pass:
|
||||
verify_utxos(charlie, amounts[:4])
|
||||
|
||||
# Verify idempotent behavior:
|
||||
ipkaddr2 = charlie.importprivkey(privkey, '', True)
|
||||
assert_equal(addr, ipkaddr2)
|
||||
|
||||
# amounts should be unchanged
|
||||
verify_utxos(charlie, amounts[:4])
|
||||
|
||||
logging.info("Sending post-import txns...")
|
||||
for amount in amounts[4:]:
|
||||
alice_to_bob(amount)
|
||||
|
||||
verify_utxos(bob, amounts)
|
||||
verify_utxos(charlie, amounts)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
KeyImportExportTest().main()
|
||||
@@ -6,18 +6,17 @@
|
||||
# Exercise the wallet keypool, and interaction with wallet encryption/locking
|
||||
|
||||
# Add python-bitcoinrpc to module search path:
|
||||
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import check_json_precision, initialize_chain, \
|
||||
start_nodes, start_node, stop_nodes, wait_bitcoinds, bitcoind_processes
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import json
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import traceback
|
||||
|
||||
from test_framework.util import *
|
||||
|
||||
|
||||
def check_array_result(object_array, to_match, expected):
|
||||
"""
|
||||
Pass in array of JSON objects, a dictionary with key/value pairs
|
||||
|
||||
@@ -6,8 +6,8 @@
|
||||
# Exercise the listtransactions API
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
def check_array_result(object_array, to_match, expected):
|
||||
"""
|
||||
|
||||
@@ -4,9 +4,15 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
#
|
||||
|
||||
from test_framework.mininode import *
|
||||
from test_framework.mininode import NodeConn, NodeConnCB, NetworkThread, \
|
||||
EarlyDisconnectError, CInv, msg_inv, mininode_lock
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import initialize_chain_clean, start_nodes, \
|
||||
p2p_port
|
||||
|
||||
import os
|
||||
import time
|
||||
import random
|
||||
import logging
|
||||
|
||||
'''
|
||||
@@ -43,7 +49,6 @@ class TestManager(NodeConnCB):
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
fail = False
|
||||
self.connection.rpc.generate(1) # Leave IBD
|
||||
|
||||
numBlocksToGenerate = [ 8, 16, 128, 1024 ]
|
||||
@@ -56,7 +61,7 @@ class TestManager(NodeConnCB):
|
||||
current_invs = []
|
||||
if len(current_invs) > 0:
|
||||
self.connection.send_message(msg_inv(current_invs))
|
||||
|
||||
|
||||
# Wait and see how many blocks were requested
|
||||
time.sleep(2)
|
||||
|
||||
@@ -75,7 +80,7 @@ class TestManager(NodeConnCB):
|
||||
self.disconnectOkay = True
|
||||
self.connection.disconnect_node()
|
||||
|
||||
|
||||
|
||||
class MaxBlocksInFlightTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
parser.add_option("--testbinary", dest="testbinary",
|
||||
|
||||
119
qa/rpc-tests/mempool_nu_activation.py
Executable file
119
qa/rpc-tests/mempool_nu_activation.py
Executable file
@@ -0,0 +1,119 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2018 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_node, connect_nodes, wait_and_assert_operationid_status
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
# Test mempool behaviour around network upgrade activation
|
||||
class MempoolUpgradeActivationTest(BitcoinTestFramework):
|
||||
|
||||
alert_filename = None # Set by setup_network
|
||||
|
||||
def setup_network(self):
|
||||
args = ["-checkmempool", "-debug=mempool", "-blockmaxsize=4000", "-nuparams=5ba81b19:200"]
|
||||
self.nodes = []
|
||||
self.nodes.append(start_node(0, self.options.tmpdir, args))
|
||||
self.nodes.append(start_node(1, self.options.tmpdir, args))
|
||||
connect_nodes(self.nodes[1], 0)
|
||||
self.is_network_split = False
|
||||
self.sync_all
|
||||
|
||||
def setup_chain(self):
|
||||
print "Initializing test directory "+self.options.tmpdir
|
||||
initialize_chain_clean(self.options.tmpdir, 2)
|
||||
|
||||
def run_test(self):
|
||||
self.nodes[1].generate(100)
|
||||
self.sync_all()
|
||||
|
||||
# Mine 97 blocks. After this, nodes[1] blocks
|
||||
# 1 to 97 are spend-able.
|
||||
self.nodes[0].generate(97)
|
||||
self.sync_all()
|
||||
|
||||
# Shield some ZEC
|
||||
node1_taddr = self.nodes[1].getnewaddress()
|
||||
node0_zaddr = self.nodes[0].z_getnewaddress()
|
||||
recipients = [{'address': node0_zaddr, 'amount': Decimal('10')}]
|
||||
myopid = self.nodes[1].z_sendmany(node1_taddr, recipients, 1, Decimal('0'))
|
||||
print wait_and_assert_operationid_status(self.nodes[1], myopid)
|
||||
self.sync_all()
|
||||
|
||||
# Mine block 198. After this, the mempool expects
|
||||
# block 199, which is the last Sprout block.
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Mempool should be empty.
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set())
|
||||
|
||||
# Check node 0 shielded balance
|
||||
assert_equal(self.nodes[0].z_getbalance(node0_zaddr), Decimal('10'))
|
||||
|
||||
# Fill the mempool with twice as many transactions as can fit into blocks
|
||||
node0_taddr = self.nodes[0].getnewaddress()
|
||||
sprout_txids = []
|
||||
while self.nodes[1].getmempoolinfo()['bytes'] < 2 * 4000:
|
||||
sprout_txids.append(self.nodes[1].sendtoaddress(node0_taddr, Decimal('0.001')))
|
||||
self.sync_all()
|
||||
|
||||
# Spends should be in the mempool
|
||||
sprout_mempool = set(self.nodes[0].getrawmempool())
|
||||
assert_equal(sprout_mempool, set(sprout_txids))
|
||||
|
||||
# Mine block 199. After this, the mempool expects
|
||||
# block 200, which is the first Overwinter block.
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# mempool should be empty.
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set())
|
||||
|
||||
# Block 199 should contain a subset of the original mempool
|
||||
# (with all other transactions having been dropped)
|
||||
block_txids = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['tx']
|
||||
assert(len(block_txids) < len(sprout_txids))
|
||||
for txid in block_txids[1:]: # Exclude coinbase
|
||||
assert(txid in sprout_txids)
|
||||
|
||||
# Create some transparent Overwinter transactions
|
||||
overwinter_txids = [self.nodes[1].sendtoaddress(node0_taddr, Decimal('0.001')) for i in range(10)]
|
||||
self.sync_all()
|
||||
|
||||
# Create a shielded Overwinter transaction
|
||||
recipients = [{'address': node0_taddr, 'amount': Decimal('10')}]
|
||||
myopid = self.nodes[0].z_sendmany(node0_zaddr, recipients, 1, Decimal('0'))
|
||||
shielded = wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
assert(shielded != None)
|
||||
overwinter_txids.append(shielded)
|
||||
self.sync_all()
|
||||
|
||||
# Spends should be in the mempool
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set(overwinter_txids))
|
||||
|
||||
# Node 0 note should be unspendable
|
||||
assert_equal(self.nodes[0].z_getbalance(node0_zaddr), Decimal('0'))
|
||||
|
||||
# Invalidate block 199.
|
||||
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
|
||||
|
||||
# BUG: Ideally, the mempool should now only contain the transactions
|
||||
# that were in block 199, the Overwinter transactions having been dropped.
|
||||
# However, because chainActive is not updated until after the transactions
|
||||
# in the disconnected block have been re-added to the mempool, the height
|
||||
# seen by AcceptToMemoryPool is one greater than it should be. This causes
|
||||
# the block 199 transactions to be validated against the Overwinter rules,
|
||||
# and rejected because they (obviously) fail.
|
||||
#assert_equal(set(self.nodes[0].getrawmempool()), set(block_txids[1:]))
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set())
|
||||
|
||||
# Node 0 note should be spendable again
|
||||
assert_equal(self.nodes[0].z_getbalance(node0_zaddr), Decimal('10'))
|
||||
|
||||
if __name__ == '__main__':
|
||||
MempoolUpgradeActivationTest().main()
|
||||
@@ -9,9 +9,9 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
import os
|
||||
import shutil
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, assert_raises, start_node, connect_nodes
|
||||
|
||||
|
||||
# Create one-input, one-output, no-fee transaction:
|
||||
class MempoolCoinbaseTest(BitcoinTestFramework):
|
||||
@@ -36,8 +36,6 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
|
||||
return signresult["hex"]
|
||||
|
||||
def run_test(self):
|
||||
start_count = self.nodes[0].getblockcount()
|
||||
|
||||
# Mine three blocks. After this, nodes[0] blocks
|
||||
# 101, 102, and 103 are spend-able.
|
||||
new_blocks = self.nodes[1].generate(4)
|
||||
@@ -52,16 +50,25 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
|
||||
# 3. Indirect (coinbase and child both in chain) : spend_103 and spend_103_1
|
||||
# Use invalidatblock to make all of the above coinbase spends invalid (immature coinbase),
|
||||
# and make sure the mempool code behaves correctly.
|
||||
b = [ self.nodes[0].getblockhash(n) for n in range(102, 105) ]
|
||||
b = [ self.nodes[0].getblockhash(n) for n in range(101, 105) ]
|
||||
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
|
||||
spend_101_raw = self.create_tx(coinbase_txids[0], node1_address, 10)
|
||||
spend_102_raw = self.create_tx(coinbase_txids[1], node0_address, 10)
|
||||
spend_103_raw = self.create_tx(coinbase_txids[2], node0_address, 10)
|
||||
spend_101_raw = self.create_tx(coinbase_txids[1], node1_address, 10)
|
||||
spend_102_raw = self.create_tx(coinbase_txids[2], node0_address, 10)
|
||||
spend_103_raw = self.create_tx(coinbase_txids[3], node0_address, 10)
|
||||
|
||||
# Create a block-height-locked transaction which will be invalid after reorg
|
||||
timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 10})
|
||||
# Set the time lock
|
||||
timelock_tx = timelock_tx.replace("ffffffff", "11111111", 1)
|
||||
timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
|
||||
timelock_tx = self.nodes[0].signrawtransaction(timelock_tx)["hex"]
|
||||
assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, timelock_tx)
|
||||
|
||||
# Broadcast and mine spend_102 and 103:
|
||||
spend_102_id = self.nodes[0].sendrawtransaction(spend_102_raw)
|
||||
spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw)
|
||||
self.nodes[0].generate(1)
|
||||
assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, timelock_tx)
|
||||
|
||||
# Create 102_1 and 103_1:
|
||||
spend_102_1_raw = self.create_tx(spend_102_id, node1_address, 10)
|
||||
@@ -69,7 +76,8 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
|
||||
|
||||
# Broadcast and mine 103_1:
|
||||
spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw)
|
||||
self.nodes[0].generate(1)
|
||||
last_block = self.nodes[0].generate(1)
|
||||
timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx)
|
||||
|
||||
# ... now put spend_101 and spend_102_1 in memory pools:
|
||||
spend_101_id = self.nodes[0].sendrawtransaction(spend_101_raw)
|
||||
@@ -77,7 +85,11 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
|
||||
|
||||
self.sync_all()
|
||||
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set([ spend_101_id, spend_102_1_id ]))
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set([ spend_101_id, spend_102_1_id, timelock_tx_id ]))
|
||||
|
||||
for node in self.nodes:
|
||||
node.invalidateblock(last_block[0])
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set([ spend_101_id, spend_102_1_id, spend_103_1_id ]))
|
||||
|
||||
# Use invalidateblock to re-org back and make all those coinbase spends
|
||||
# immature/invalid:
|
||||
@@ -9,9 +9,8 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
import os
|
||||
import shutil
|
||||
from test_framework.util import assert_equal, start_node
|
||||
|
||||
|
||||
# Create one-input, one-output, no-fee transaction:
|
||||
class MempoolCoinbaseTest(BitcoinTestFramework):
|
||||
|
||||
@@ -14,9 +14,10 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
import os
|
||||
import shutil
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, assert_greater_than, assert_raises, \
|
||||
start_node
|
||||
|
||||
|
||||
# Create one-input, one-output, no-fee transaction:
|
||||
class MempoolSpendCoinbaseTest(BitcoinTestFramework):
|
||||
|
||||
184
qa/rpc-tests/mempool_tx_expiry.py
Executable file
184
qa/rpc-tests/mempool_tx_expiry.py
Executable file
@@ -0,0 +1,184 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2018 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
#
|
||||
# Test proper expiry for transactions >= version 3
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import assert_equal, \
|
||||
connect_nodes_bi, sync_blocks, start_nodes, \
|
||||
wait_and_assert_operationid_status
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
class MempoolTxExpiryTest(BitcoinTestFramework):
|
||||
|
||||
def setup_nodes(self):
|
||||
return start_nodes(4, self.options.tmpdir, [["-nuparams=5ba81b19:205", "-txexpirydelta=4", "-debug=mempool"]] * 4)
|
||||
|
||||
# Test before, at, and after expiry block
|
||||
# TODO: Test case of dependent txs in reorgs
|
||||
# chain is at block height 199 when run_test executes
|
||||
def run_test(self):
|
||||
z_alice = self.nodes[0].z_getnewaddress()
|
||||
bob = self.nodes[2].getnewaddress()
|
||||
z_bob = self.nodes[2].z_getnewaddress()
|
||||
|
||||
# When Overwinter not yet activated, no expiryheight in tx
|
||||
sapling_tx = self.nodes[0].sendtoaddress(bob, 0.01)
|
||||
rawtx = self.nodes[0].getrawtransaction(sapling_tx, 1)
|
||||
assert_equal(rawtx["overwintered"], False)
|
||||
assert("expiryheight" not in rawtx)
|
||||
|
||||
self.nodes[0].generate(6)
|
||||
self.sync_all()
|
||||
|
||||
## Shield one of Alice's coinbase funds to her zaddr
|
||||
res = self.nodes[0].z_shieldcoinbase("*", z_alice, 0.0001, 1)
|
||||
wait_and_assert_operationid_status(self.nodes[0], res['opid'])
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Get balance on node 0
|
||||
bal = self.nodes[0].z_gettotalbalance()
|
||||
print "Balance before zsend, after shielding 10: ", bal
|
||||
assert_equal(Decimal(bal["private"]), Decimal("9.9999"))
|
||||
|
||||
print "Splitting network..."
|
||||
self.split_network()
|
||||
|
||||
# Create transactions
|
||||
zsendamount = Decimal('1.0') - Decimal('0.0001')
|
||||
recipients = []
|
||||
recipients.append({"address": z_bob, "amount": zsendamount})
|
||||
myopid = self.nodes[0].z_sendmany(z_alice, recipients)
|
||||
persist_shielded = wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
persist_transparent = self.nodes[0].sendtoaddress(bob, 0.01)
|
||||
# Verify transparent transaction is version 3 intended for Overwinter branch
|
||||
rawtx = self.nodes[0].getrawtransaction(persist_transparent, 1)
|
||||
assert_equal(rawtx["version"], 3)
|
||||
assert_equal(rawtx["overwintered"], True)
|
||||
assert_equal(rawtx["expiryheight"], 212)
|
||||
print "Blockheight at persist_transparent & persist_shielded creation:", self.nodes[0].getblockchaininfo()['blocks']
|
||||
print "Expiryheight of persist_transparent:", rawtx['expiryheight']
|
||||
# Verify shielded transaction is version 3 intended for Overwinter branch
|
||||
rawtx = self.nodes[0].getrawtransaction(persist_shielded, 1)
|
||||
print "Expiryheight of persist_shielded", rawtx['expiryheight']
|
||||
assert_equal(rawtx["version"], 3)
|
||||
assert_equal(rawtx["overwintered"], True)
|
||||
assert_equal(rawtx["expiryheight"], 212)
|
||||
|
||||
print "\n Blockheight advances to less than expiry block height. After reorg, txs should persist in mempool"
|
||||
assert(persist_transparent in self.nodes[0].getrawmempool())
|
||||
assert(persist_shielded in self.nodes[0].getrawmempool())
|
||||
assert_equal(set(self.nodes[2].getrawmempool()), set())
|
||||
print "mempool node 0:", self.nodes[0].getrawmempool()
|
||||
print "mempool node 2:", self.nodes[2].getrawmempool()
|
||||
bal = self.nodes[0].z_gettotalbalance()
|
||||
print "Printing balance before persist_shielded & persist_transparent are initially mined from mempool", bal
|
||||
# Txs are mined on node 0; will later be rolled back
|
||||
self.nodes[0].generate(1)
|
||||
print "Node 0 generated 1 block"
|
||||
print "Node 0 height:", self.nodes[0].getblockchaininfo()['blocks']
|
||||
print "Node 2 height:", self.nodes[2].getblockchaininfo()['blocks']
|
||||
bal = self.nodes[0].z_gettotalbalance()
|
||||
print "Printing balance after persist_shielded & persist_transparent are mined:", bal
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set())
|
||||
|
||||
print "Mine 2 competing blocks on Node 2..."
|
||||
blocks = self.nodes[2].generate(2)
|
||||
for block in blocks:
|
||||
blk = self.nodes[2].getblock(block)
|
||||
print "Height: {0}, Mined block txs: {1}".format(blk["height"], blk["tx"])
|
||||
print "Connect nodes to force a reorg"
|
||||
connect_nodes_bi(self.nodes,0,2)
|
||||
self.is_network_split = False
|
||||
|
||||
print "Syncing blocks"
|
||||
sync_blocks(self.nodes)
|
||||
|
||||
print "Ensure that txs are back in mempool of node 0"
|
||||
print "Blockheight node 0:", self.nodes[0].getblockchaininfo()['blocks']
|
||||
print "Blockheight node 2:", self.nodes[2].getblockchaininfo()['blocks']
|
||||
print "mempool node 0: ", self.nodes[0].getrawmempool()
|
||||
print "mempool node 2: ", self.nodes[2].getrawmempool()
|
||||
assert(persist_transparent in self.nodes[0].getrawmempool())
|
||||
assert(persist_shielded in self.nodes[0].getrawmempool())
|
||||
bal = self.nodes[0].z_gettotalbalance()
|
||||
# Mine txs to get them out of the way of mempool sync in split_network()
|
||||
print "Generating another block on node 0 to clear txs from mempool"
|
||||
self.nodes[0].generate(1)
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set())
|
||||
sync_blocks(self.nodes)
|
||||
|
||||
print "Splitting network..."
|
||||
self.split_network()
|
||||
|
||||
print "\n Blockheight advances to equal expiry block height. After reorg, txs should persist in mempool"
|
||||
myopid = self.nodes[0].z_sendmany(z_alice, recipients)
|
||||
persist_shielded_2 = wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
persist_transparent_2 = self.nodes[0].sendtoaddress(bob, 0.01)
|
||||
rawtx_trans = self.nodes[0].getrawtransaction(persist_transparent_2, 1)
|
||||
rawtx_shield = self.nodes[0].getrawtransaction(persist_shielded_2, 1)
|
||||
print "Blockheight node 0 at persist_transparent_2 creation:", self.nodes[0].getblockchaininfo()['blocks']
|
||||
print "Blockheight node 2 at persist_transparent_2 creation:", self.nodes[2].getblockchaininfo()['blocks']
|
||||
print "Expiryheight of persist_transparent_2:", rawtx_trans['expiryheight']
|
||||
print "Expiryheight of persist_shielded_2:", rawtx_shield['expiryheight']
|
||||
blocks = self.nodes[2].generate(4)
|
||||
for block in blocks:
|
||||
blk = self.nodes[2].getblock(block)
|
||||
print "Height: {0}, Mined block txs: {1}".format(blk["height"], blk["tx"])
|
||||
print "Connect nodes to force a reorg"
|
||||
connect_nodes_bi(self.nodes, 0, 2)
|
||||
self.is_network_split = False
|
||||
sync_blocks(self.nodes)
|
||||
print "Ensure that persist_transparent_2 & persist_shielded_2 are in mempool at expiry block height"
|
||||
print "Blockheight node 0:", self.nodes[0].getblockchaininfo()['blocks']
|
||||
print "Blockheight node 2:", self.nodes[2].getblockchaininfo()['blocks']
|
||||
print "mempool node 0: ", self.nodes[0].getrawmempool()
|
||||
print "mempool node 2: ", self.nodes[2].getrawmempool()
|
||||
assert(persist_transparent_2 in self.nodes[0].getrawmempool())
|
||||
assert(persist_shielded_2 in self.nodes[0].getrawmempool())
|
||||
# Mine persist txs to get them out of the way of mempool sync in split_network()
|
||||
self.nodes[0].generate(1)
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set())
|
||||
sync_blocks(self.nodes)
|
||||
print "Balance after persist_shielded_2 is mined to remove from mempool: ", self.nodes[0].z_gettotalbalance()
|
||||
|
||||
print "Splitting network..."
|
||||
self.split_network()
|
||||
|
||||
print "\n Blockheight advances to greater than expiry block height. After reorg, txs should expire from mempool"
|
||||
print "Balance before expire_shielded is sent: ", self.nodes[0].z_gettotalbalance()
|
||||
myopid = self.nodes[0].z_sendmany(z_alice, recipients)
|
||||
expire_shielded = wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
expire_transparent = self.nodes[0].sendtoaddress(bob, 0.01)
|
||||
print "Blockheight node 0 at expire_transparent creation:", self.nodes[0].getblockchaininfo()['blocks']
|
||||
print "Blockheight node 2 at expire_shielded creation:", self.nodes[2].getblockchaininfo()['blocks']
|
||||
print "Expiryheight of expire_transparent:", self.nodes[0].getrawtransaction(expire_transparent, 1)['expiryheight']
|
||||
print "Expiryheight of expire_shielded:", self.nodes[0].getrawtransaction(expire_shielded, 1)['expiryheight']
|
||||
assert(expire_transparent in self.nodes[0].getrawmempool())
|
||||
assert(expire_shielded in self.nodes[0].getrawmempool())
|
||||
blocks = self.nodes[2].generate(6)
|
||||
for block in blocks:
|
||||
blk = self.nodes[2].getblock(block)
|
||||
print "Height: {0}, Mined block txs: {1}".format(blk["height"], blk["tx"])
|
||||
print "Connect nodes to force a reorg"
|
||||
connect_nodes_bi(self.nodes, 0, 2)
|
||||
self.is_network_split = False
|
||||
sync_blocks(self.nodes)
|
||||
print "Ensure that expire_transparent & expire_shielded are in mempool at expiry block height"
|
||||
print "mempool node 0: ", self.nodes[0].getrawmempool()
|
||||
print "mempool node 2: ", self.nodes[2].getrawmempool()
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set())
|
||||
print "Ensure balance of node 0 is correct"
|
||||
bal = self.nodes[0].z_gettotalbalance()
|
||||
print "Balance after expire_shielded has expired: ", bal
|
||||
assert_equal(Decimal(bal["private"]), Decimal("7.9999"))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
MempoolTxExpiryTest().main()
|
||||
124
qa/rpc-tests/mempool_tx_input_limit.py
Executable file
124
qa/rpc-tests/mempool_tx_input_limit.py
Executable file
@@ -0,0 +1,124 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2017 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_node, connect_nodes, wait_and_assert_operationid_status
|
||||
|
||||
import time
|
||||
from decimal import Decimal
|
||||
|
||||
# Test -mempooltxinputlimit
|
||||
class MempoolTxInputLimitTest(BitcoinTestFramework):
|
||||
|
||||
alert_filename = None # Set by setup_network
|
||||
|
||||
def setup_network(self):
|
||||
args = ["-checkmempool", "-debug=mempool", "-mempooltxinputlimit=2"]
|
||||
self.nodes = []
|
||||
self.nodes.append(start_node(0, self.options.tmpdir, args))
|
||||
self.nodes.append(start_node(1, self.options.tmpdir, args))
|
||||
connect_nodes(self.nodes[1], 0)
|
||||
self.is_network_split = False
|
||||
self.sync_all
|
||||
|
||||
def setup_chain(self):
|
||||
print "Initializing test directory "+self.options.tmpdir
|
||||
initialize_chain_clean(self.options.tmpdir, 2)
|
||||
|
||||
def call_z_sendmany(self, from_addr, to_addr, amount):
|
||||
recipients = []
|
||||
recipients.append({"address": to_addr, "amount": amount})
|
||||
myopid = self.nodes[0].z_sendmany(from_addr, recipients)
|
||||
return wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
|
||||
def run_test(self):
|
||||
self.nodes[0].generate(100)
|
||||
self.sync_all()
|
||||
# Mine three blocks. After this, nodes[0] blocks
|
||||
# 1, 2, and 3 are spend-able.
|
||||
self.nodes[1].generate(3)
|
||||
self.sync_all()
|
||||
|
||||
# Check 1: z_sendmany is limited by -mempooltxinputlimit
|
||||
|
||||
# Add zaddr to node 0
|
||||
node0_zaddr = self.nodes[0].z_getnewaddress()
|
||||
|
||||
# Send three inputs from node 0 taddr to zaddr to get out of coinbase
|
||||
node0_taddr = self.nodes[0].getnewaddress();
|
||||
recipients = []
|
||||
recipients.append({"address":node0_zaddr, "amount":Decimal('30.0')-Decimal('0.0001')}) # utxo amount less fee
|
||||
myopid = self.nodes[0].z_sendmany(node0_taddr, recipients)
|
||||
|
||||
opids = []
|
||||
opids.append(myopid)
|
||||
|
||||
# Spend should fail due to -mempooltxinputlimit
|
||||
timeout = 120
|
||||
status = None
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[0].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
msg = results[0]["error"]["message"]
|
||||
assert_equal("failed", status)
|
||||
assert_equal("Too many transparent inputs 3 > limit 2", msg)
|
||||
break
|
||||
|
||||
# Mempool should be empty.
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set())
|
||||
|
||||
# Reduce amount to only use two inputs
|
||||
spend_zaddr_amount = Decimal('20.0') - Decimal('0.0001')
|
||||
spend_zaddr_id = self.call_z_sendmany(node0_taddr, node0_zaddr, spend_zaddr_amount) # utxo amount less fee
|
||||
self.sync_all()
|
||||
|
||||
# Spend should be in the mempool
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set([ spend_zaddr_id ]))
|
||||
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# mempool should be empty.
|
||||
assert_equal(set(self.nodes[0].getrawmempool()), set())
|
||||
|
||||
# Check 2: sendfrom is limited by -mempooltxinputlimit
|
||||
recipients = []
|
||||
spend_taddr_amount = spend_zaddr_amount - Decimal('0.0001')
|
||||
spend_taddr_output = Decimal('8')
|
||||
|
||||
# Create three outputs
|
||||
recipients.append({"address":self.nodes[1].getnewaddress(), "amount": spend_taddr_output})
|
||||
recipients.append({"address":self.nodes[1].getnewaddress(), "amount": spend_taddr_output})
|
||||
recipients.append({"address":self.nodes[1].getnewaddress(), "amount": spend_taddr_amount - spend_taddr_output - spend_taddr_output})
|
||||
|
||||
myopid = self.nodes[0].z_sendmany(node0_zaddr, recipients)
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Should use three UTXOs and fail
|
||||
try:
|
||||
self.nodes[1].sendfrom("", node0_taddr, spend_taddr_amount - Decimal('1'))
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
msg = e.error['message']
|
||||
assert_equal("Too many transparent inputs 3 > limit 2", msg)
|
||||
|
||||
# mempool should be empty.
|
||||
assert_equal(set(self.nodes[1].getrawmempool()), set())
|
||||
|
||||
# Should use two UTXOs and succeed
|
||||
spend_taddr_id2 = self.nodes[1].sendfrom("", node0_taddr, spend_taddr_output + spend_taddr_output - Decimal('1'))
|
||||
|
||||
# Spend should be in the mempool
|
||||
assert_equal(set(self.nodes[1].getrawmempool()), set([ spend_taddr_id2 ]))
|
||||
|
||||
if __name__ == '__main__':
|
||||
MempoolTxInputLimitTest().main()
|
||||
@@ -8,9 +8,10 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
import os
|
||||
import shutil
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, assert_raises, \
|
||||
initialize_chain_clean, start_node, connect_nodes
|
||||
|
||||
|
||||
class MerkleBlockTest(BitcoinTestFramework):
|
||||
|
||||
|
||||
@@ -8,13 +8,10 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
import base64
|
||||
from test_framework.util import assert_equal, connect_nodes_bi, p2p_port
|
||||
|
||||
import time
|
||||
|
||||
try:
|
||||
import http.client as httplib
|
||||
except ImportError:
|
||||
import httplib
|
||||
try:
|
||||
import urllib.parse as urlparse
|
||||
except ImportError:
|
||||
|
||||
116
qa/rpc-tests/overwinter_peer_management.py
Executable file
116
qa/rpc-tests/overwinter_peer_management.py
Executable file
@@ -0,0 +1,116 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2018 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.mininode import NodeConn, NodeConnCB, NetworkThread, \
|
||||
msg_ping, MY_VERSION, OVERWINTER_PROTO_VERSION
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import initialize_chain_clean, start_nodes, \
|
||||
p2p_port, assert_equal
|
||||
|
||||
import time
|
||||
|
||||
#
|
||||
# In this test we connect Sprout and Overwinter mininodes to a Zcashd node
|
||||
# which will activate Overwinter at block 10.
|
||||
#
|
||||
# We test:
|
||||
# 1. the mininodes stay connected to Zcash with Sprout consensus rules
|
||||
# 2. when Overwinter activates, the Sprout mininodes are dropped
|
||||
# 3. new Overwinter nodes can connect to Zcash
|
||||
# 4. new Sprout nodes cannot connect to Zcash
|
||||
#
|
||||
# This test *does not* verify that prior to Overwinter activation, the Zcashd
|
||||
# node will prefer connections with Overwinter nodes, with an eviction process
|
||||
# that prioritizes Sprout connections.
|
||||
#
|
||||
|
||||
|
||||
class TestManager(NodeConnCB):
|
||||
def __init__(self):
|
||||
NodeConnCB.__init__(self)
|
||||
self.create_callback_map()
|
||||
|
||||
def on_close(self, conn):
|
||||
pass
|
||||
|
||||
def on_reject(self, conn, message):
|
||||
conn.rejectMessage = message
|
||||
|
||||
|
||||
class OverwinterPeerManagementTest(BitcoinTestFramework):
|
||||
|
||||
def setup_chain(self):
|
||||
print "Initializing test directory "+self.options.tmpdir
|
||||
initialize_chain_clean(self.options.tmpdir, 1)
|
||||
|
||||
def setup_network(self):
|
||||
self.nodes = start_nodes(1, self.options.tmpdir,
|
||||
extra_args=[['-nuparams=5ba81b19:10', '-debug', '-whitelist=127.0.0.1']])
|
||||
|
||||
def run_test(self):
|
||||
test = TestManager()
|
||||
|
||||
# Launch 10 Sprout and 10 Overwinter mininodes
|
||||
nodes = []
|
||||
for x in xrange(10):
|
||||
nodes.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test, "regtest", False))
|
||||
nodes.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test, "regtest", True))
|
||||
|
||||
# Start up network handling in another thread
|
||||
NetworkThread().start()
|
||||
|
||||
# Sprout consensus rules apply at block height 9
|
||||
self.nodes[0].generate(9)
|
||||
assert_equal(9, self.nodes[0].getblockcount())
|
||||
|
||||
# Verify mininodes are still connected to zcashd node
|
||||
peerinfo = self.nodes[0].getpeerinfo()
|
||||
versions = [x["version"] for x in peerinfo]
|
||||
assert_equal(10, versions.count(MY_VERSION))
|
||||
assert_equal(10, versions.count(OVERWINTER_PROTO_VERSION))
|
||||
|
||||
# Overwinter consensus rules activate at block height 10
|
||||
self.nodes[0].generate(1)
|
||||
assert_equal(10, self.nodes[0].getblockcount())
|
||||
|
||||
# Mininodes send ping message to zcashd node.
|
||||
pingCounter = 1
|
||||
for node in nodes:
|
||||
node.send_message(msg_ping(pingCounter))
|
||||
pingCounter = pingCounter + 1
|
||||
|
||||
time.sleep(3)
|
||||
|
||||
# Verify Sprout mininodes have been dropped and Overwinter mininodes are still connected.
|
||||
peerinfo = self.nodes[0].getpeerinfo()
|
||||
versions = [x["version"] for x in peerinfo]
|
||||
assert_equal(0, versions.count(MY_VERSION))
|
||||
assert_equal(10, versions.count(OVERWINTER_PROTO_VERSION))
|
||||
|
||||
# Extend the Overwinter chain with another block.
|
||||
self.nodes[0].generate(1)
|
||||
|
||||
# Connect a new Overwinter mininode to the zcashd node, which is accepted.
|
||||
nodes.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test, "regtest", True))
|
||||
time.sleep(3)
|
||||
assert_equal(11, len(self.nodes[0].getpeerinfo()))
|
||||
|
||||
# Try to connect a new Sprout mininode to the zcashd node, which is rejected.
|
||||
sprout = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test, "regtest", False)
|
||||
nodes.append(sprout)
|
||||
time.sleep(3)
|
||||
assert("Version must be 170003 or greater" in str(sprout.rejectMessage))
|
||||
|
||||
# Verify that only Overwinter mininodes are connected.
|
||||
peerinfo = self.nodes[0].getpeerinfo()
|
||||
versions = [x["version"] for x in peerinfo]
|
||||
assert_equal(0, versions.count(MY_VERSION))
|
||||
assert_equal(11, versions.count(OVERWINTER_PROTO_VERSION))
|
||||
|
||||
for node in nodes:
|
||||
node.disconnect_node()
|
||||
|
||||
if __name__ == '__main__':
|
||||
OverwinterPeerManagementTest().main()
|
||||
@@ -4,12 +4,17 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
#
|
||||
|
||||
from test_framework.mininode import *
|
||||
from test_framework.mininode import CBlockHeader, CInv, NodeConn, NodeConnCB, \
|
||||
NetworkThread, msg_block, msg_headers, msg_inv, msg_ping, msg_pong, \
|
||||
mininode_lock
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
import time
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_node, p2p_port
|
||||
from test_framework.blocktools import create_block, create_coinbase
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
'''
|
||||
AcceptBlockTest -- test processing of unrequested blocks.
|
||||
|
||||
|
||||
215
qa/rpc-tests/paymentdisclosure.py
Executable file
215
qa/rpc-tests/paymentdisclosure.py
Executable file
@@ -0,0 +1,215 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2017 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_node, connect_nodes_bi, wait_and_assert_operationid_status
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
class PaymentDisclosureTest (BitcoinTestFramework):
|
||||
|
||||
def setup_chain(self):
|
||||
print("Initializing test directory "+self.options.tmpdir)
|
||||
initialize_chain_clean(self.options.tmpdir, 4)
|
||||
|
||||
def setup_network(self, split=False):
|
||||
args = ['-debug=zrpcunsafe,paymentdisclosure', '-experimentalfeatures', '-paymentdisclosure', '-txindex=1']
|
||||
self.nodes = []
|
||||
self.nodes.append(start_node(0, self.options.tmpdir, args))
|
||||
self.nodes.append(start_node(1, self.options.tmpdir, args))
|
||||
# node 2 does not enable payment disclosure
|
||||
args2 = ['-debug=zrpcunsafe', '-experimentalfeatures', '-txindex=1']
|
||||
self.nodes.append(start_node(2, self.options.tmpdir, args2))
|
||||
connect_nodes_bi(self.nodes,0,1)
|
||||
connect_nodes_bi(self.nodes,1,2)
|
||||
connect_nodes_bi(self.nodes,0,2)
|
||||
self.is_network_split=False
|
||||
self.sync_all()
|
||||
|
||||
def run_test (self):
|
||||
print "Mining blocks..."
|
||||
|
||||
self.nodes[0].generate(4)
|
||||
walletinfo = self.nodes[0].getwalletinfo()
|
||||
assert_equal(walletinfo['immature_balance'], 40)
|
||||
assert_equal(walletinfo['balance'], 0)
|
||||
self.sync_all()
|
||||
self.nodes[2].generate(3)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(101)
|
||||
self.sync_all()
|
||||
assert_equal(self.nodes[0].getbalance(), 40)
|
||||
assert_equal(self.nodes[1].getbalance(), 10)
|
||||
assert_equal(self.nodes[2].getbalance(), 30)
|
||||
|
||||
mytaddr = self.nodes[0].getnewaddress()
|
||||
myzaddr = self.nodes[0].z_getnewaddress()
|
||||
|
||||
# Check that Node 2 has payment disclosure disabled.
|
||||
try:
|
||||
self.nodes[2].z_getpaymentdisclosure("invalidtxid", 0, 0)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("payment disclosure is disabled" in errorString)
|
||||
|
||||
# Check that Node 0 returns an error for an unknown txid
|
||||
try:
|
||||
self.nodes[0].z_getpaymentdisclosure("invalidtxid", 0, 0)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("No information available about transaction" in errorString)
|
||||
|
||||
# Shield coinbase utxos from node 0 of value 40, standard fee of 0.00010000
|
||||
recipients = [{"address":myzaddr, "amount":Decimal('40.0')-Decimal('0.0001')}]
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
|
||||
txid = wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
|
||||
# Check the tx has joinsplits
|
||||
assert( len(self.nodes[0].getrawtransaction("" + txid, 1)["vjoinsplit"]) > 0 )
|
||||
|
||||
# Sync mempools
|
||||
self.sync_all()
|
||||
|
||||
# Confirm that you can't create a payment disclosure for an unconfirmed tx
|
||||
try:
|
||||
self.nodes[0].z_getpaymentdisclosure(txid, 0, 0)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Transaction has not been confirmed yet" in errorString)
|
||||
|
||||
try:
|
||||
self.nodes[1].z_getpaymentdisclosure(txid, 0, 0)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Transaction has not been confirmed yet" in errorString)
|
||||
|
||||
# Mine tx
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Confirm that Node 1 cannot create a payment disclosure for a transaction which does not impact its wallet
|
||||
try:
|
||||
self.nodes[1].z_getpaymentdisclosure(txid, 0, 0)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Transaction does not belong to the wallet" in errorString)
|
||||
|
||||
# Check that an invalid joinsplit index is rejected
|
||||
try:
|
||||
self.nodes[0].z_getpaymentdisclosure(txid, 1, 0)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Invalid js_index" in errorString)
|
||||
|
||||
try:
|
||||
self.nodes[0].z_getpaymentdisclosure(txid, -1, 0)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Invalid js_index" in errorString)
|
||||
|
||||
# Check that an invalid output index is rejected
|
||||
try:
|
||||
self.nodes[0].z_getpaymentdisclosure(txid, 0, 2)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Invalid output_index" in errorString)
|
||||
|
||||
try:
|
||||
self.nodes[0].z_getpaymentdisclosure(txid, 0, -1)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Invalid output_index" in errorString)
|
||||
|
||||
# Ask Node 0 to create and validate a payment disclosure for output 0
|
||||
message = "Here is proof of my payment!"
|
||||
pd = self.nodes[0].z_getpaymentdisclosure(txid, 0, 0, message)
|
||||
result = self.nodes[0].z_validatepaymentdisclosure(pd)
|
||||
assert(result["valid"])
|
||||
output_value_sum = Decimal(result["value"])
|
||||
|
||||
# Ask Node 1 to confirm the payment disclosure is valid
|
||||
result = self.nodes[1].z_validatepaymentdisclosure(pd)
|
||||
assert(result["valid"])
|
||||
assert_equal(result["message"], message)
|
||||
assert_equal(result["value"], output_value_sum)
|
||||
|
||||
# Confirm that payment disclosure begins with prefix zpd:
|
||||
assert(pd.startswith("zpd:"))
|
||||
|
||||
# Confirm that payment disclosure without prefix zpd: fails validation
|
||||
try:
|
||||
self.nodes[1].z_validatepaymentdisclosure(pd[4:])
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("payment disclosure prefix not found" in errorString)
|
||||
|
||||
# Check that total value of output index 0 and index 1 should equal shielding amount of 40 less standard fee.
|
||||
pd = self.nodes[0].z_getpaymentdisclosure(txid, 0, 1)
|
||||
result = self.nodes[0].z_validatepaymentdisclosure(pd)
|
||||
output_value_sum += Decimal(result["value"])
|
||||
assert_equal(output_value_sum, Decimal('39.99990000'))
|
||||
|
||||
# Create a z->z transaction, sending shielded funds from node 0 to node 1
|
||||
node1zaddr = self.nodes[1].z_getnewaddress()
|
||||
recipients = [{"address":node1zaddr, "amount":Decimal('1')}]
|
||||
myopid = self.nodes[0].z_sendmany(myzaddr, recipients)
|
||||
txid = wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
self.sync_all()
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Confirm that Node 0 can create a valid payment disclosure
|
||||
pd = self.nodes[0].z_getpaymentdisclosure(txid, 0, 0, "a message of your choice")
|
||||
result = self.nodes[0].z_validatepaymentdisclosure(pd)
|
||||
assert(result["valid"])
|
||||
|
||||
# Confirm that Node 1, even as recipient of shielded funds, cannot create a payment disclosure
|
||||
# as the transaction was created by Node 0 and Node 1's payment disclosure database does not
|
||||
# contain the necessary data to do so, where the data would only have been available on Node 0
|
||||
# when executing z_shieldcoinbase.
|
||||
try:
|
||||
self.nodes[1].z_getpaymentdisclosure(txid, 0, 0)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Could not find payment disclosure info for the given joinsplit output" in errorString)
|
||||
|
||||
# Payment disclosures cannot be created for transparent transactions.
|
||||
txid = self.nodes[2].sendtoaddress(mytaddr, 1.0)
|
||||
self.sync_all()
|
||||
|
||||
# No matter the type of transaction, if it has not been confirmed, it is ignored.
|
||||
try:
|
||||
self.nodes[0].z_getpaymentdisclosure(txid, 0, 0)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Transaction has not been confirmed yet" in errorString)
|
||||
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Confirm that a payment disclosure can only be generated for a shielded transaction.
|
||||
try:
|
||||
self.nodes[0].z_getpaymentdisclosure(txid, 0, 0)
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Transaction is not a shielded transaction" in errorString)
|
||||
|
||||
if __name__ == '__main__':
|
||||
PaymentDisclosureTest().main()
|
||||
@@ -4,10 +4,13 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from time import *
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_node, connect_nodes
|
||||
from test_framework.mininode import COIN
|
||||
|
||||
import time
|
||||
|
||||
|
||||
class PrioritiseTransactionTest (BitcoinTestFramework):
|
||||
|
||||
def setup_chain(self):
|
||||
@@ -23,34 +26,6 @@ class PrioritiseTransactionTest (BitcoinTestFramework):
|
||||
self.is_network_split=False
|
||||
self.sync_all()
|
||||
|
||||
# Returns txid if operation was a success or None
|
||||
def wait_and_assert_operationid_status(self, myopid, in_status='success', in_errormsg=None):
|
||||
print('waiting for async operation {}'.format(myopid))
|
||||
opids = []
|
||||
opids.append(myopid)
|
||||
timeout = 300
|
||||
status = None
|
||||
errormsg = None
|
||||
txid = None
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[0].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
if status == "failed":
|
||||
errormsg = results[0]['error']['message']
|
||||
elif status == "success":
|
||||
txid = results[0]['result']['txid']
|
||||
break
|
||||
print('...returned status: {}'.format(status))
|
||||
assert_equal(in_status, status)
|
||||
if errormsg is not None:
|
||||
assert(in_errormsg is not None)
|
||||
assert_equal(in_errormsg in errormsg, True)
|
||||
print('...returned error: {}'.format(errormsg))
|
||||
return txid
|
||||
|
||||
def run_test (self):
|
||||
# tx priority is calculated: priority = sum(input_value_in_base_units * input_age)/size_in_bytes
|
||||
|
||||
@@ -67,7 +42,7 @@ class PrioritiseTransactionTest (BitcoinTestFramework):
|
||||
self.sync_all()
|
||||
|
||||
# Create tx of lower value to be prioritized on node 0
|
||||
# Older transactions get mined first, so this lower value, newer tx is unlikely to be mined without prioritization
|
||||
# Older transactions get mined first, so this lower value, newer tx is unlikely to be mined without prioritisation
|
||||
priority_tx_0 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
|
||||
|
||||
# Check that priority_tx_0 is not in block_template() prior to prioritisation
|
||||
@@ -79,19 +54,50 @@ class PrioritiseTransactionTest (BitcoinTestFramework):
|
||||
break
|
||||
assert_equal(in_block_template, False)
|
||||
|
||||
priority_result = self.nodes[0].prioritisetransaction(priority_tx_0, 1000, int(3 * base_fee * COIN))
|
||||
priority_success = self.nodes[0].prioritisetransaction(priority_tx_0, 1000, int(3 * base_fee * COIN))
|
||||
assert(priority_success)
|
||||
|
||||
# Check that prioritized transaction is in getblocktemplate()
|
||||
# Check that prioritized transaction is not in getblocktemplate()
|
||||
# (not updated because no new txns)
|
||||
in_block_template = False
|
||||
block_template = self.nodes[0].getblocktemplate()
|
||||
for tx in block_template['transactions']:
|
||||
if tx['hash'] == priority_tx_0:
|
||||
in_block_template = True
|
||||
break
|
||||
# NOTE: getblocktemplate() should return prioritized transaction, but is not
|
||||
# Noted by user in issue #1884
|
||||
assert_equal(in_block_template, False)
|
||||
|
||||
# Sending a new transaction will make getblocktemplate refresh within 10s
|
||||
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
|
||||
|
||||
# Check that prioritized transaction is not in getblocktemplate()
|
||||
# (too soon)
|
||||
in_block_template = False
|
||||
block_template = self.nodes[0].getblocktemplate()
|
||||
for tx in block_template['transactions']:
|
||||
if tx['hash'] == priority_tx_0:
|
||||
in_block_template = True
|
||||
break
|
||||
assert_equal(in_block_template, False)
|
||||
|
||||
# Check that prioritized transaction is in getblocktemplate()
|
||||
# getblocktemplate() will refresh after 1 min, or after 10 sec if new transaction is added to mempool
|
||||
# Mempool is probed every 10 seconds. We'll give getblocktemplate() a maximum of 30 seconds to refresh
|
||||
block_template = self.nodes[0].getblocktemplate()
|
||||
start = time.time();
|
||||
in_block_template = False
|
||||
while in_block_template == False:
|
||||
for tx in block_template['transactions']:
|
||||
if tx['hash'] == priority_tx_0:
|
||||
in_block_template = True
|
||||
break
|
||||
if time.time() - start > 30:
|
||||
raise AssertionError("Test timed out because prioritised transaction was not returned by getblocktemplate within 30 seconds.")
|
||||
time.sleep(1)
|
||||
block_template = self.nodes[0].getblocktemplate()
|
||||
|
||||
assert(in_block_template)
|
||||
|
||||
# Node 1 doesn't get the next block, so this *shouldn't* be mined despite being prioritized on node 1
|
||||
priority_tx_1 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 0.1)
|
||||
self.nodes[1].prioritisetransaction(priority_tx_1, 1000, int(3 * base_fee * COIN))
|
||||
|
||||
117
qa/rpc-tests/proton_test.py
Executable file
117
qa/rpc-tests/proton_test.py
Executable file
@@ -0,0 +1,117 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2017 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
#
|
||||
# Test Proton interface (provides AMQP 1.0 messaging support).
|
||||
#
|
||||
# Requirements:
|
||||
# Python library for Qpid Proton:
|
||||
# https://pypi.python.org/pypi/python-qpid-proton
|
||||
# To install:
|
||||
# pip install python-qpid-proton
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import assert_equal, bytes_to_hex_str, \
|
||||
start_nodes
|
||||
|
||||
from proton.handlers import MessagingHandler
|
||||
from proton.reactor import Container
|
||||
|
||||
import threading
|
||||
|
||||
|
||||
class Server(MessagingHandler):
|
||||
|
||||
def __init__(self, url, limit):
|
||||
super(Server, self).__init__()
|
||||
self.url = url
|
||||
self.counter = limit
|
||||
self.blockhashes = []
|
||||
self.txids = []
|
||||
self.blockseq = -1
|
||||
self.txidseq = -1
|
||||
|
||||
def on_start(self, event):
|
||||
print "Proton listening on:", self.url
|
||||
self.container = event.container
|
||||
self.acceptor = event.container.listen(self.url)
|
||||
|
||||
def on_message(self, event):
|
||||
m = event.message
|
||||
hash = bytes_to_hex_str(m.body)
|
||||
sequence = m.properties['x-opt-sequence-number']
|
||||
if m.subject == "hashtx":
|
||||
self.txids.append(hash)
|
||||
|
||||
# Test that sequence id is incrementing
|
||||
assert(sequence == 1 + self.txidseq)
|
||||
self.txidseq = sequence
|
||||
elif m.subject == "hashblock":
|
||||
self.blockhashes.append(hash)
|
||||
|
||||
# Test that sequence id is incrementing
|
||||
assert(sequence == 1 + self.blockseq)
|
||||
self.blockseq = sequence
|
||||
|
||||
self.counter = self.counter - 1
|
||||
if self.counter == 0:
|
||||
self.container.stop()
|
||||
|
||||
|
||||
class ProtonTest (BitcoinTestFramework):
|
||||
|
||||
port = 25672
|
||||
numblocks = 10 # must be even, as two nodes generate equal number
|
||||
assert(numblocks % 2 == 0)
|
||||
|
||||
def setup_nodes(self):
|
||||
|
||||
# Launch proton server in background thread
|
||||
# It terminates after receiving numblocks * 2 messages (one for coinbase, one for block)
|
||||
self.server = Server("127.0.0.1:%i" % self.port, self.numblocks * 2)
|
||||
self.container = Container(self.server)
|
||||
self.t1 = threading.Thread(target=self.container.run)
|
||||
self.t1.start()
|
||||
|
||||
return start_nodes(4, self.options.tmpdir, extra_args=[
|
||||
['-experimentalfeatures', '-debug=amqp', '-amqppubhashtx=amqp://127.0.0.1:'+str(self.port),
|
||||
'-amqppubhashblock=amqp://127.0.0.1:'+str(self.port)],
|
||||
[],
|
||||
[],
|
||||
[]
|
||||
])
|
||||
|
||||
def run_test(self):
|
||||
self.sync_all()
|
||||
baseheight = self.nodes[0].getblockcount() # 200 blocks already mined
|
||||
|
||||
# generate some blocks
|
||||
self.nodes[0].generate(self.numblocks/2)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(self.numblocks/2)
|
||||
self.sync_all()
|
||||
|
||||
# wait for server to finish
|
||||
self.t1.join()
|
||||
|
||||
# sequence numbers have already been checked in the server's message handler
|
||||
|
||||
# sanity check that we have the right number of block hashes and coinbase txids
|
||||
assert_equal(len(self.server.blockhashes), self.numblocks)
|
||||
assert_equal(len(self.server.txids), self.numblocks)
|
||||
|
||||
# verify that each block has the correct coinbase txid
|
||||
for i in xrange(0, self.numblocks):
|
||||
height = baseheight + i + 1
|
||||
blockhash = self.nodes[0].getblockhash(height)
|
||||
assert_equal(blockhash, self.server.blockhashes[i])
|
||||
resp = self.nodes[0].getblock(blockhash)
|
||||
coinbase = resp["tx"][0]
|
||||
assert_equal(coinbase, self.server.txids[i])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
ProtonTest().main()
|
||||
@@ -2,14 +2,14 @@
|
||||
# Copyright (c) 2015 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
import socket
|
||||
import traceback, sys
|
||||
from binascii import hexlify
|
||||
import time, os
|
||||
|
||||
from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import assert_equal, start_nodes
|
||||
|
||||
import socket
|
||||
import os
|
||||
|
||||
'''
|
||||
Test plan:
|
||||
- Start bitcoind's with different proxy configurations
|
||||
|
||||
@@ -12,8 +12,12 @@
|
||||
# ********
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import initialize_chain_clean, start_node, \
|
||||
connect_nodes, stop_node, sync_blocks
|
||||
|
||||
import os.path
|
||||
import time
|
||||
|
||||
def calc_usage(blockdir):
|
||||
return sum(os.path.getsize(blockdir+f) for f in os.listdir(blockdir) if os.path.isfile(blockdir+f))/(1024*1024)
|
||||
@@ -190,7 +194,7 @@ class PruneTest(BitcoinTestFramework):
|
||||
try:
|
||||
self.nodes[2].getblock(self.forkhash)
|
||||
raise AssertionError("Old block wasn't pruned so can't test redownload")
|
||||
except JSONRPCException as e:
|
||||
except JSONRPCException:
|
||||
print "Will need to redownload block",self.forkheight
|
||||
|
||||
# Verify that we have enough history to reorg back to the fork point
|
||||
@@ -253,7 +257,7 @@ class PruneTest(BitcoinTestFramework):
|
||||
newtx = newtx + rawtx[94:]
|
||||
# Appears to be ever so slightly faster to sign with SIGHASH_NONE
|
||||
signresult = node.signrawtransaction(newtx,None,None,"NONE")
|
||||
txid = node.sendrawtransaction(signresult["hex"], True)
|
||||
node.sendrawtransaction(signresult["hex"], True)
|
||||
# Mine a full sized block which will be these transactions we just created
|
||||
node.generate(1)
|
||||
|
||||
|
||||
@@ -9,9 +9,11 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from pprint import pprint
|
||||
from time import sleep
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_nodes, connect_nodes_bi
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
# Create one-input, one-output, no-fee transaction:
|
||||
class RawTransactionsTest(BitcoinTestFramework):
|
||||
@@ -104,18 +106,20 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
|
||||
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])
|
||||
mSigObjValid = self.nodes[2].validateaddress(mSigObj)
|
||||
assert_equal(mSigObjValid['isvalid'], True)
|
||||
|
||||
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2);
|
||||
decTx = self.nodes[0].gettransaction(txId)
|
||||
rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
|
||||
sPK = rawTx['vout'][0]['scriptPubKey']['hex']
|
||||
[sPK] # hush pyflakes
|
||||
self.sync_all()
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
#THIS IS A INCOMPLETE FEATURE
|
||||
#NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
|
||||
assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable
|
||||
# THIS IS A INCOMPLETE FEATURE
|
||||
# NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
|
||||
assert_equal(self.nodes[2].getbalance(), bal) # for now, assume the funds of a 2of3 multisig tx are not marked as spendable
|
||||
|
||||
txDetails = self.nodes[0].gettransaction(txId, True)
|
||||
rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
|
||||
@@ -130,10 +134,10 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
outputs = { self.nodes[0].getnewaddress() : 2.199 }
|
||||
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
|
||||
rawTxPartialSigned = self.nodes[1].signrawtransaction(rawTx, inputs)
|
||||
assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
|
||||
|
||||
assert_equal(rawTxPartialSigned['complete'], False) # node1 only has one key, can't comp. sign the tx
|
||||
|
||||
rawTxSigned = self.nodes[2].signrawtransaction(rawTx, inputs)
|
||||
assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
|
||||
assert_equal(rawTxSigned['complete'], True) # node2 can sign the tx compl., own two of three keys
|
||||
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
|
||||
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
|
||||
self.sync_all()
|
||||
|
||||
@@ -6,15 +6,14 @@
|
||||
# Exercise the listreceivedbyaddress API
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
def get_sub_array_from_array(object_array, to_match):
|
||||
'''
|
||||
Finds and returns a sub array from an array of arrays.
|
||||
to_match should be a unique idetifier of a sub array
|
||||
Finds and returns a sub array from an array of arrays.
|
||||
to_match should be a unique idetifier of a sub array
|
||||
'''
|
||||
num_matched = 0
|
||||
for item in object_array:
|
||||
all_match = True
|
||||
for key,value in to_match.items():
|
||||
@@ -26,12 +25,12 @@ def get_sub_array_from_array(object_array, to_match):
|
||||
return []
|
||||
|
||||
def check_array_result(object_array, to_match, expected, should_not_find = False):
|
||||
"""
|
||||
Pass in array of JSON objects, a dictionary with key/value pairs
|
||||
to match against, and another dictionary with expected key/value
|
||||
pairs.
|
||||
If the should_not_find flag is true, to_match should not be found in object_array
|
||||
"""
|
||||
'''
|
||||
Pass in array of JSON objects, a dictionary with key/value pairs
|
||||
to match against, and another dictionary with expected key/value
|
||||
pairs.
|
||||
If the should_not_find flag is true, to_match should not be found in object_array
|
||||
'''
|
||||
if should_not_find == True:
|
||||
expected = { }
|
||||
num_matched = 0
|
||||
@@ -62,49 +61,49 @@ class ReceivedByTest(BitcoinTestFramework):
|
||||
txid = self.nodes[0].sendtoaddress(addr, 0.1)
|
||||
self.sync_all()
|
||||
|
||||
#Check not listed in listreceivedbyaddress because has 0 confirmations
|
||||
# Check not listed in listreceivedbyaddress because has 0 confirmations
|
||||
check_array_result(self.nodes[1].listreceivedbyaddress(),
|
||||
{"address":addr},
|
||||
{ },
|
||||
True)
|
||||
#Bury Tx under 10 block so it will be returned by listreceivedbyaddress
|
||||
# Bury Tx under 10 block so it will be returned by listreceivedbyaddress
|
||||
self.nodes[1].generate(10)
|
||||
self.sync_all()
|
||||
check_array_result(self.nodes[1].listreceivedbyaddress(),
|
||||
{"address":addr},
|
||||
{"address":addr, "account":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]})
|
||||
#With min confidence < 10
|
||||
# With min confidence < 10
|
||||
check_array_result(self.nodes[1].listreceivedbyaddress(5),
|
||||
{"address":addr},
|
||||
{"address":addr, "account":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]})
|
||||
#With min confidence > 10, should not find Tx
|
||||
# With min confidence > 10, should not find Tx
|
||||
check_array_result(self.nodes[1].listreceivedbyaddress(11),{"address":addr},{ },True)
|
||||
|
||||
#Empty Tx
|
||||
# Empty Tx
|
||||
addr = self.nodes[1].getnewaddress()
|
||||
check_array_result(self.nodes[1].listreceivedbyaddress(0,True),
|
||||
{"address":addr},
|
||||
{"address":addr, "account":"", "amount":0, "confirmations":0, "txids":[]})
|
||||
|
||||
'''
|
||||
getreceivedbyaddress Test
|
||||
getreceivedbyaddress Test
|
||||
'''
|
||||
# Send from node 0 to 1
|
||||
addr = self.nodes[1].getnewaddress()
|
||||
txid = self.nodes[0].sendtoaddress(addr, 0.1)
|
||||
self.sync_all()
|
||||
|
||||
#Check balance is 0 because of 0 confirmations
|
||||
# Check balance is 0 because of 0 confirmations
|
||||
balance = self.nodes[1].getreceivedbyaddress(addr)
|
||||
if balance != Decimal("0.0"):
|
||||
raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance))
|
||||
|
||||
#Check balance is 0.1
|
||||
# Check balance is 0.1
|
||||
balance = self.nodes[1].getreceivedbyaddress(addr,0)
|
||||
if balance != Decimal("0.1"):
|
||||
raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance))
|
||||
|
||||
#Bury Tx under 10 block so it will be returned by the default getreceivedbyaddress
|
||||
# Bury Tx under 10 block so it will be returned by the default getreceivedbyaddress
|
||||
self.nodes[1].generate(10)
|
||||
self.sync_all()
|
||||
balance = self.nodes[1].getreceivedbyaddress(addr)
|
||||
@@ -112,15 +111,15 @@ class ReceivedByTest(BitcoinTestFramework):
|
||||
raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance))
|
||||
|
||||
'''
|
||||
listreceivedbyaccount + getreceivedbyaccount Test
|
||||
listreceivedbyaccount + getreceivedbyaccount Test
|
||||
'''
|
||||
#set pre-state
|
||||
# set pre-state
|
||||
addrArr = self.nodes[1].getnewaddress()
|
||||
account = self.nodes[1].getaccount(addrArr)
|
||||
received_by_account_json = get_sub_array_from_array(self.nodes[1].listreceivedbyaccount(),{"account":account})
|
||||
if len(received_by_account_json) == 0:
|
||||
raise AssertionError("No accounts found in node")
|
||||
balance_by_account = rec_by_accountArr = self.nodes[1].getreceivedbyaccount(account)
|
||||
balance_by_account = self.nodes[1].getreceivedbyaccount(account)
|
||||
|
||||
txid = self.nodes[0].sendtoaddress(addr, 0.1)
|
||||
self.sync_all()
|
||||
@@ -147,7 +146,7 @@ class ReceivedByTest(BitcoinTestFramework):
|
||||
if balance != balance_by_account + Decimal("0.1"):
|
||||
raise AssertionError("Wrong balance returned by getreceivedbyaccount, %0.2f"%(balance))
|
||||
|
||||
#Create a new account named "mynewaccount" that has a 0 balance
|
||||
# Create a new account named "mynewaccount" that has a 0 balance
|
||||
self.nodes[1].getaccountaddress("mynewaccount")
|
||||
received_by_account_json = get_sub_array_from_array(self.nodes[1].listreceivedbyaccount(0,True),{"account":"mynewaccount"})
|
||||
if len(received_by_account_json) == 0:
|
||||
|
||||
@@ -6,9 +6,11 @@
|
||||
#
|
||||
# Test -reindex with CheckBlockIndex
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
import os.path
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_node, stop_node, wait_bitcoinds
|
||||
|
||||
|
||||
class ReindexTest(BitcoinTestFramework):
|
||||
|
||||
|
||||
84
qa/rpc-tests/reorg_limit.py
Executable file
84
qa/rpc-tests/reorg_limit.py
Executable file
@@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2017 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
#
|
||||
# Test reorg limit
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import (
|
||||
check_node,
|
||||
connect_nodes_bi,
|
||||
sync_blocks,
|
||||
)
|
||||
from time import sleep
|
||||
|
||||
def check_stopped(i, timeout=10):
|
||||
stopped = False
|
||||
for x in xrange(1, timeout):
|
||||
ret = check_node(i)
|
||||
if ret is None:
|
||||
sleep(1)
|
||||
else:
|
||||
stopped = True
|
||||
break
|
||||
return stopped
|
||||
|
||||
class ReorgLimitTest(BitcoinTestFramework):
|
||||
|
||||
def run_test(self):
|
||||
assert(self.nodes[0].getblockcount() == 200)
|
||||
assert(self.nodes[2].getblockcount() == 200)
|
||||
|
||||
self.split_network()
|
||||
|
||||
print "Test the maximum-allowed reorg:"
|
||||
print "Mine 99 blocks on Node 0"
|
||||
self.nodes[0].generate(99)
|
||||
assert(self.nodes[0].getblockcount() == 299)
|
||||
assert(self.nodes[2].getblockcount() == 200)
|
||||
|
||||
print "Mine competing 100 blocks on Node 2"
|
||||
self.nodes[2].generate(100)
|
||||
assert(self.nodes[0].getblockcount() == 299)
|
||||
assert(self.nodes[2].getblockcount() == 300)
|
||||
|
||||
print "Connect nodes to force a reorg"
|
||||
connect_nodes_bi(self.nodes, 0, 2)
|
||||
self.is_network_split = False
|
||||
sync_blocks(self.nodes)
|
||||
|
||||
print "Check Node 0 is still running and on the correct chain"
|
||||
assert(self.nodes[0].getblockcount() == 300)
|
||||
|
||||
self.split_network()
|
||||
|
||||
print "Test the minimum-rejected reorg:"
|
||||
print "Mine 100 blocks on Node 0"
|
||||
self.nodes[0].generate(100)
|
||||
assert(self.nodes[0].getblockcount() == 400)
|
||||
assert(self.nodes[2].getblockcount() == 300)
|
||||
|
||||
print "Mine competing 101 blocks on Node 2"
|
||||
self.nodes[2].generate(101)
|
||||
assert(self.nodes[0].getblockcount() == 400)
|
||||
assert(self.nodes[2].getblockcount() == 401)
|
||||
|
||||
print "Sync nodes to force a reorg"
|
||||
connect_nodes_bi(self.nodes, 0, 2)
|
||||
self.is_network_split = False
|
||||
# sync_blocks uses RPC calls to wait for nodes to be synced, so don't
|
||||
# call it here, because it will have a non-specific connection error
|
||||
# when Node 0 stops. Instead, we explicitly check for the process itself
|
||||
# to stop.
|
||||
|
||||
print "Check Node 0 is no longer running"
|
||||
assert(check_stopped(0))
|
||||
|
||||
# Dummy stop to enable the test to tear down
|
||||
self.nodes[0].stop = lambda: True
|
||||
|
||||
if __name__ == '__main__':
|
||||
ReorgLimitTest().main()
|
||||
@@ -7,14 +7,15 @@
|
||||
# Test REST interface
|
||||
#
|
||||
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from struct import *
|
||||
from test_framework.util import assert_equal, assert_greater_than, \
|
||||
initialize_chain_clean, start_nodes, connect_nodes_bi
|
||||
|
||||
import struct
|
||||
import binascii
|
||||
import json
|
||||
import StringIO
|
||||
import decimal
|
||||
from decimal import Decimal
|
||||
|
||||
try:
|
||||
import http.client as httplib
|
||||
@@ -28,11 +29,11 @@ except ImportError:
|
||||
def deser_uint256(f):
|
||||
r = 0
|
||||
for i in range(8):
|
||||
t = unpack(b"<I", f.read(4))[0]
|
||||
t = struct.unpack(b"<I", f.read(4))[0]
|
||||
r += t << (i * 32)
|
||||
return r
|
||||
|
||||
#allows simple http get calls
|
||||
# allows simple http get calls
|
||||
def http_get_call(host, port, path, response_object = 0):
|
||||
conn = httplib.HTTPConnection(host, port)
|
||||
conn.request('GET', path)
|
||||
@@ -42,7 +43,7 @@ def http_get_call(host, port, path, response_object = 0):
|
||||
|
||||
return conn.getresponse().read()
|
||||
|
||||
#allows simple http post calls with a request body
|
||||
# allows simple http post calls with a request body
|
||||
def http_post_call(host, port, path, requestdata = '', response_object = 0):
|
||||
conn = httplib.HTTPConnection(host, port)
|
||||
conn.request('POST', path, requestdata)
|
||||
@@ -84,7 +85,7 @@ class RESTTest (BitcoinTestFramework):
|
||||
self.sync_all()
|
||||
bb_hash = self.nodes[0].getbestblockhash()
|
||||
|
||||
assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) #balance now should be 0.1 on node 1
|
||||
assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) # balance now should be 0.1 on node 1
|
||||
|
||||
# load the latest 0.1 tx over the REST API
|
||||
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.FORMAT_SEPARATOR+"json")
|
||||
@@ -119,13 +120,13 @@ class RESTTest (BitcoinTestFramework):
|
||||
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
|
||||
json_obj = json.loads(json_string)
|
||||
|
||||
#check chainTip response
|
||||
# check chainTip response
|
||||
assert_equal(json_obj['chaintipHash'], bb_hash)
|
||||
|
||||
#make sure there is no utox in the response because this oupoint has been spent
|
||||
# make sure there is no utox in the response because this oupoint has been spent
|
||||
assert_equal(len(json_obj['utxos']), 0)
|
||||
|
||||
#check bitmap
|
||||
# check bitmap
|
||||
assert_equal(json_obj['bitmap'], "0")
|
||||
|
||||
|
||||
@@ -138,24 +139,24 @@ class RESTTest (BitcoinTestFramework):
|
||||
assert_equal(len(json_obj['utxos']), 1)
|
||||
assert_equal(json_obj['bitmap'], "10")
|
||||
|
||||
#test binary response
|
||||
# test binary response
|
||||
bb_hash = self.nodes[0].getbestblockhash()
|
||||
|
||||
binaryRequest = b'\x01\x02'
|
||||
binaryRequest += binascii.unhexlify(txid)
|
||||
binaryRequest += pack("i", n);
|
||||
binaryRequest += struct.pack("i", n);
|
||||
binaryRequest += binascii.unhexlify(vintx);
|
||||
binaryRequest += pack("i", 0);
|
||||
binaryRequest += struct.pack("i", 0);
|
||||
|
||||
bin_response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', binaryRequest)
|
||||
output = StringIO.StringIO()
|
||||
output.write(bin_response)
|
||||
output.seek(0)
|
||||
chainHeight = unpack("i", output.read(4))[0]
|
||||
chainHeight = struct.unpack("i", output.read(4))[0]
|
||||
hashFromBinResponse = hex(deser_uint256(output))[2:].zfill(65).rstrip("L")
|
||||
|
||||
assert_equal(bb_hash, hashFromBinResponse) #check if getutxo's chaintip during calculation was fine
|
||||
assert_equal(chainHeight, 102) #chain height must be 102
|
||||
assert_equal(bb_hash, hashFromBinResponse) # check if getutxo's chaintip during calculation was fine
|
||||
assert_equal(chainHeight, 102) # chain height must be 102
|
||||
|
||||
|
||||
############################
|
||||
@@ -176,41 +177,41 @@ class RESTTest (BitcoinTestFramework):
|
||||
json_request = '/'+txid+'-'+str(n)
|
||||
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
|
||||
json_obj = json.loads(json_string)
|
||||
assert_equal(len(json_obj['utxos']), 0) #there should be a outpoint because it has just added to the mempool
|
||||
assert_equal(len(json_obj['utxos']), 0) # there should be a outpoint because it has just added to the mempool
|
||||
|
||||
json_request = '/checkmempool/'+txid+'-'+str(n)
|
||||
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
|
||||
json_obj = json.loads(json_string)
|
||||
assert_equal(len(json_obj['utxos']), 1) #there should be a outpoint because it has just added to the mempool
|
||||
assert_equal(len(json_obj['utxos']), 1) # there should be a outpoint because it has just added to the mempool
|
||||
|
||||
#do some invalid requests
|
||||
# do some invalid requests
|
||||
json_request = '{"checkmempool'
|
||||
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'json', json_request, True)
|
||||
assert_equal(response.status, 500) #must be a 500 because we send a invalid json request
|
||||
assert_equal(response.status, 500) # must be a 500 because we send a invalid json request
|
||||
|
||||
json_request = '{"checkmempool'
|
||||
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', json_request, True)
|
||||
assert_equal(response.status, 500) #must be a 500 because we send a invalid bin request
|
||||
assert_equal(response.status, 500) # must be a 500 because we send a invalid bin request
|
||||
|
||||
response = http_post_call(url.hostname, url.port, '/rest/getutxos/checkmempool'+self.FORMAT_SEPARATOR+'bin', '', True)
|
||||
assert_equal(response.status, 500) #must be a 500 because we send a invalid bin request
|
||||
assert_equal(response.status, 500) # must be a 500 because we send a invalid bin request
|
||||
|
||||
#test limits
|
||||
# test limits
|
||||
json_request = '/checkmempool/'
|
||||
for x in range(0, 20):
|
||||
json_request += txid+'-'+str(n)+'/'
|
||||
json_request = json_request.rstrip("/")
|
||||
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
|
||||
assert_equal(response.status, 500) #must be a 500 because we exceeding the limits
|
||||
assert_equal(response.status, 500) # must be a 500 because we exceeding the limits
|
||||
|
||||
json_request = '/checkmempool/'
|
||||
for x in range(0, 15):
|
||||
json_request += txid+'-'+str(n)+'/'
|
||||
json_request = json_request.rstrip("/");
|
||||
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
|
||||
assert_equal(response.status, 200) #must be a 500 because we exceeding the limits
|
||||
assert_equal(response.status, 200) # must be a 500 because we exceeding the limits
|
||||
|
||||
self.nodes[0].generate(1) #generate block to not affect upcoming tests
|
||||
self.nodes[0].generate(1) # generate block to not affect upcoming tests
|
||||
self.sync_all()
|
||||
|
||||
################
|
||||
@@ -263,11 +264,11 @@ class RESTTest (BitcoinTestFramework):
|
||||
response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
|
||||
assert_equal(response_header_json.status, 200)
|
||||
response_header_json_str = response_header_json.read()
|
||||
json_obj = json.loads(response_header_json_str, parse_float=decimal.Decimal)
|
||||
assert_equal(len(json_obj), 1) #ensure that there is one header in the json response
|
||||
assert_equal(json_obj[0]['hash'], bb_hash) #request/response hash should be the same
|
||||
json_obj = json.loads(response_header_json_str, parse_float=Decimal)
|
||||
assert_equal(len(json_obj), 1) # ensure that there is one header in the json response
|
||||
assert_equal(json_obj[0]['hash'], bb_hash) # request/response hash should be the same
|
||||
|
||||
#compare with normal RPC block response
|
||||
# compare with normal RPC block response
|
||||
rpc_block_json = self.nodes[0].getblock(bb_hash)
|
||||
assert_equal(json_obj[0]['hash'], rpc_block_json['hash'])
|
||||
assert_equal(json_obj[0]['confirmations'], rpc_block_json['confirmations'])
|
||||
@@ -281,14 +282,14 @@ class RESTTest (BitcoinTestFramework):
|
||||
assert_equal(json_obj[0]['chainwork'], rpc_block_json['chainwork'])
|
||||
assert_equal(json_obj[0]['previousblockhash'], rpc_block_json['previousblockhash'])
|
||||
|
||||
#see if we can get 5 headers in one response
|
||||
# see if we can get 5 headers in one response
|
||||
self.nodes[1].generate(5)
|
||||
self.sync_all()
|
||||
response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/5/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
|
||||
assert_equal(response_header_json.status, 200)
|
||||
response_header_json_str = response_header_json.read()
|
||||
json_obj = json.loads(response_header_json_str)
|
||||
assert_equal(len(json_obj), 5) #now we should have 5 header objects
|
||||
assert_equal(len(json_obj), 5) # now we should have 5 header objects
|
||||
|
||||
# do tx test
|
||||
tx_hash = block_json_obj['tx'][0]['txid'];
|
||||
@@ -328,20 +329,20 @@ class RESTTest (BitcoinTestFramework):
|
||||
newblockhash = self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
#check if the 3 tx show up in the new block
|
||||
# check if the 3 tx show up in the new block
|
||||
json_string = http_get_call(url.hostname, url.port, '/rest/block/'+newblockhash[0]+self.FORMAT_SEPARATOR+'json')
|
||||
json_obj = json.loads(json_string)
|
||||
for tx in json_obj['tx']:
|
||||
if not 'coinbase' in tx['vin'][0]: #exclude coinbase
|
||||
if not 'coinbase' in tx['vin'][0]: # exclude coinbase
|
||||
assert_equal(tx['txid'] in txs, True)
|
||||
|
||||
#check the same but without tx details
|
||||
# check the same but without tx details
|
||||
json_string = http_get_call(url.hostname, url.port, '/rest/block/notxdetails/'+newblockhash[0]+self.FORMAT_SEPARATOR+'json')
|
||||
json_obj = json.loads(json_string)
|
||||
for tx in txs:
|
||||
assert_equal(tx in json_obj['tx'], True)
|
||||
|
||||
#test rest bestblock
|
||||
# test rest bestblock
|
||||
bb_hash = self.nodes[0].getbestblockhash()
|
||||
|
||||
json_string = http_get_call(url.hostname, url.port, '/rest/chaininfo.json')
|
||||
@@ -349,4 +350,4 @@ class RESTTest (BitcoinTestFramework):
|
||||
assert_equal(json_obj['bestblockhash'], bb_hash)
|
||||
|
||||
if __name__ == '__main__':
|
||||
RESTTest ().main ()
|
||||
RESTTest().main()
|
||||
|
||||
@@ -5,19 +5,20 @@
|
||||
|
||||
# Test for -rpcbind, as well as -rpcallowip and -rpcconnect
|
||||
|
||||
# Add python-bitcoinrpc to module search path:
|
||||
# Dependency: python-bitcoinrpc
|
||||
|
||||
from test_framework.util import assert_equal, check_json_precision, \
|
||||
initialize_chain, start_nodes, stop_nodes, wait_bitcoinds, \
|
||||
bitcoind_processes, rpc_port
|
||||
from test_framework.authproxy import AuthServiceProxy
|
||||
from test_framework.netutil import addr_to_hex, get_bind_addrs, all_interfaces
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import json
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import traceback
|
||||
|
||||
from test_framework.util import *
|
||||
from test_framework.netutil import *
|
||||
|
||||
def run_bind_test(tmpdir, allow_ips, connect_to, addresses, expected):
|
||||
'''
|
||||
Start a node with requested rpcallowip and rpcbind parameters,
|
||||
@@ -119,7 +120,6 @@ def main():
|
||||
check_json_precision()
|
||||
|
||||
success = False
|
||||
nodes = []
|
||||
try:
|
||||
print("Initializing test directory "+options.tmpdir)
|
||||
if not os.path.isdir(options.tmpdir):
|
||||
|
||||
@@ -20,13 +20,12 @@ NOTE: This test is very slow and may take more than 40 minutes to run.
|
||||
'''
|
||||
|
||||
from test_framework.test_framework import ComparisonTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.comptool import TestInstance, TestManager
|
||||
from test_framework.mininode import *
|
||||
from test_framework.blocktools import *
|
||||
from test_framework.script import *
|
||||
import logging
|
||||
import copy
|
||||
from test_framework.mininode import NetworkThread
|
||||
from test_framework.blocktools import create_block, create_coinbase, create_transaction
|
||||
from test_framework.script import CScript, CScriptOp, CScriptNum, OPCODES_BY_NAME
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
script_valid_file = "../../src/test/data/script_valid.json"
|
||||
@@ -54,9 +53,8 @@ class ScriptTestFile(object):
|
||||
|
||||
# Helper for parsing the flags specified in the .json files
|
||||
SCRIPT_VERIFY_NONE = 0
|
||||
SCRIPT_VERIFY_P2SH = 1
|
||||
SCRIPT_VERIFY_P2SH = 1
|
||||
SCRIPT_VERIFY_STRICTENC = 1 << 1
|
||||
SCRIPT_VERIFY_DERSIG = 1 << 2
|
||||
SCRIPT_VERIFY_LOW_S = 1 << 3
|
||||
SCRIPT_VERIFY_NULLDUMMY = 1 << 4
|
||||
SCRIPT_VERIFY_SIGPUSHONLY = 1 << 5
|
||||
@@ -64,12 +62,11 @@ SCRIPT_VERIFY_MINIMALDATA = 1 << 6
|
||||
SCRIPT_VERIFY_DISCOURAGE_UPGRADABLE_NOPS = 1 << 7
|
||||
SCRIPT_VERIFY_CLEANSTACK = 1 << 8
|
||||
|
||||
flag_map = {
|
||||
flag_map = {
|
||||
"": SCRIPT_VERIFY_NONE,
|
||||
"NONE": SCRIPT_VERIFY_NONE,
|
||||
"NONE": SCRIPT_VERIFY_NONE,
|
||||
"P2SH": SCRIPT_VERIFY_P2SH,
|
||||
"STRICTENC": SCRIPT_VERIFY_STRICTENC,
|
||||
"DERSIG": SCRIPT_VERIFY_DERSIG,
|
||||
"LOW_S": SCRIPT_VERIFY_LOW_S,
|
||||
"NULLDUMMY": SCRIPT_VERIFY_NULLDUMMY,
|
||||
"SIGPUSHONLY": SCRIPT_VERIFY_SIGPUSHONLY,
|
||||
|
||||
@@ -4,7 +4,8 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_nodes
|
||||
|
||||
|
||||
class SignRawTransactionsTest(BitcoinTestFramework):
|
||||
|
||||
@@ -8,7 +8,11 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import start_node, connect_nodes, \
|
||||
sync_blocks, sync_mempools
|
||||
|
||||
import random
|
||||
from decimal import Decimal, ROUND_DOWN
|
||||
|
||||
# Construct 2 trivial P2SH's and the ScriptSigs that spend them
|
||||
# So we can create many many transactions without needing to spend
|
||||
|
||||
@@ -118,19 +118,18 @@ class AuthServiceProxy(object):
|
||||
try:
|
||||
self.__conn.request(method, path, postdata, headers)
|
||||
return self._get_response()
|
||||
except httplib.BadStatusLine as e:
|
||||
if e.line == "''": # if connection was closed, try again
|
||||
except Exception as e:
|
||||
# If connection was closed, try again.
|
||||
# Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset.
|
||||
# ConnectionResetError happens on FreeBSD with Python 3.4.
|
||||
# These classes don't exist in Python 2.x, so we can't refer to them directly.
|
||||
if ((isinstance(e, httplib.BadStatusLine) and e.line == "''")
|
||||
or e.__class__.__name__ in ('BrokenPipeError', 'ConnectionResetError')):
|
||||
self.__conn.close()
|
||||
self.__conn.request(method, path, postdata, headers)
|
||||
return self._get_response()
|
||||
else:
|
||||
raise
|
||||
except (BrokenPipeError,ConnectionResetError):
|
||||
# Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset
|
||||
# ConnectionResetError happens on FreeBSD with Python 3.4
|
||||
self.__conn.close()
|
||||
self.__conn.request(method, path, postdata, headers)
|
||||
return self._get_response()
|
||||
|
||||
def __call__(self, *args):
|
||||
AuthServiceProxy.__id_count += 1
|
||||
|
||||
@@ -3,14 +3,17 @@
|
||||
# and for constructing a getheaders message
|
||||
#
|
||||
|
||||
from mininode import *
|
||||
from mininode import CBlock, CBlockHeader, CBlockLocator, CTransaction, msg_block, msg_headers, msg_tx
|
||||
|
||||
import sys
|
||||
import cStringIO
|
||||
import dbm
|
||||
|
||||
class BlockStore(object):
|
||||
def __init__(self, datadir):
|
||||
self.blockDB = dbm.open(datadir + "/blocks", 'c')
|
||||
self.currentBlock = 0L
|
||||
|
||||
|
||||
def close(self):
|
||||
self.blockDB.close()
|
||||
|
||||
|
||||
@@ -4,11 +4,11 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
#
|
||||
|
||||
from mininode import *
|
||||
from script import CScript, CScriptOp
|
||||
from mininode import CBlock, CTransaction, CTxIn, CTxOut, COutPoint
|
||||
from script import CScript, OP_0, OP_EQUAL, OP_HASH160
|
||||
|
||||
# Create a block (with regtest difficulty)
|
||||
def create_block(hashprev, coinbase, nTime=None):
|
||||
def create_block(hashprev, coinbase, nTime=None, nBits=None):
|
||||
block = CBlock()
|
||||
if nTime is None:
|
||||
import time
|
||||
@@ -16,7 +16,10 @@ def create_block(hashprev, coinbase, nTime=None):
|
||||
else:
|
||||
block.nTime = nTime
|
||||
block.hashPrevBlock = hashprev
|
||||
block.nBits = 0x207fffff # Will break after a difficulty adjustment...
|
||||
if nBits is None:
|
||||
block.nBits = 0x200f0f0f # Will break after a difficulty adjustment...
|
||||
else:
|
||||
block.nBits = nBits
|
||||
block.vtx.append(coinbase)
|
||||
block.hashMerkleRoot = block.calc_merkle_root()
|
||||
block.calc_sha256()
|
||||
@@ -43,14 +46,24 @@ def create_coinbase(heightAdjust = 0):
|
||||
global counter
|
||||
coinbase = CTransaction()
|
||||
coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
|
||||
ser_string(serialize_script_num(counter+heightAdjust)), 0xffffffff))
|
||||
CScript([counter+heightAdjust, OP_0]), 0xffffffff))
|
||||
counter += 1
|
||||
coinbaseoutput = CTxOut()
|
||||
coinbaseoutput.nValue = 50*100000000
|
||||
coinbaseoutput.nValue = int(12.5*100000000)
|
||||
halvings = int((counter+heightAdjust)/150) # regtest
|
||||
coinbaseoutput.nValue >>= halvings
|
||||
coinbaseoutput.scriptPubKey = ""
|
||||
coinbase.vout = [ coinbaseoutput ]
|
||||
if halvings == 0: # regtest
|
||||
froutput = CTxOut()
|
||||
froutput.nValue = coinbaseoutput.nValue / 5
|
||||
# regtest
|
||||
fraddr = bytearray([0x67, 0x08, 0xe6, 0x67, 0x0d, 0xb0, 0xb9, 0x50,
|
||||
0xda, 0xc6, 0x80, 0x31, 0x02, 0x5c, 0xc5, 0xb6,
|
||||
0x32, 0x13, 0xa4, 0x91])
|
||||
froutput.scriptPubKey = CScript([OP_HASH160, fraddr, OP_EQUAL])
|
||||
coinbaseoutput.nValue -= froutput.nValue
|
||||
coinbase.vout = [ coinbaseoutput, froutput ]
|
||||
coinbase.calc_sha256()
|
||||
return coinbase
|
||||
|
||||
|
||||
@@ -4,10 +4,13 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
#
|
||||
|
||||
from mininode import *
|
||||
from mininode import CBlock, CTransaction, CInv, NodeConn, NodeConnCB, \
|
||||
msg_inv, msg_getheaders, msg_ping, msg_mempool, mininode_lock, MAX_INV_SZ
|
||||
from blockstore import BlockStore, TxStore
|
||||
from util import p2p_port
|
||||
|
||||
import time
|
||||
|
||||
'''
|
||||
This is a tool for comparing two or more bitcoinds to each other
|
||||
using a script provided.
|
||||
@@ -25,8 +28,6 @@ generator that returns TestInstance objects. See below for definition.
|
||||
# on_getheaders: provide headers via BlockStore
|
||||
# on_getdata: provide blocks via BlockStore
|
||||
|
||||
global mininode_lock
|
||||
|
||||
def wait_until(predicate, attempts=float('inf'), timeout=float('inf')):
|
||||
attempt = 0
|
||||
elapsed = 0
|
||||
|
||||
293
qa/rpc-tests/test_framework/equihash.py
Executable file
293
qa/rpc-tests/test_framework/equihash.py
Executable file
@@ -0,0 +1,293 @@
|
||||
from operator import itemgetter
|
||||
import struct
|
||||
|
||||
DEBUG = False
|
||||
VERBOSE = False
|
||||
|
||||
|
||||
word_size = 32
|
||||
word_mask = (1<<word_size)-1
|
||||
|
||||
def expand_array(inp, out_len, bit_len, byte_pad=0):
|
||||
assert bit_len >= 8 and word_size >= 7+bit_len
|
||||
bit_len_mask = (1<<bit_len)-1
|
||||
|
||||
out_width = (bit_len+7)/8 + byte_pad
|
||||
assert out_len == 8*out_width*len(inp)/bit_len
|
||||
out = bytearray(out_len)
|
||||
|
||||
bit_len_mask = (1 << bit_len) - 1
|
||||
|
||||
# The acc_bits least-significant bits of acc_value represent a bit sequence
|
||||
# in big-endian order.
|
||||
acc_bits = 0;
|
||||
acc_value = 0;
|
||||
|
||||
j = 0
|
||||
for i in xrange(len(inp)):
|
||||
acc_value = ((acc_value << 8) & word_mask) | inp[i]
|
||||
acc_bits += 8
|
||||
|
||||
# When we have bit_len or more bits in the accumulator, write the next
|
||||
# output element.
|
||||
if acc_bits >= bit_len:
|
||||
acc_bits -= bit_len
|
||||
for x in xrange(byte_pad, out_width):
|
||||
out[j+x] = (
|
||||
# Big-endian
|
||||
acc_value >> (acc_bits+(8*(out_width-x-1)))
|
||||
) & (
|
||||
# Apply bit_len_mask across byte boundaries
|
||||
(bit_len_mask >> (8*(out_width-x-1))) & 0xFF
|
||||
)
|
||||
j += out_width
|
||||
|
||||
return out
|
||||
|
||||
def compress_array(inp, out_len, bit_len, byte_pad=0):
|
||||
assert bit_len >= 8 and word_size >= 7+bit_len
|
||||
|
||||
in_width = (bit_len+7)/8 + byte_pad
|
||||
assert out_len == bit_len*len(inp)/(8*in_width)
|
||||
out = bytearray(out_len)
|
||||
|
||||
bit_len_mask = (1 << bit_len) - 1
|
||||
|
||||
# The acc_bits least-significant bits of acc_value represent a bit sequence
|
||||
# in big-endian order.
|
||||
acc_bits = 0;
|
||||
acc_value = 0;
|
||||
|
||||
j = 0
|
||||
for i in xrange(out_len):
|
||||
# When we have fewer than 8 bits left in the accumulator, read the next
|
||||
# input element.
|
||||
if acc_bits < 8:
|
||||
acc_value = ((acc_value << bit_len) & word_mask) | inp[j]
|
||||
for x in xrange(byte_pad, in_width):
|
||||
acc_value = acc_value | (
|
||||
(
|
||||
# Apply bit_len_mask across byte boundaries
|
||||
inp[j+x] & ((bit_len_mask >> (8*(in_width-x-1))) & 0xFF)
|
||||
) << (8*(in_width-x-1))); # Big-endian
|
||||
j += in_width
|
||||
acc_bits += bit_len
|
||||
|
||||
acc_bits -= 8
|
||||
out[i] = (acc_value >> acc_bits) & 0xFF
|
||||
|
||||
return out
|
||||
|
||||
def get_indices_from_minimal(minimal, bit_len):
|
||||
eh_index_size = 4
|
||||
assert (bit_len+7)/8 <= eh_index_size
|
||||
len_indices = 8*eh_index_size*len(minimal)/bit_len
|
||||
byte_pad = eh_index_size - (bit_len+7)/8
|
||||
expanded = expand_array(minimal, len_indices, bit_len, byte_pad)
|
||||
return [struct.unpack('>I', expanded[i:i+4])[0] for i in range(0, len_indices, eh_index_size)]
|
||||
|
||||
def get_minimal_from_indices(indices, bit_len):
|
||||
eh_index_size = 4
|
||||
assert (bit_len+7)/8 <= eh_index_size
|
||||
len_indices = len(indices)*eh_index_size
|
||||
min_len = bit_len*len_indices/(8*eh_index_size)
|
||||
byte_pad = eh_index_size - (bit_len+7)/8
|
||||
byte_indices = bytearray(''.join([struct.pack('>I', i) for i in indices]))
|
||||
return compress_array(byte_indices, min_len, bit_len, byte_pad)
|
||||
|
||||
|
||||
def hash_nonce(digest, nonce):
|
||||
for i in range(8):
|
||||
digest.update(struct.pack('<I', nonce >> (32*i)))
|
||||
|
||||
def hash_xi(digest, xi):
|
||||
digest.update(struct.pack('<I', xi))
|
||||
return digest # For chaining
|
||||
|
||||
def count_zeroes(h):
|
||||
# Convert to binary string
|
||||
if type(h) == bytearray:
|
||||
h = ''.join('{0:08b}'.format(x, 'b') for x in h)
|
||||
else:
|
||||
h = ''.join('{0:08b}'.format(ord(x), 'b') for x in h)
|
||||
# Count leading zeroes
|
||||
return (h+'1').index('1')
|
||||
|
||||
def has_collision(ha, hb, i, l):
|
||||
res = [ha[j] == hb[j] for j in range((i-1)*l/8, i*l/8)]
|
||||
return reduce(lambda x, y: x and y, res)
|
||||
|
||||
def distinct_indices(a, b):
|
||||
for i in a:
|
||||
for j in b:
|
||||
if i == j:
|
||||
return False
|
||||
return True
|
||||
|
||||
def xor(ha, hb):
|
||||
return bytearray(a^b for a,b in zip(ha,hb))
|
||||
|
||||
def gbp_basic(digest, n, k):
|
||||
'''Implementation of Basic Wagner's algorithm for the GBP.'''
|
||||
validate_params(n, k)
|
||||
collision_length = n/(k+1)
|
||||
hash_length = (k+1)*((collision_length+7)//8)
|
||||
indices_per_hash_output = 512/n
|
||||
|
||||
# 1) Generate first list
|
||||
if DEBUG: print 'Generating first list'
|
||||
X = []
|
||||
tmp_hash = ''
|
||||
for i in range(0, 2**(collision_length+1)):
|
||||
r = i % indices_per_hash_output
|
||||
if r == 0:
|
||||
# X_i = H(I||V||x_i)
|
||||
curr_digest = digest.copy()
|
||||
hash_xi(curr_digest, i/indices_per_hash_output)
|
||||
tmp_hash = curr_digest.digest()
|
||||
X.append((
|
||||
expand_array(bytearray(tmp_hash[r*n/8:(r+1)*n/8]),
|
||||
hash_length, collision_length),
|
||||
(i,)
|
||||
))
|
||||
|
||||
# 3) Repeat step 2 until 2n/(k+1) bits remain
|
||||
for i in range(1, k):
|
||||
if DEBUG: print 'Round %d:' % i
|
||||
|
||||
# 2a) Sort the list
|
||||
if DEBUG: print '- Sorting list'
|
||||
X.sort(key=itemgetter(0))
|
||||
if DEBUG and VERBOSE:
|
||||
for Xi in X[-32:]:
|
||||
print '%s %s' % (print_hash(Xi[0]), Xi[1])
|
||||
|
||||
if DEBUG: print '- Finding collisions'
|
||||
Xc = []
|
||||
while len(X) > 0:
|
||||
# 2b) Find next set of unordered pairs with collisions on first n/(k+1) bits
|
||||
j = 1
|
||||
while j < len(X):
|
||||
if not has_collision(X[-1][0], X[-1-j][0], i, collision_length):
|
||||
break
|
||||
j += 1
|
||||
|
||||
# 2c) Store tuples (X_i ^ X_j, (i, j)) on the table
|
||||
for l in range(0, j-1):
|
||||
for m in range(l+1, j):
|
||||
# Check that there are no duplicate indices in tuples i and j
|
||||
if distinct_indices(X[-1-l][1], X[-1-m][1]):
|
||||
if X[-1-l][1][0] < X[-1-m][1][0]:
|
||||
concat = X[-1-l][1] + X[-1-m][1]
|
||||
else:
|
||||
concat = X[-1-m][1] + X[-1-l][1]
|
||||
Xc.append((xor(X[-1-l][0], X[-1-m][0]), concat))
|
||||
|
||||
# 2d) Drop this set
|
||||
while j > 0:
|
||||
X.pop(-1)
|
||||
j -= 1
|
||||
# 2e) Replace previous list with new list
|
||||
X = Xc
|
||||
|
||||
# k+1) Find a collision on last 2n(k+1) bits
|
||||
if DEBUG:
|
||||
print 'Final round:'
|
||||
print '- Sorting list'
|
||||
X.sort(key=itemgetter(0))
|
||||
if DEBUG and VERBOSE:
|
||||
for Xi in X[-32:]:
|
||||
print '%s %s' % (print_hash(Xi[0]), Xi[1])
|
||||
if DEBUG: print '- Finding collisions'
|
||||
solns = []
|
||||
while len(X) > 0:
|
||||
j = 1
|
||||
while j < len(X):
|
||||
if not (has_collision(X[-1][0], X[-1-j][0], k, collision_length) and
|
||||
has_collision(X[-1][0], X[-1-j][0], k+1, collision_length)):
|
||||
break
|
||||
j += 1
|
||||
|
||||
for l in range(0, j-1):
|
||||
for m in range(l+1, j):
|
||||
res = xor(X[-1-l][0], X[-1-m][0])
|
||||
if count_zeroes(res) == 8*hash_length and distinct_indices(X[-1-l][1], X[-1-m][1]):
|
||||
if DEBUG and VERBOSE:
|
||||
print 'Found solution:'
|
||||
print '- %s %s' % (print_hash(X[-1-l][0]), X[-1-l][1])
|
||||
print '- %s %s' % (print_hash(X[-1-m][0]), X[-1-m][1])
|
||||
if X[-1-l][1][0] < X[-1-m][1][0]:
|
||||
solns.append(list(X[-1-l][1] + X[-1-m][1]))
|
||||
else:
|
||||
solns.append(list(X[-1-m][1] + X[-1-l][1]))
|
||||
|
||||
# 2d) Drop this set
|
||||
while j > 0:
|
||||
X.pop(-1)
|
||||
j -= 1
|
||||
return [get_minimal_from_indices(soln, collision_length+1) for soln in solns]
|
||||
|
||||
def gbp_validate(digest, minimal, n, k):
|
||||
validate_params(n, k)
|
||||
collision_length = n/(k+1)
|
||||
hash_length = (k+1)*((collision_length+7)//8)
|
||||
indices_per_hash_output = 512/n
|
||||
solution_width = (1 << k)*(collision_length+1)//8
|
||||
|
||||
if len(minimal) != solution_width:
|
||||
print 'Invalid solution length: %d (expected %d)' % \
|
||||
(len(minimal), solution_width)
|
||||
return False
|
||||
|
||||
X = []
|
||||
for i in get_indices_from_minimal(minimal, collision_length+1):
|
||||
r = i % indices_per_hash_output
|
||||
# X_i = H(I||V||x_i)
|
||||
curr_digest = digest.copy()
|
||||
hash_xi(curr_digest, i/indices_per_hash_output)
|
||||
tmp_hash = curr_digest.digest()
|
||||
X.append((
|
||||
expand_array(bytearray(tmp_hash[r*n/8:(r+1)*n/8]),
|
||||
hash_length, collision_length),
|
||||
(i,)
|
||||
))
|
||||
|
||||
for r in range(1, k+1):
|
||||
Xc = []
|
||||
for i in range(0, len(X), 2):
|
||||
if not has_collision(X[i][0], X[i+1][0], r, collision_length):
|
||||
print 'Invalid solution: invalid collision length between StepRows'
|
||||
return False
|
||||
if X[i+1][1][0] < X[i][1][0]:
|
||||
print 'Invalid solution: Index tree incorrectly ordered'
|
||||
return False
|
||||
if not distinct_indices(X[i][1], X[i+1][1]):
|
||||
print 'Invalid solution: duplicate indices'
|
||||
return False
|
||||
Xc.append((xor(X[i][0], X[i+1][0]), X[i][1] + X[i+1][1]))
|
||||
X = Xc
|
||||
|
||||
if len(X) != 1:
|
||||
print 'Invalid solution: incorrect length after end of rounds: %d' % len(X)
|
||||
return False
|
||||
|
||||
if count_zeroes(X[0][0]) != 8*hash_length:
|
||||
print 'Invalid solution: incorrect number of zeroes: %d' % count_zeroes(X[0][0])
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def zcash_person(n, k):
|
||||
return b'ZcashPoW' + struct.pack('<II', n, k)
|
||||
|
||||
def print_hash(h):
|
||||
if type(h) == bytearray:
|
||||
return ''.join('{0:02x}'.format(x, 'x') for x in h)
|
||||
else:
|
||||
return ''.join('{0:02x}'.format(ord(x), 'x') for x in h)
|
||||
|
||||
def validate_params(n, k):
|
||||
if (k >= n):
|
||||
raise ValueError('n must be larger than k')
|
||||
if (((n/(k+1))+1) >= 32):
|
||||
raise ValueError('Parameters must satisfy n/(k+1)+1 < 32')
|
||||
@@ -30,9 +30,18 @@ from threading import RLock
|
||||
from threading import Thread
|
||||
import logging
|
||||
import copy
|
||||
from pyblake2 import blake2b
|
||||
|
||||
from .equihash import (
|
||||
gbp_basic,
|
||||
gbp_validate,
|
||||
hash_nonce,
|
||||
zcash_person,
|
||||
)
|
||||
|
||||
OVERWINTER_PROTO_VERSION = 170003
|
||||
BIP0031_VERSION = 60000
|
||||
MY_VERSION = 60001 # past bip-31 for ping/pong
|
||||
MY_VERSION = 170002 # past bip-31 for ping/pong
|
||||
MY_SUBVERSION = "/python-mininode-tester:0.0.1/"
|
||||
|
||||
MAX_INV_SZ = 50000
|
||||
@@ -234,6 +243,36 @@ def ser_int_vector(l):
|
||||
return r
|
||||
|
||||
|
||||
def deser_char_vector(f):
|
||||
nit = struct.unpack("<B", f.read(1))[0]
|
||||
if nit == 253:
|
||||
nit = struct.unpack("<H", f.read(2))[0]
|
||||
elif nit == 254:
|
||||
nit = struct.unpack("<I", f.read(4))[0]
|
||||
elif nit == 255:
|
||||
nit = struct.unpack("<Q", f.read(8))[0]
|
||||
r = []
|
||||
for i in xrange(nit):
|
||||
t = struct.unpack("<B", f.read(1))[0]
|
||||
r.append(t)
|
||||
return r
|
||||
|
||||
|
||||
def ser_char_vector(l):
|
||||
r = ""
|
||||
if len(l) < 253:
|
||||
r = chr(len(l))
|
||||
elif len(l) < 0x10000:
|
||||
r = chr(253) + struct.pack("<H", len(l))
|
||||
elif len(l) < 0x100000000L:
|
||||
r = chr(254) + struct.pack("<I", len(l))
|
||||
else:
|
||||
r = chr(255) + struct.pack("<Q", len(l))
|
||||
for i in l:
|
||||
r += chr(i)
|
||||
return r
|
||||
|
||||
|
||||
# Objects that map to bitcoind objects, which can be serialized/deserialized
|
||||
|
||||
class CAddress(object):
|
||||
@@ -307,6 +346,154 @@ class CBlockLocator(object):
|
||||
% (self.nVersion, repr(self.vHave))
|
||||
|
||||
|
||||
G1_PREFIX_MASK = 0x02
|
||||
G2_PREFIX_MASK = 0x0a
|
||||
|
||||
class ZCProof(object):
|
||||
def __init__(self):
|
||||
self.g_A = None
|
||||
self.g_A_prime = None
|
||||
self.g_B = None
|
||||
self.g_B_prime = None
|
||||
self.g_C = None
|
||||
self.g_C_prime = None
|
||||
self.g_K = None
|
||||
self.g_H = None
|
||||
|
||||
def deserialize(self, f):
|
||||
def deser_g1(self, f):
|
||||
leadingByte = struct.unpack("<B", f.read(1))[0]
|
||||
return {
|
||||
'y_lsb': leadingByte & 1,
|
||||
'x': f.read(32),
|
||||
}
|
||||
def deser_g2(self, f):
|
||||
leadingByte = struct.unpack("<B", f.read(1))[0]
|
||||
return {
|
||||
'y_gt': leadingByte & 1,
|
||||
'x': f.read(64),
|
||||
}
|
||||
self.g_A = deser_g1(f)
|
||||
self.g_A_prime = deser_g1(f)
|
||||
self.g_B = deser_g2(f)
|
||||
self.g_B_prime = deser_g1(f)
|
||||
self.g_C = deser_g1(f)
|
||||
self.g_C_prime = deser_g1(f)
|
||||
self.g_K = deser_g1(f)
|
||||
self.g_H = deser_g1(f)
|
||||
|
||||
def serialize(self):
|
||||
def ser_g1(self, p):
|
||||
return chr(G1_PREFIX_MASK | p['y_lsb']) + p['x']
|
||||
def ser_g2(self, p):
|
||||
return chr(G2_PREFIX_MASK | p['y_gt']) + p['x']
|
||||
r = ""
|
||||
r += ser_g1(self.g_A)
|
||||
r += ser_g1(self.g_A_prime)
|
||||
r += ser_g2(self.g_B)
|
||||
r += ser_g1(self.g_B_prime)
|
||||
r += ser_g1(self.g_C)
|
||||
r += ser_g1(self.g_C_prime)
|
||||
r += ser_g1(self.g_K)
|
||||
r += ser_g1(self.g_H)
|
||||
return r
|
||||
|
||||
def __repr__(self):
|
||||
return "ZCProof(g_A=%s g_A_prime=%s g_B=%s g_B_prime=%s g_C=%s g_C_prime=%s g_K=%s g_H=%s)" \
|
||||
% (repr(self.g_A), repr(self.g_A_prime),
|
||||
repr(self.g_B), repr(self.g_B_prime),
|
||||
repr(self.g_C), repr(self.g_C_prime),
|
||||
repr(self.g_K), repr(self.g_H))
|
||||
|
||||
|
||||
ZC_NUM_JS_INPUTS = 2
|
||||
ZC_NUM_JS_OUTPUTS = 2
|
||||
|
||||
ZC_NOTEPLAINTEXT_LEADING = 1
|
||||
ZC_V_SIZE = 8
|
||||
ZC_RHO_SIZE = 32
|
||||
ZC_R_SIZE = 32
|
||||
ZC_MEMO_SIZE = 512
|
||||
|
||||
ZC_NOTEPLAINTEXT_SIZE = (
|
||||
ZC_NOTEPLAINTEXT_LEADING +
|
||||
ZC_V_SIZE +
|
||||
ZC_RHO_SIZE +
|
||||
ZC_R_SIZE +
|
||||
ZC_MEMO_SIZE
|
||||
)
|
||||
|
||||
NOTEENCRYPTION_AUTH_BYTES = 16
|
||||
|
||||
ZC_NOTECIPHERTEXT_SIZE = (
|
||||
ZC_NOTEPLAINTEXT_SIZE +
|
||||
NOTEENCRYPTION_AUTH_BYTES
|
||||
)
|
||||
|
||||
class JSDescription(object):
|
||||
def __init__(self):
|
||||
self.vpub_old = 0
|
||||
self.vpub_new = 0
|
||||
self.anchor = 0
|
||||
self.nullifiers = [0] * ZC_NUM_JS_INPUTS
|
||||
self.commitments = [0] * ZC_NUM_JS_OUTPUTS
|
||||
self.onetimePubKey = 0
|
||||
self.randomSeed = 0
|
||||
self.macs = [0] * ZC_NUM_JS_INPUTS
|
||||
self.proof = None
|
||||
self.ciphertexts = [None] * ZC_NUM_JS_OUTPUTS
|
||||
|
||||
def deserialize(self, f):
|
||||
self.vpub_old = struct.unpack("<q", f.read(8))[0]
|
||||
self.vpub_new = struct.unpack("<q", f.read(8))[0]
|
||||
self.anchor = deser_uint256(f)
|
||||
|
||||
self.nullifiers = []
|
||||
for i in range(ZC_NUM_JS_INPUTS):
|
||||
self.nullifiers.append(deser_uint256(f))
|
||||
|
||||
self.commitments = []
|
||||
for i in range(ZC_NUM_JS_OUTPUTS):
|
||||
self.commitments.append(deser_uint256(f))
|
||||
|
||||
self.onetimePubKey = deser_uint256(f)
|
||||
self.randomSeed = deser_uint256(f)
|
||||
|
||||
self.macs = []
|
||||
for i in range(ZC_NUM_JS_INPUTS):
|
||||
self.macs.append(deser_uint256(f))
|
||||
|
||||
self.proof = ZCProof()
|
||||
self.proof.deserialize(f)
|
||||
|
||||
self.ciphertexts = []
|
||||
for i in range(ZC_NUM_JS_OUTPUTS):
|
||||
self.ciphertexts.append(f.read(ZC_NOTECIPHERTEXT_SIZE))
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r += struct.pack("<q", self.vpub_old)
|
||||
r += struct.pack("<q", self.vpub_new)
|
||||
r += ser_uint256(self.anchor)
|
||||
for i in range(ZC_NUM_JS_INPUTS):
|
||||
r += ser_uint256(self.nullifiers[i])
|
||||
for i in range(ZC_NUM_JS_OUTPUTS):
|
||||
r += ser_uint256(self.commitments[i])
|
||||
r += ser_uint256(self.onetimePubKey)
|
||||
r += ser_uint256(self.randomSeed)
|
||||
for i in range(ZC_NUM_JS_INPUTS):
|
||||
r += ser_uint256(self.macs[i])
|
||||
r += self.proof.serialize()
|
||||
for i in range(ZC_NUM_JS_OUTPUTS):
|
||||
r += ser_uint256(self.ciphertexts[i])
|
||||
return r
|
||||
|
||||
def __repr__(self):
|
||||
return "JSDescription(vpub_old=%i.%08i vpub_new=%i.%08i anchor=%064x onetimePubKey=%064x randomSeed=%064x proof=%s)" \
|
||||
% (self.vpub_old, self.vpub_new, self.anchor,
|
||||
self.onetimePubKey, self.randomSeed, repr(self.proof))
|
||||
|
||||
|
||||
class COutPoint(object):
|
||||
def __init__(self, hash=0, n=0):
|
||||
self.hash = hash
|
||||
@@ -382,6 +569,9 @@ class CTransaction(object):
|
||||
self.vin = []
|
||||
self.vout = []
|
||||
self.nLockTime = 0
|
||||
self.vjoinsplit = []
|
||||
self.joinSplitPubKey = None
|
||||
self.joinSplitSig = None
|
||||
self.sha256 = None
|
||||
self.hash = None
|
||||
else:
|
||||
@@ -389,6 +579,9 @@ class CTransaction(object):
|
||||
self.vin = copy.deepcopy(tx.vin)
|
||||
self.vout = copy.deepcopy(tx.vout)
|
||||
self.nLockTime = tx.nLockTime
|
||||
self.vjoinsplit = copy.deepcopy(tx.vjoinsplit)
|
||||
self.joinSplitPubKey = tx.joinSplitPubKey
|
||||
self.joinSplitSig = tx.joinSplitSig
|
||||
self.sha256 = None
|
||||
self.hash = None
|
||||
|
||||
@@ -397,6 +590,11 @@ class CTransaction(object):
|
||||
self.vin = deser_vector(f, CTxIn)
|
||||
self.vout = deser_vector(f, CTxOut)
|
||||
self.nLockTime = struct.unpack("<I", f.read(4))[0]
|
||||
if self.nVersion >= 2:
|
||||
self.vjoinsplit = deser_vector(f, JSDescription)
|
||||
if len(self.vjoinsplit) > 0:
|
||||
self.joinSplitPubKey = deser_uint256(f)
|
||||
self.joinSplitSig = f.read(64)
|
||||
self.sha256 = None
|
||||
self.hash = None
|
||||
|
||||
@@ -406,6 +604,11 @@ class CTransaction(object):
|
||||
r += ser_vector(self.vin)
|
||||
r += ser_vector(self.vout)
|
||||
r += struct.pack("<I", self.nLockTime)
|
||||
if self.nVersion >= 2:
|
||||
r += ser_vector(self.vjoinsplit)
|
||||
if len(self.vjoinsplit) > 0:
|
||||
r += ser_uint256(self.joinSplitPubKey)
|
||||
r += self.joinSplitSig
|
||||
return r
|
||||
|
||||
def rehash(self):
|
||||
@@ -425,8 +628,15 @@ class CTransaction(object):
|
||||
return True
|
||||
|
||||
def __repr__(self):
|
||||
return "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i)" \
|
||||
r = "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i" \
|
||||
% (self.nVersion, repr(self.vin), repr(self.vout), self.nLockTime)
|
||||
if self.nVersion >= 2:
|
||||
r += " vjoinsplit=%s" % repr(self.vjoinsplit)
|
||||
if len(self.vjoinsplit) > 0:
|
||||
r += " joinSplitPubKey=%064x joinSplitSig=%064x" \
|
||||
(self.joinSplitPubKey, self.joinSplitSig)
|
||||
r += ")"
|
||||
return r
|
||||
|
||||
|
||||
class CBlockHeader(object):
|
||||
@@ -437,20 +647,24 @@ class CBlockHeader(object):
|
||||
self.nVersion = header.nVersion
|
||||
self.hashPrevBlock = header.hashPrevBlock
|
||||
self.hashMerkleRoot = header.hashMerkleRoot
|
||||
self.hashReserved = header.hashReserved
|
||||
self.nTime = header.nTime
|
||||
self.nBits = header.nBits
|
||||
self.nNonce = header.nNonce
|
||||
self.nSolution = header.nSolution
|
||||
self.sha256 = header.sha256
|
||||
self.hash = header.hash
|
||||
self.calc_sha256()
|
||||
|
||||
def set_null(self):
|
||||
self.nVersion = 1
|
||||
self.nVersion = 4
|
||||
self.hashPrevBlock = 0
|
||||
self.hashMerkleRoot = 0
|
||||
self.hashReserved = 0
|
||||
self.nTime = 0
|
||||
self.nBits = 0
|
||||
self.nNonce = 0
|
||||
self.nSolution = []
|
||||
self.sha256 = None
|
||||
self.hash = None
|
||||
|
||||
@@ -458,9 +672,11 @@ class CBlockHeader(object):
|
||||
self.nVersion = struct.unpack("<i", f.read(4))[0]
|
||||
self.hashPrevBlock = deser_uint256(f)
|
||||
self.hashMerkleRoot = deser_uint256(f)
|
||||
self.hashReserved = deser_uint256(f)
|
||||
self.nTime = struct.unpack("<I", f.read(4))[0]
|
||||
self.nBits = struct.unpack("<I", f.read(4))[0]
|
||||
self.nNonce = struct.unpack("<I", f.read(4))[0]
|
||||
self.nNonce = deser_uint256(f)
|
||||
self.nSolution = deser_char_vector(f)
|
||||
self.sha256 = None
|
||||
self.hash = None
|
||||
|
||||
@@ -469,9 +685,11 @@ class CBlockHeader(object):
|
||||
r += struct.pack("<i", self.nVersion)
|
||||
r += ser_uint256(self.hashPrevBlock)
|
||||
r += ser_uint256(self.hashMerkleRoot)
|
||||
r += ser_uint256(self.hashReserved)
|
||||
r += struct.pack("<I", self.nTime)
|
||||
r += struct.pack("<I", self.nBits)
|
||||
r += struct.pack("<I", self.nNonce)
|
||||
r += ser_uint256(self.nNonce)
|
||||
r += ser_char_vector(self.nSolution)
|
||||
return r
|
||||
|
||||
def calc_sha256(self):
|
||||
@@ -480,9 +698,11 @@ class CBlockHeader(object):
|
||||
r += struct.pack("<i", self.nVersion)
|
||||
r += ser_uint256(self.hashPrevBlock)
|
||||
r += ser_uint256(self.hashMerkleRoot)
|
||||
r += ser_uint256(self.hashReserved)
|
||||
r += struct.pack("<I", self.nTime)
|
||||
r += struct.pack("<I", self.nBits)
|
||||
r += struct.pack("<I", self.nNonce)
|
||||
r += ser_uint256(self.nNonce)
|
||||
r += ser_char_vector(self.nSolution)
|
||||
self.sha256 = uint256_from_str(hash256(r))
|
||||
self.hash = hash256(r)[::-1].encode('hex_codec')
|
||||
|
||||
@@ -492,9 +712,9 @@ class CBlockHeader(object):
|
||||
return self.sha256
|
||||
|
||||
def __repr__(self):
|
||||
return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
|
||||
% (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
|
||||
time.ctime(self.nTime), self.nBits, self.nNonce)
|
||||
return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x hashReserved=%064x nTime=%s nBits=%08x nNonce=%064x nSolution=%s)" \
|
||||
% (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot, self.hashReserved,
|
||||
time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.nSolution))
|
||||
|
||||
|
||||
class CBlock(CBlockHeader):
|
||||
@@ -525,7 +745,13 @@ class CBlock(CBlockHeader):
|
||||
hashes = newhashes
|
||||
return uint256_from_str(hashes[0])
|
||||
|
||||
def is_valid(self):
|
||||
def is_valid(self, n=48, k=5):
|
||||
# H(I||...
|
||||
digest = blake2b(digest_size=(512/n)*n/8, person=zcash_person(n, k))
|
||||
digest.update(super(CBlock, self).serialize()[:108])
|
||||
hash_nonce(digest, self.nNonce)
|
||||
if not gbp_validate(self.nSolution, digest, n, k):
|
||||
return False
|
||||
self.calc_sha256()
|
||||
target = uint256_from_compact(self.nBits)
|
||||
if self.sha256 > target:
|
||||
@@ -537,17 +763,31 @@ class CBlock(CBlockHeader):
|
||||
return False
|
||||
return True
|
||||
|
||||
def solve(self):
|
||||
self.calc_sha256()
|
||||
def solve(self, n=48, k=5):
|
||||
target = uint256_from_compact(self.nBits)
|
||||
while self.sha256 > target:
|
||||
# H(I||...
|
||||
digest = blake2b(digest_size=(512/n)*n/8, person=zcash_person(n, k))
|
||||
digest.update(super(CBlock, self).serialize()[:108])
|
||||
self.nNonce = 0
|
||||
while True:
|
||||
# H(I||V||...
|
||||
curr_digest = digest.copy()
|
||||
hash_nonce(curr_digest, self.nNonce)
|
||||
# (x_1, x_2, ...) = A(I, V, n, k)
|
||||
solns = gbp_basic(curr_digest, n, k)
|
||||
for soln in solns:
|
||||
assert(gbp_validate(curr_digest, soln, n, k))
|
||||
self.nSolution = soln
|
||||
self.rehash()
|
||||
if self.sha256 <= target:
|
||||
return
|
||||
self.nNonce += 1
|
||||
self.rehash()
|
||||
|
||||
def __repr__(self):
|
||||
return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
|
||||
return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x hashReserved=%064x nTime=%s nBits=%08x nNonce=%064x nSolution=%s vtx=%s)" \
|
||||
% (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
|
||||
time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
|
||||
self.hashReserved, time.ctime(self.nTime), self.nBits,
|
||||
self.nNonce, repr(self.nSolution), repr(self.vtx))
|
||||
|
||||
|
||||
class CUnsignedAlert(object):
|
||||
@@ -629,8 +869,12 @@ class CAlert(object):
|
||||
class msg_version(object):
|
||||
command = "version"
|
||||
|
||||
def __init__(self):
|
||||
self.nVersion = MY_VERSION
|
||||
def __init__(self, overwintered=False):
|
||||
if overwintered:
|
||||
self.nVersion = OVERWINTER_PROTO_VERSION
|
||||
else:
|
||||
self.nVersion = MY_VERSION
|
||||
|
||||
self.nServices = 1
|
||||
self.nTime = time.time()
|
||||
self.addrTo = CAddress()
|
||||
@@ -1082,12 +1326,12 @@ class NodeConn(asyncore.dispatcher):
|
||||
"mempool": msg_mempool
|
||||
}
|
||||
MAGIC_BYTES = {
|
||||
"mainnet": "\xf9\xbe\xb4\xd9", # mainnet
|
||||
"testnet3": "\x0b\x11\x09\x07", # testnet3
|
||||
"regtest": "\xfa\xbf\xb5\xda" # regtest
|
||||
"mainnet": "\x24\xe9\x27\x64", # mainnet
|
||||
"testnet3": "\xfa\x1a\xf9\xbf", # testnet3
|
||||
"regtest": "\xaa\xe8\x3f\x5f" # regtest
|
||||
}
|
||||
|
||||
def __init__(self, dstaddr, dstport, rpc, callback, net="regtest"):
|
||||
def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", overwintered=False):
|
||||
asyncore.dispatcher.__init__(self, map=mininode_socket_map)
|
||||
self.log = logging.getLogger("NodeConn(%s:%d)" % (dstaddr, dstport))
|
||||
self.dstaddr = dstaddr
|
||||
@@ -1104,7 +1348,7 @@ class NodeConn(asyncore.dispatcher):
|
||||
self.disconnect = False
|
||||
|
||||
# stuff version msg into sendbuf
|
||||
vt = msg_version()
|
||||
vt = msg_version(overwintered)
|
||||
vt.addrTo.ip = self.dstaddr
|
||||
vt.addrTo.port = self.dstport
|
||||
vt.addrFrom.ip = "0.0.0.0"
|
||||
|
||||
@@ -24,10 +24,10 @@ if sys.version > '3':
|
||||
bchr = lambda x: bytes([x])
|
||||
bord = lambda x: x
|
||||
|
||||
import copy
|
||||
import struct
|
||||
import binascii
|
||||
|
||||
import test_framework.bignum
|
||||
from test_framework import bignum
|
||||
|
||||
MAX_SCRIPT_SIZE = 10000
|
||||
MAX_SCRIPT_ELEMENT_SIZE = 520
|
||||
@@ -666,7 +666,7 @@ class CScript(bytes):
|
||||
else:
|
||||
other = CScriptOp.encode_op_pushdata(bignum.bn2vch(other))
|
||||
elif isinstance(other, (bytes, bytearray)):
|
||||
other = CScriptOp.encode_op_pushdata(other)
|
||||
other = bytes(CScriptOp.encode_op_pushdata(other))
|
||||
return other
|
||||
|
||||
def __add__(self, other):
|
||||
|
||||
@@ -13,8 +13,11 @@ import shutil
|
||||
import tempfile
|
||||
import traceback
|
||||
|
||||
from authproxy import AuthServiceProxy, JSONRPCException
|
||||
from util import *
|
||||
from authproxy import JSONRPCException
|
||||
from util import assert_equal, check_json_precision, \
|
||||
initialize_chain, initialize_chain_clean, \
|
||||
start_nodes, connect_nodes_bi, stop_nodes, \
|
||||
sync_blocks, sync_mempools, wait_bitcoinds
|
||||
|
||||
|
||||
class BitcoinTestFramework(object):
|
||||
|
||||
@@ -21,8 +21,7 @@ import subprocess
|
||||
import time
|
||||
import re
|
||||
|
||||
from authproxy import AuthServiceProxy, JSONRPCException
|
||||
from util import *
|
||||
from authproxy import AuthServiceProxy
|
||||
|
||||
def p2p_port(n):
|
||||
return 11000 + n + os.getpid()%999
|
||||
@@ -153,7 +152,7 @@ def initialize_chain_clean(test_dir, num_nodes):
|
||||
Useful if a test case wants complete control over initialization.
|
||||
"""
|
||||
for i in range(num_nodes):
|
||||
datadir=initialize_datadir(test_dir, i)
|
||||
initialize_datadir(test_dir, i)
|
||||
|
||||
|
||||
def _rpchost_to_args(rpchost):
|
||||
@@ -214,6 +213,10 @@ def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, binary=None):
|
||||
def log_filename(dirname, n_node, logname):
|
||||
return os.path.join(dirname, "node"+str(n_node), "regtest", logname)
|
||||
|
||||
def check_node(i):
|
||||
bitcoind_processes[i].poll()
|
||||
return bitcoind_processes[i].returncode
|
||||
|
||||
def stop_node(node, i):
|
||||
node.stop()
|
||||
bitcoind_processes[i].wait()
|
||||
@@ -369,3 +372,33 @@ def assert_raises(exc, fun, *args, **kwds):
|
||||
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
|
||||
else:
|
||||
raise AssertionError("No exception raised")
|
||||
|
||||
# Returns txid if operation was a success or None
|
||||
def wait_and_assert_operationid_status(node, myopid, in_status='success', in_errormsg=None):
|
||||
print('waiting for async operation {}'.format(myopid))
|
||||
opids = []
|
||||
opids.append(myopid)
|
||||
timeout = 300
|
||||
status = None
|
||||
errormsg = None
|
||||
txid = None
|
||||
for x in xrange(1, timeout):
|
||||
results = node.z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
if status == "failed":
|
||||
errormsg = results[0]['error']['message']
|
||||
elif status == "success":
|
||||
txid = results[0]['result']['txid']
|
||||
break
|
||||
assert_equal(in_status, status)
|
||||
if errormsg is not None:
|
||||
assert(in_errormsg is not None)
|
||||
assert_equal(in_errormsg in errormsg, True)
|
||||
if os.getenv("PYTHON_DEBUG", ""):
|
||||
print('...returned status: {}'.format(status))
|
||||
if errormsg is not None:
|
||||
print('...returned error: {}'.format(errormsg))
|
||||
return txid
|
||||
|
||||
@@ -8,10 +8,9 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from decimal import Decimal
|
||||
import os
|
||||
import shutil
|
||||
from test_framework.util import assert_equal, connect_nodes, \
|
||||
sync_blocks, gather_inputs
|
||||
|
||||
|
||||
class TxnMallTest(BitcoinTestFramework):
|
||||
|
||||
@@ -77,7 +76,7 @@ class TxnMallTest(BitcoinTestFramework):
|
||||
assert_equal(tx2["confirmations"], 0)
|
||||
|
||||
# Now give doublespend to miner:
|
||||
mutated_txid = self.nodes[2].sendrawtransaction(doublespend["hex"])
|
||||
self.nodes[2].sendrawtransaction(doublespend["hex"])
|
||||
# ... mine a block...
|
||||
self.nodes[2].generate(1)
|
||||
|
||||
|
||||
@@ -5,8 +5,13 @@
|
||||
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from time import *
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, assert_greater_than, \
|
||||
initialize_chain_clean, start_nodes, start_node, connect_nodes_bi, \
|
||||
stop_nodes, sync_blocks, sync_mempools, wait_bitcoinds
|
||||
|
||||
import time
|
||||
from decimal import Decimal
|
||||
|
||||
class WalletTest (BitcoinTestFramework):
|
||||
|
||||
@@ -72,6 +77,18 @@ class WalletTest (BitcoinTestFramework):
|
||||
node0utxos = self.nodes[0].listunspent(1)
|
||||
assert_equal(len(node0utxos), 3)
|
||||
|
||||
# Check 'generated' field of listunspent
|
||||
# Node 0: has one coinbase utxo and two regular utxos
|
||||
assert_equal(sum(int(uxto["generated"] is True) for uxto in node0utxos), 1)
|
||||
# Node 1: has 101 coinbase utxos and no regular utxos
|
||||
node1utxos = self.nodes[1].listunspent(1)
|
||||
assert_equal(len(node1utxos), 101)
|
||||
assert_equal(sum(int(uxto["generated"] is True) for uxto in node1utxos), 101)
|
||||
# Node 2: has no coinbase utxos and two regular utxos
|
||||
node2utxos = self.nodes[2].listunspent(1)
|
||||
assert_equal(len(node2utxos), 2)
|
||||
assert_equal(sum(int(uxto["generated"] is True) for uxto in node2utxos), 0)
|
||||
|
||||
# create both transactions
|
||||
txns_to_send = []
|
||||
for utxo in node0utxos:
|
||||
@@ -100,7 +117,7 @@ class WalletTest (BitcoinTestFramework):
|
||||
# Send 10 BTC normal
|
||||
address = self.nodes[0].getnewaddress("")
|
||||
self.nodes[2].settxfee(Decimal('0.001'))
|
||||
txid = self.nodes[2].sendtoaddress(address, 10, "", "", False)
|
||||
self.nodes[2].sendtoaddress(address, 10, "", "", False)
|
||||
self.sync_all()
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
@@ -110,7 +127,7 @@ class WalletTest (BitcoinTestFramework):
|
||||
assert_equal(self.nodes[0].getbalance("*"), Decimal('10.00000000'))
|
||||
|
||||
# Send 10 BTC with subtract fee from amount
|
||||
txid = self.nodes[2].sendtoaddress(address, 10, "", "", True)
|
||||
self.nodes[2].sendtoaddress(address, 10, "", "", True)
|
||||
self.sync_all()
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
@@ -120,7 +137,7 @@ class WalletTest (BitcoinTestFramework):
|
||||
assert_equal(self.nodes[0].getbalance("*"), Decimal('19.99900000'))
|
||||
|
||||
# Sendmany 10 BTC
|
||||
txid = self.nodes[2].sendmany("", {address: 10}, 0, "", [])
|
||||
self.nodes[2].sendmany("", {address: 10}, 0, "", [])
|
||||
self.sync_all()
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
@@ -130,7 +147,7 @@ class WalletTest (BitcoinTestFramework):
|
||||
assert_equal(self.nodes[0].getbalance("*"), Decimal('29.99900000'))
|
||||
|
||||
# Sendmany 10 BTC with subtract fee from amount
|
||||
txid = self.nodes[2].sendmany("", {address: 10}, 0, "", [address])
|
||||
self.nodes[2].sendmany("", {address: 10}, 0, "", [address])
|
||||
self.sync_all()
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
@@ -171,7 +188,7 @@ class WalletTest (BitcoinTestFramework):
|
||||
signedRawTx = self.nodes[1].signrawtransaction(rawTx)
|
||||
decRawTx = self.nodes[1].decoderawtransaction(signedRawTx['hex'])
|
||||
zeroValueTxid= decRawTx['txid']
|
||||
sendResp = self.nodes[1].sendrawtransaction(signedRawTx['hex'])
|
||||
self.nodes[1].sendrawtransaction(signedRawTx['hex'])
|
||||
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1) #mine a block
|
||||
@@ -238,7 +255,7 @@ class WalletTest (BitcoinTestFramework):
|
||||
self.sync_all()
|
||||
|
||||
mybalance = self.nodes[2].z_getbalance(mytaddr)
|
||||
assert_equal(self.nodes[2].z_getbalance(mytaddr), Decimal('10.0'));
|
||||
assert_equal(mybalance, Decimal('10.0'));
|
||||
|
||||
mytxdetails = self.nodes[2].gettransaction(mytxid)
|
||||
myvjoinsplits = mytxdetails["vjoinsplit"]
|
||||
@@ -254,6 +271,16 @@ class WalletTest (BitcoinTestFramework):
|
||||
for i in xrange(0,num_t_recipients):
|
||||
newtaddr = self.nodes[2].getnewaddress()
|
||||
recipients.append({"address":newtaddr, "amount":amount_per_recipient})
|
||||
|
||||
# Issue #2759 Workaround START
|
||||
# HTTP connection to node 0 may fall into a state, during the few minutes it takes to process
|
||||
# loop above to create new addresses, that when z_sendmany is called with a large amount of
|
||||
# rpc data in recipients, the connection fails with a 'broken pipe' error. Making a RPC call
|
||||
# to node 0 before calling z_sendmany appears to fix this issue, perhaps putting the HTTP
|
||||
# connection into a good state to handle a large amount of data in recipients.
|
||||
self.nodes[0].getinfo()
|
||||
# Issue #2759 Workaround END
|
||||
|
||||
try:
|
||||
self.nodes[0].z_sendmany(myzaddr, recipients)
|
||||
except JSONRPCException,e:
|
||||
@@ -271,6 +298,11 @@ class WalletTest (BitcoinTestFramework):
|
||||
for i in xrange(0,num_z_recipients):
|
||||
newzaddr = self.nodes[2].z_getnewaddress()
|
||||
recipients.append({"address":newzaddr, "amount":amount_per_recipient})
|
||||
|
||||
# Issue #2759 Workaround START
|
||||
self.nodes[0].getinfo()
|
||||
# Issue #2759 Workaround END
|
||||
|
||||
try:
|
||||
self.nodes[0].z_sendmany(myzaddr, recipients)
|
||||
except JSONRPCException,e:
|
||||
@@ -306,7 +338,7 @@ class WalletTest (BitcoinTestFramework):
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[2].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
sleep(1)
|
||||
time.sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
mytxid = results[0]["result"]["txid"]
|
||||
@@ -363,7 +395,7 @@ class WalletTest (BitcoinTestFramework):
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[2].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
sleep(1)
|
||||
time.sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
break
|
||||
|
||||
@@ -6,10 +6,11 @@
|
||||
# This is a regression test for #1941.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from time import *
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
initialize_datadir, start_nodes, start_node, connect_nodes_bi, \
|
||||
bitcoind_processes, wait_and_assert_operationid_status
|
||||
|
||||
import sys
|
||||
from decimal import Decimal
|
||||
|
||||
starttime = 1388534400
|
||||
|
||||
@@ -39,30 +40,6 @@ class Wallet1941RegressionTest (BitcoinTestFramework):
|
||||
connect_nodes_bi(self.nodes, 0, 1)
|
||||
self.sync_all()
|
||||
|
||||
def wait_and_assert_operationid_status(self, myopid, in_status='success', in_errormsg=None):
|
||||
print('waiting for async operation {}'.format(myopid))
|
||||
opids = []
|
||||
opids.append(myopid)
|
||||
timeout = 300
|
||||
status = None
|
||||
errormsg = None
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[0].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
if status == "failed":
|
||||
errormsg = results[0]['error']['message']
|
||||
break
|
||||
print('...returned status: {}'.format(status))
|
||||
print('...error msg: {}'.format(errormsg))
|
||||
assert_equal(in_status, status)
|
||||
if errormsg is not None:
|
||||
assert(in_errormsg is not None)
|
||||
assert_equal(in_errormsg in errormsg, True)
|
||||
print('...returned error: {}'.format(errormsg))
|
||||
|
||||
def run_test (self):
|
||||
print "Mining blocks..."
|
||||
|
||||
@@ -76,7 +53,7 @@ class Wallet1941RegressionTest (BitcoinTestFramework):
|
||||
recipients = []
|
||||
recipients.append({"address":myzaddr, "amount":Decimal('10.0') - Decimal('0.0001')})
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
|
||||
self.wait_and_assert_operationid_status(myopid)
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
self.nodes[0].generate(1)
|
||||
|
||||
# Ensure the block times of the latest blocks exceed the variability
|
||||
|
||||
112
qa/rpc-tests/wallet_anchorfork.py
Executable file
112
qa/rpc-tests/wallet_anchorfork.py
Executable file
@@ -0,0 +1,112 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2018 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_nodes, stop_nodes, connect_nodes_bi, \
|
||||
wait_and_assert_operationid_status, wait_bitcoinds
|
||||
from decimal import Decimal
|
||||
|
||||
class WalletAnchorForkTest (BitcoinTestFramework):
|
||||
|
||||
def setup_chain(self):
|
||||
print("Initializing test directory "+self.options.tmpdir)
|
||||
initialize_chain_clean(self.options.tmpdir, 4)
|
||||
|
||||
# Start nodes with -regtestprotectcoinbase to set fCoinbaseMustBeProtected to true.
|
||||
def setup_network(self, split=False):
|
||||
self.nodes = start_nodes(3, self.options.tmpdir, extra_args=[['-regtestprotectcoinbase', '-debug=zrpc']] * 3 )
|
||||
connect_nodes_bi(self.nodes,0,1)
|
||||
connect_nodes_bi(self.nodes,1,2)
|
||||
connect_nodes_bi(self.nodes,0,2)
|
||||
self.is_network_split=False
|
||||
self.sync_all()
|
||||
|
||||
def run_test (self):
|
||||
print "Mining blocks..."
|
||||
self.nodes[0].generate(4)
|
||||
|
||||
walletinfo = self.nodes[0].getwalletinfo()
|
||||
assert_equal(walletinfo['immature_balance'], 40)
|
||||
assert_equal(walletinfo['balance'], 0)
|
||||
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(102)
|
||||
self.sync_all()
|
||||
|
||||
assert_equal(self.nodes[0].getbalance(), 40)
|
||||
assert_equal(self.nodes[1].getbalance(), 20)
|
||||
assert_equal(self.nodes[2].getbalance(), 0)
|
||||
|
||||
# At this point in time, commitment tree is the empty root
|
||||
|
||||
# Node 0 creates a joinsplit transaction
|
||||
mytaddr0 = self.nodes[0].getnewaddress()
|
||||
myzaddr0 = self.nodes[0].z_getnewaddress()
|
||||
recipients = []
|
||||
recipients.append({"address":myzaddr0, "amount": Decimal('10.0') - Decimal('0.0001')})
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr0, recipients)
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
|
||||
# Sync up mempools and mine the transaction. All nodes have the same anchor.
|
||||
self.sync_all()
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Stop nodes.
|
||||
stop_nodes(self.nodes)
|
||||
wait_bitcoinds()
|
||||
|
||||
# Relaunch nodes and partition network into two:
|
||||
# A: node 0
|
||||
# B: node 1, 2
|
||||
self.nodes = start_nodes(3, self.options.tmpdir, extra_args=[['-regtestprotectcoinbase', '-debug=zrpc']] * 3 )
|
||||
connect_nodes_bi(self.nodes,1,2)
|
||||
|
||||
# Partition B, node 1 mines an empty block
|
||||
self.nodes[1].generate(1)
|
||||
|
||||
# Partition A, node 0 creates a joinsplit transaction
|
||||
recipients = []
|
||||
recipients.append({"address":myzaddr0, "amount": Decimal('10.0') - Decimal('0.0001')})
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr0, recipients)
|
||||
txid = wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
rawhex = self.nodes[0].getrawtransaction(txid)
|
||||
|
||||
# Partition A, node 0 mines a block with the transaction
|
||||
self.nodes[0].generate(1)
|
||||
|
||||
# Partition B, node 1 mines the same joinsplit transaction
|
||||
txid2 = self.nodes[1].sendrawtransaction(rawhex)
|
||||
assert_equal(txid, txid2)
|
||||
self.nodes[1].generate(1)
|
||||
|
||||
# Check that Partition B is one block ahead and that they have different tips
|
||||
assert_equal(self.nodes[0].getblockcount() + 1, self.nodes[1].getblockcount())
|
||||
assert( self.nodes[0].getbestblockhash() != self.nodes[1].getbestblockhash())
|
||||
|
||||
# Shut down all nodes so any in-memory state is saved to disk
|
||||
stop_nodes(self.nodes)
|
||||
wait_bitcoinds()
|
||||
|
||||
# Relaunch nodes and reconnect the entire network
|
||||
self.nodes = start_nodes(3, self.options.tmpdir, extra_args=[['-regtestprotectcoinbase', '-debug=zrpc']] * 3 )
|
||||
connect_nodes_bi(self.nodes,0, 1)
|
||||
connect_nodes_bi(self.nodes,1, 2)
|
||||
connect_nodes_bi(self.nodes,0, 2)
|
||||
|
||||
# Mine a new block and let it propagate
|
||||
self.nodes[1].generate(1)
|
||||
|
||||
# Due to a bug in v1.0.0-1.0.3, node 0 will die with a tree root assertion, so sync_all() will throw an exception.
|
||||
self.sync_all()
|
||||
|
||||
# v1.0.4 will reach here safely
|
||||
assert_equal( self.nodes[0].getbestblockhash(), self.nodes[1].getbestblockhash())
|
||||
assert_equal( self.nodes[1].getbestblockhash(), self.nodes[2].getbestblockhash())
|
||||
|
||||
if __name__ == '__main__':
|
||||
WalletAnchorForkTest().main()
|
||||
366
qa/rpc-tests/wallet_mergetoaddress.py
Executable file
366
qa/rpc-tests/wallet_mergetoaddress.py
Executable file
@@ -0,0 +1,366 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2017 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_node, connect_nodes_bi, sync_blocks, sync_mempools, \
|
||||
wait_and_assert_operationid_status
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
class WalletMergeToAddressTest (BitcoinTestFramework):
|
||||
|
||||
def setup_chain(self):
|
||||
print("Initializing test directory "+self.options.tmpdir)
|
||||
initialize_chain_clean(self.options.tmpdir, 4)
|
||||
|
||||
def setup_network(self, split=False):
|
||||
args = ['-debug=zrpcunsafe', '-experimentalfeatures', '-zmergetoaddress']
|
||||
self.nodes = []
|
||||
self.nodes.append(start_node(0, self.options.tmpdir, args))
|
||||
self.nodes.append(start_node(1, self.options.tmpdir, args))
|
||||
args2 = ['-debug=zrpcunsafe', '-experimentalfeatures', '-zmergetoaddress', '-mempooltxinputlimit=7']
|
||||
self.nodes.append(start_node(2, self.options.tmpdir, args2))
|
||||
connect_nodes_bi(self.nodes,0,1)
|
||||
connect_nodes_bi(self.nodes,1,2)
|
||||
connect_nodes_bi(self.nodes,0,2)
|
||||
self.is_network_split=False
|
||||
self.sync_all()
|
||||
|
||||
def run_test (self):
|
||||
print "Mining blocks..."
|
||||
|
||||
self.nodes[0].generate(1)
|
||||
do_not_shield_taddr = self.nodes[0].getnewaddress()
|
||||
|
||||
self.nodes[0].generate(4)
|
||||
walletinfo = self.nodes[0].getwalletinfo()
|
||||
assert_equal(walletinfo['immature_balance'], 50)
|
||||
assert_equal(walletinfo['balance'], 0)
|
||||
self.sync_all()
|
||||
self.nodes[2].generate(1)
|
||||
self.nodes[2].getnewaddress()
|
||||
self.nodes[2].generate(1)
|
||||
self.nodes[2].getnewaddress()
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(101)
|
||||
self.sync_all()
|
||||
assert_equal(self.nodes[0].getbalance(), 50)
|
||||
assert_equal(self.nodes[1].getbalance(), 10)
|
||||
assert_equal(self.nodes[2].getbalance(), 30)
|
||||
|
||||
# Shield the coinbase
|
||||
myzaddr = self.nodes[0].z_getnewaddress()
|
||||
result = self.nodes[0].z_shieldcoinbase("*", myzaddr, 0)
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Prepare some UTXOs and notes for merging
|
||||
mytaddr = self.nodes[0].getnewaddress()
|
||||
mytaddr2 = self.nodes[0].getnewaddress()
|
||||
mytaddr3 = self.nodes[0].getnewaddress()
|
||||
result = self.nodes[0].z_sendmany(myzaddr, [
|
||||
{'address': do_not_shield_taddr, 'amount': 10},
|
||||
{'address': mytaddr, 'amount': 10},
|
||||
{'address': mytaddr2, 'amount': 10},
|
||||
{'address': mytaddr3, 'amount': 10},
|
||||
], 1, 0)
|
||||
wait_and_assert_operationid_status(self.nodes[0], result)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Merging will fail because from arguments need to be in an array
|
||||
try:
|
||||
self.nodes[0].z_mergetoaddress("*", myzaddr)
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("JSON value is not an array as expected" in errorString, True)
|
||||
|
||||
# Merging will fail when trying to spend from watch-only address
|
||||
self.nodes[2].importaddress(mytaddr)
|
||||
try:
|
||||
self.nodes[2].z_mergetoaddress([mytaddr], myzaddr)
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Could not find any funds to merge" in errorString, True)
|
||||
|
||||
# Merging will fail because fee is negative
|
||||
try:
|
||||
self.nodes[0].z_mergetoaddress(["*"], myzaddr, -1)
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Amount out of range" in errorString, True)
|
||||
|
||||
# Merging will fail because fee is larger than MAX_MONEY
|
||||
try:
|
||||
self.nodes[0].z_mergetoaddress(["*"], myzaddr, Decimal('21000000.00000001'))
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Amount out of range" in errorString, True)
|
||||
|
||||
# Merging will fail because fee is larger than sum of UTXOs
|
||||
try:
|
||||
self.nodes[0].z_mergetoaddress(["*"], myzaddr, 999)
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Insufficient funds" in errorString, True)
|
||||
|
||||
# Merging will fail because transparent limit parameter must be at least 0
|
||||
try:
|
||||
self.nodes[0].z_mergetoaddress(["*"], myzaddr, Decimal('0.001'), -1)
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Limit on maximum number of UTXOs cannot be negative" in errorString, True)
|
||||
|
||||
# Merging will fail because transparent limit parameter is absurdly large
|
||||
try:
|
||||
self.nodes[0].z_mergetoaddress(["*"], myzaddr, Decimal('0.001'), 99999999999999)
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("JSON integer out of range" in errorString, True)
|
||||
|
||||
# Merging will fail because shielded limit parameter must be at least 0
|
||||
try:
|
||||
self.nodes[0].z_mergetoaddress(["*"], myzaddr, Decimal('0.001'), 50, -1)
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Limit on maximum number of notes cannot be negative" in errorString, True)
|
||||
|
||||
# Merging will fail because shielded limit parameter is absurdly large
|
||||
try:
|
||||
self.nodes[0].z_mergetoaddress(["*"], myzaddr, Decimal('0.001'), 50, 99999999999999)
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("JSON integer out of range" in errorString, True)
|
||||
|
||||
# Merging will fail for this specific case where it would spend a fee and do nothing
|
||||
try:
|
||||
self.nodes[0].z_mergetoaddress([mytaddr], mytaddr)
|
||||
assert(False)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Destination address is also the only source address, and all its funds are already merged" in errorString, True)
|
||||
|
||||
# Merge UTXOs from node 0 of value 30, standard fee of 0.00010000
|
||||
result = self.nodes[0].z_mergetoaddress([mytaddr, mytaddr2, mytaddr3], myzaddr)
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Confirm balances and that do_not_shield_taddr containing funds of 10 was left alone
|
||||
assert_equal(self.nodes[0].getbalance(), 10)
|
||||
assert_equal(self.nodes[0].z_getbalance(do_not_shield_taddr), Decimal('10.0'))
|
||||
assert_equal(self.nodes[0].z_getbalance(myzaddr), Decimal('39.99990000'))
|
||||
assert_equal(self.nodes[1].getbalance(), 40)
|
||||
assert_equal(self.nodes[2].getbalance(), 30)
|
||||
|
||||
# Shield all notes to another z-addr
|
||||
myzaddr2 = self.nodes[0].z_getnewaddress()
|
||||
result = self.nodes[0].z_mergetoaddress(["ANY_ZADDR"], myzaddr2, 0)
|
||||
assert_equal(result["mergingUTXOs"], Decimal('0'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('0'))
|
||||
assert_equal(result["mergingNotes"], Decimal('2'))
|
||||
assert_equal(result["remainingNotes"], Decimal('0'))
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
self.sync_all()
|
||||
blockhash = self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
assert_equal(len(self.nodes[0].getblock(blockhash[0])['tx']), 2)
|
||||
assert_equal(self.nodes[0].z_getbalance(myzaddr), 0)
|
||||
assert_equal(self.nodes[0].z_getbalance(myzaddr2), Decimal('39.99990000'))
|
||||
|
||||
# Shield coinbase UTXOs from any node 2 taddr, and set fee to 0
|
||||
result = self.nodes[2].z_shieldcoinbase("*", myzaddr, 0)
|
||||
wait_and_assert_operationid_status(self.nodes[2], result['opid'])
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
assert_equal(self.nodes[0].getbalance(), 10)
|
||||
assert_equal(self.nodes[0].z_getbalance(myzaddr), Decimal('30'))
|
||||
assert_equal(self.nodes[0].z_getbalance(myzaddr2), Decimal('39.99990000'))
|
||||
assert_equal(self.nodes[1].getbalance(), 60)
|
||||
assert_equal(self.nodes[2].getbalance(), 0)
|
||||
|
||||
# Merge all notes from node 0 into a node 0 taddr, and set fee to 0
|
||||
result = self.nodes[0].z_mergetoaddress(["ANY_ZADDR"], mytaddr, 0)
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
assert_equal(self.nodes[0].getbalance(), Decimal('79.99990000'))
|
||||
assert_equal(self.nodes[0].z_getbalance(do_not_shield_taddr), Decimal('10.0'))
|
||||
assert_equal(self.nodes[0].z_getbalance(mytaddr), Decimal('69.99990000'))
|
||||
assert_equal(self.nodes[0].z_getbalance(myzaddr), 0)
|
||||
assert_equal(self.nodes[0].z_getbalance(myzaddr2), 0)
|
||||
assert_equal(self.nodes[1].getbalance(), 70)
|
||||
assert_equal(self.nodes[2].getbalance(), 0)
|
||||
|
||||
# Merge all node 0 UTXOs together into a node 1 taddr, and set fee to 0
|
||||
self.nodes[1].getnewaddress() # Ensure we have an empty address
|
||||
n1taddr = self.nodes[1].getnewaddress()
|
||||
result = self.nodes[0].z_mergetoaddress(["ANY_TADDR"], n1taddr, 0)
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
assert_equal(self.nodes[0].getbalance(), 0)
|
||||
assert_equal(self.nodes[0].z_getbalance(do_not_shield_taddr), 0)
|
||||
assert_equal(self.nodes[0].z_getbalance(mytaddr), 0)
|
||||
assert_equal(self.nodes[0].z_getbalance(myzaddr), 0)
|
||||
assert_equal(self.nodes[1].getbalance(), Decimal('159.99990000'))
|
||||
assert_equal(self.nodes[1].z_getbalance(n1taddr), Decimal('79.99990000'))
|
||||
assert_equal(self.nodes[2].getbalance(), 0)
|
||||
|
||||
# Generate 800 regular UTXOs on node 0, and 20 regular UTXOs on node 2
|
||||
mytaddr = self.nodes[0].getnewaddress()
|
||||
n2taddr = self.nodes[2].getnewaddress()
|
||||
self.nodes[1].generate(1000)
|
||||
self.sync_all()
|
||||
for i in range(800):
|
||||
self.nodes[1].sendtoaddress(mytaddr, 1)
|
||||
for i in range(20):
|
||||
self.nodes[1].sendtoaddress(n2taddr, 1)
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Merging the 800 UTXOs will occur over two transactions, since max tx size is 100,000 bytes.
|
||||
# We don't verify mergingTransparentValue as UTXOs are not selected in any specific order, so value can change on each test run.
|
||||
# We set an unrealistically high limit parameter of 99999, to verify that max tx size will constrain the number of UTXOs.
|
||||
result = self.nodes[0].z_mergetoaddress([mytaddr], myzaddr, 0, 99999)
|
||||
assert_equal(result["mergingUTXOs"], Decimal('662'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('138'))
|
||||
assert_equal(result["mergingNotes"], Decimal('0'))
|
||||
assert_equal(result["mergingShieldedValue"], Decimal('0'))
|
||||
assert_equal(result["remainingNotes"], Decimal('0'))
|
||||
assert_equal(result["remainingShieldedValue"], Decimal('0'))
|
||||
remainingTransparentValue = result["remainingTransparentValue"]
|
||||
opid1 = result['opid']
|
||||
|
||||
# Verify that UTXOs are locked (not available for selection) by queuing up another merging operation
|
||||
result = self.nodes[0].z_mergetoaddress([mytaddr], myzaddr, 0, 0)
|
||||
assert_equal(result["mergingUTXOs"], Decimal('138'))
|
||||
assert_equal(result["mergingTransparentValue"], Decimal(remainingTransparentValue))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('0'))
|
||||
assert_equal(result["remainingTransparentValue"], Decimal('0'))
|
||||
assert_equal(result["mergingNotes"], Decimal('0'))
|
||||
assert_equal(result["mergingShieldedValue"], Decimal('0'))
|
||||
assert_equal(result["remainingNotes"], Decimal('0'))
|
||||
assert_equal(result["remainingShieldedValue"], Decimal('0'))
|
||||
opid2 = result['opid']
|
||||
|
||||
# wait for both aysnc operations to complete
|
||||
wait_and_assert_operationid_status(self.nodes[0], opid1)
|
||||
wait_and_assert_operationid_status(self.nodes[0], opid2)
|
||||
|
||||
# sync_all() invokes sync_mempool() but node 2's mempool limit will cause tx1 and tx2 to be rejected.
|
||||
# So instead, we sync on blocks and mempool for node 0 and node 1, and after a new block is generated
|
||||
# which mines tx1 and tx2, all nodes will have an empty mempool which can then be synced.
|
||||
sync_blocks(self.nodes[:2])
|
||||
sync_mempools(self.nodes[:2])
|
||||
# Generate enough blocks to ensure all transactions are mined
|
||||
while self.nodes[1].getmempoolinfo()['size'] > 0:
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Verify maximum number of UTXOs which node 2 can shield is limited by option -mempooltxinputlimit
|
||||
# This option is used when the limit parameter is set to 0.
|
||||
result = self.nodes[2].z_mergetoaddress([n2taddr], myzaddr, Decimal('0.0001'), 0)
|
||||
assert_equal(result["mergingUTXOs"], Decimal('7'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('13'))
|
||||
assert_equal(result["mergingNotes"], Decimal('0'))
|
||||
assert_equal(result["remainingNotes"], Decimal('0'))
|
||||
wait_and_assert_operationid_status(self.nodes[2], result['opid'])
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Verify maximum number of UTXOs which node 0 can shield is set by default limit parameter of 50
|
||||
mytaddr = self.nodes[0].getnewaddress()
|
||||
for i in range(100):
|
||||
self.nodes[1].sendtoaddress(mytaddr, 1)
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
result = self.nodes[0].z_mergetoaddress([mytaddr], myzaddr, Decimal('0.0001'))
|
||||
assert_equal(result["mergingUTXOs"], Decimal('50'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('50'))
|
||||
assert_equal(result["mergingNotes"], Decimal('0'))
|
||||
# Remaining notes are only counted if we are trying to merge any notes
|
||||
assert_equal(result["remainingNotes"], Decimal('0'))
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
|
||||
# Verify maximum number of UTXOs which node 0 can shield can be set by the limit parameter
|
||||
result = self.nodes[0].z_mergetoaddress([mytaddr], myzaddr, Decimal('0.0001'), 33)
|
||||
assert_equal(result["mergingUTXOs"], Decimal('33'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('17'))
|
||||
assert_equal(result["mergingNotes"], Decimal('0'))
|
||||
# Remaining notes are only counted if we are trying to merge any notes
|
||||
assert_equal(result["remainingNotes"], Decimal('0'))
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
# Don't sync node 2 which rejects the tx due to its mempooltxinputlimit
|
||||
sync_blocks(self.nodes[:2])
|
||||
sync_mempools(self.nodes[:2])
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Verify maximum number of notes which node 0 can shield can be set by the limit parameter
|
||||
# Also check that we can set off a second merge before the first one is complete
|
||||
|
||||
# myzaddr has 5 notes at this point
|
||||
result1 = self.nodes[0].z_mergetoaddress([myzaddr], myzaddr, 0.0001, 50, 2)
|
||||
result2 = self.nodes[0].z_mergetoaddress([myzaddr], myzaddr, 0.0001, 50, 2)
|
||||
|
||||
# First merge should select from all notes
|
||||
assert_equal(result1["mergingUTXOs"], Decimal('0'))
|
||||
# Remaining UTXOs are only counted if we are trying to merge any UTXOs
|
||||
assert_equal(result1["remainingUTXOs"], Decimal('0'))
|
||||
assert_equal(result1["mergingNotes"], Decimal('2'))
|
||||
assert_equal(result1["remainingNotes"], Decimal('3'))
|
||||
|
||||
# Second merge should ignore locked notes
|
||||
assert_equal(result2["mergingUTXOs"], Decimal('0'))
|
||||
assert_equal(result2["remainingUTXOs"], Decimal('0'))
|
||||
assert_equal(result2["mergingNotes"], Decimal('2'))
|
||||
assert_equal(result2["remainingNotes"], Decimal('1'))
|
||||
wait_and_assert_operationid_status(self.nodes[0], result1['opid'])
|
||||
wait_and_assert_operationid_status(self.nodes[0], result2['opid'])
|
||||
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Shield both UTXOs and notes to a z-addr
|
||||
result = self.nodes[0].z_mergetoaddress(["*"], myzaddr, 0, 10, 2)
|
||||
assert_equal(result["mergingUTXOs"], Decimal('10'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('7'))
|
||||
assert_equal(result["mergingNotes"], Decimal('2'))
|
||||
assert_equal(result["remainingNotes"], Decimal('1'))
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
# Don't sync node 2 which rejects the tx due to its mempooltxinputlimit
|
||||
sync_blocks(self.nodes[:2])
|
||||
sync_mempools(self.nodes[:2])
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
if __name__ == '__main__':
|
||||
WalletMergeToAddressTest().main()
|
||||
@@ -5,8 +5,11 @@
|
||||
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from time import *
|
||||
from test_framework.util import assert_equal, start_node, \
|
||||
start_nodes, connect_nodes_bi, bitcoind_processes
|
||||
|
||||
import time
|
||||
from decimal import Decimal
|
||||
|
||||
class WalletNullifiersTest (BitcoinTestFramework):
|
||||
|
||||
@@ -32,7 +35,7 @@ class WalletNullifiersTest (BitcoinTestFramework):
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[0].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
sleep(1)
|
||||
time.sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
assert_equal("success", status)
|
||||
@@ -73,7 +76,7 @@ class WalletNullifiersTest (BitcoinTestFramework):
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[0].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
sleep(1)
|
||||
time.sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
assert_equal("success", status)
|
||||
@@ -105,7 +108,7 @@ class WalletNullifiersTest (BitcoinTestFramework):
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[2].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
sleep(1)
|
||||
time.sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
assert_equal("success", status)
|
||||
@@ -146,11 +149,12 @@ class WalletNullifiersTest (BitcoinTestFramework):
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[1].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
sleep(1)
|
||||
time.sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
assert_equal("success", status)
|
||||
mytxid = results[0]["result"]["txid"]
|
||||
[mytxid] # hush pyflakes
|
||||
break
|
||||
|
||||
self.sync_all()
|
||||
@@ -166,5 +170,50 @@ class WalletNullifiersTest (BitcoinTestFramework):
|
||||
assert_equal(self.nodes[1].z_getbalance(myzaddr), zaddrremaining2)
|
||||
assert_equal(self.nodes[2].z_getbalance(myzaddr), zaddrremaining2)
|
||||
|
||||
# Test viewing keys
|
||||
|
||||
node3mined = Decimal('250.0')
|
||||
assert_equal({k: Decimal(v) for k, v in self.nodes[3].z_gettotalbalance().items()}, {
|
||||
'transparent': node3mined,
|
||||
'private': zsendmany2notevalue,
|
||||
'total': node3mined + zsendmany2notevalue,
|
||||
})
|
||||
|
||||
# add node 1 address and node 2 viewing key to node 3
|
||||
myzvkey = self.nodes[2].z_exportviewingkey(myzaddr)
|
||||
self.nodes[3].importaddress(mytaddr1)
|
||||
self.nodes[3].z_importviewingkey(myzvkey, 'whenkeyisnew', 1)
|
||||
|
||||
# Check the address has been imported
|
||||
assert_equal(myzaddr in self.nodes[3].z_listaddresses(), False)
|
||||
assert_equal(myzaddr in self.nodes[3].z_listaddresses(True), True)
|
||||
|
||||
# Node 3 should see the same received notes as node 2
|
||||
assert_equal(
|
||||
self.nodes[2].z_listreceivedbyaddress(myzaddr),
|
||||
self.nodes[3].z_listreceivedbyaddress(myzaddr))
|
||||
|
||||
# Node 3's balances should be unchanged without explicitly requesting
|
||||
# to include watch-only balances
|
||||
assert_equal({k: Decimal(v) for k, v in self.nodes[3].z_gettotalbalance().items()}, {
|
||||
'transparent': node3mined,
|
||||
'private': zsendmany2notevalue,
|
||||
'total': node3mined + zsendmany2notevalue,
|
||||
})
|
||||
|
||||
# Wallet can't cache nullifiers for notes received by addresses it only has a
|
||||
# viewing key for, and therefore can't detect spends. So it sees a balance
|
||||
# corresponding to the sum of all notes the address received.
|
||||
# TODO: Fix this during the Sapling upgrade (via #2277)
|
||||
assert_equal({k: Decimal(v) for k, v in self.nodes[3].z_gettotalbalance(1, True).items()}, {
|
||||
'transparent': node3mined + Decimal('1.0'),
|
||||
'private': zsendmany2notevalue + zsendmanynotevalue + zaddrremaining + zaddrremaining2,
|
||||
'total': node3mined + Decimal('1.0') + zsendmany2notevalue + zsendmanynotevalue + zaddrremaining + zaddrremaining2,
|
||||
})
|
||||
|
||||
# Check individual balances reflect the above
|
||||
assert_equal(self.nodes[3].z_getbalance(mytaddr1), Decimal('1.0'))
|
||||
assert_equal(self.nodes[3].z_getbalance(myzaddr), zsendmanynotevalue + zaddrremaining + zaddrremaining2)
|
||||
|
||||
if __name__ == '__main__':
|
||||
WalletNullifiersTest().main ()
|
||||
|
||||
142
qa/rpc-tests/wallet_overwintertx.py
Executable file
142
qa/rpc-tests/wallet_overwintertx.py
Executable file
@@ -0,0 +1,142 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2018 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_nodes, connect_nodes_bi, wait_and_assert_operationid_status
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
class WalletOverwinterTxTest (BitcoinTestFramework):
    """Exercise wallet transaction creation across the Overwinter activation
    boundary.

    Regtest Overwinter (branch id 5ba81b19) is scheduled at height 200 via
    -nuparams.  Transactions created before activation must serialize as
    version 1/2 (Sprout); transactions created once the next block is an
    Overwinter block must serialize as version 3 with the Overwinter
    version group id.
    """

    def setup_chain(self):
        # Fresh regtest datadirs for four nodes.
        print("Initializing test directory "+self.options.tmpdir)
        initialize_chain_clean(self.options.tmpdir, 4)

    def setup_network(self, split=False):
        # All four nodes activate Overwinter at height 200; -txindex lets
        # node 0 look up arbitrary transactions for format checks.
        self.nodes = start_nodes(4, self.options.tmpdir, extra_args=[["-nuparams=5ba81b19:200", "-debug=zrpcunsafe", "-txindex"]] * 4 )
        for left, right in ((0, 1), (1, 2), (0, 2), (0, 3)):
            connect_nodes_bi(self.nodes, left, right)
        self.is_network_split = False
        self.sync_all()

    def _assert_tx_format(self, txid, version, overwintered, versiongroupid=None):
        # Fetch the verbose transaction from node 0 (-txindex) and verify
        # its serialization-format fields.
        details = self.nodes[0].getrawtransaction(txid, 1)
        assert_equal(details["version"], version)
        assert_equal(details["overwintered"], overwintered)
        if versiongroupid is not None:
            assert_equal(details["versiongroupid"], versiongroupid)

    def run_test (self):
        self.nodes[0].generate(100)
        self.sync_all()
        self.nodes[1].generate(98)
        self.sync_all()
        # Node 0 has reward from blocks 1 to 98 which are spendable.

        taddr0 = self.nodes[0].getnewaddress()
        taddr1 = self.nodes[1].getnewaddress()
        taddr2 = self.nodes[2].getnewaddress()
        zaddr2 = self.nodes[2].z_getnewaddress()
        taddr3 = self.nodes[3].getnewaddress()
        zaddr3 = self.nodes[3].z_getnewaddress()

        #
        # Currently at block 198. The next block to be mined 199 is a Sprout block
        #
        chaininfo = self.nodes[0].getblockchaininfo()
        assert_equal(chaininfo['consensus']['chaintip'], '00000000')
        assert_equal(chaininfo['consensus']['nextblock'], '00000000')
        assert_equal(chaininfo['upgrades']['5ba81b19']['status'], 'pending')

        # Node 0 sends transparent funds to Node 2
        txid_transparent = self.nodes[0].sendtoaddress(taddr2, Decimal('1.0'))
        self.sync_all()

        # Node 2 sends the zero-confirmation transparent funds to Node 1 using z_sendmany
        opid = self.nodes[2].z_sendmany(taddr2, [{"address":taddr1, "amount": Decimal('0.5')}], 0)
        txid_zsendmany = wait_and_assert_operationid_status(self.nodes[2], opid)

        # Node 0 shields to Node 2, a coinbase utxo of value 10.0 less fee 0.00010000
        zsendamount = Decimal('10.0') - Decimal('0.0001')
        opid = self.nodes[0].z_sendmany(taddr0, [{"address":zaddr2, "amount": zsendamount}])
        txid_shielded = wait_and_assert_operationid_status(self.nodes[0], opid)

        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()

        # Verify balance
        assert_equal(self.nodes[1].z_getbalance(taddr1), Decimal('0.5'))
        assert_equal(self.nodes[2].getbalance(), Decimal('0.4999'))
        assert_equal(self.nodes[2].z_getbalance(zaddr2), zsendamount)

        # Verify transaction versions are 1 or 2 (intended for Sprout)
        self._assert_tx_format(txid_transparent, 1, False)
        self._assert_tx_format(txid_zsendmany, 1, False)
        self._assert_tx_format(txid_shielded, 2, False)

        #
        # Currently at block 199. The next block to be mined 200 is an Overwinter block
        #
        chaininfo = self.nodes[0].getblockchaininfo()
        assert_equal(chaininfo['consensus']['chaintip'], '00000000')
        assert_equal(chaininfo['consensus']['nextblock'], '5ba81b19')
        assert_equal(chaininfo['upgrades']['5ba81b19']['status'], 'pending')

        # Node 0 sends transparent funds to Node 3
        txid_transparent = self.nodes[0].sendtoaddress(taddr3, Decimal('1.0'))
        self.sync_all()

        # Node 3 sends the zero-confirmation transparent funds to Node 1 using z_sendmany
        opid = self.nodes[3].z_sendmany(taddr3, [{"address":taddr1, "amount": Decimal('0.5')}], 0)
        txid_zsendmany = wait_and_assert_operationid_status(self.nodes[3], opid)

        # Node 0 shields to Node 3, a coinbase utxo of value 10.0 less fee 0.00010000
        zsendamount = Decimal('10.0') - Decimal('0.0001')
        opid = self.nodes[0].z_sendmany(taddr0, [{"address":zaddr3, "amount": zsendamount}])
        txid_shielded = wait_and_assert_operationid_status(self.nodes[0], opid)

        # Mine the first Overwinter block
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        chaininfo = self.nodes[0].getblockchaininfo()
        assert_equal(chaininfo['consensus']['chaintip'], '5ba81b19')
        assert_equal(chaininfo['consensus']['nextblock'], '5ba81b19')
        assert_equal(chaininfo['upgrades']['5ba81b19']['status'], 'active')

        # Verify balance
        assert_equal(self.nodes[1].z_getbalance(taddr1), Decimal('1.0'))
        assert_equal(self.nodes[3].getbalance(), Decimal('0.4999'))
        assert_equal(self.nodes[3].z_getbalance(zaddr3), zsendamount)

        # Verify transaction version is 3 (intended for Overwinter)
        self._assert_tx_format(txid_transparent, 3, True, "03c48270")
        self._assert_tx_format(txid_zsendmany, 3, True, "03c48270")
        self._assert_tx_format(txid_shielded, 3, True, "03c48270")
|
||||
|
||||
if __name__ == '__main__':
    # Script entry point.
    WalletOverwinterTxTest().main()
|
||||
@@ -5,8 +5,26 @@
|
||||
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from time import *
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.mininode import COIN
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_nodes, connect_nodes_bi, stop_node, wait_and_assert_operationid_status
|
||||
|
||||
import sys
|
||||
import time
|
||||
import timeit
|
||||
from decimal import Decimal
|
||||
|
||||
def check_value_pool(node, name, total):
    """Assert that the monitored value pool *name* on *node* holds exactly
    *total* (checked both in coin units and in zatoshis).

    Fails if no pool with the given id is reported by getblockchaininfo.
    """
    pools = node.getblockchaininfo()['valuePools']
    matching = [pool for pool in pools if pool['id'] == name]
    for pool in matching:
        assert_equal(pool['monitored'], True)
        assert_equal(pool['chainValue'], total)
        assert_equal(pool['chainValueZat'], total * COIN)
    # At least one pool with this id must exist.
    assert(matching)
|
||||
|
||||
class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
|
||||
@@ -16,41 +34,14 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
|
||||
# Start nodes with -regtestprotectcoinbase to set fCoinbaseMustBeProtected to true.
|
||||
def setup_network(self, split=False):
|
||||
self.nodes = start_nodes(3, self.options.tmpdir, extra_args=[['-regtestprotectcoinbase', '-debug=zrpcunsafe']] * 3 )
|
||||
self.nodes = start_nodes(4, self.options.tmpdir, extra_args=[['-regtestprotectcoinbase', '-debug=zrpcunsafe']] * 4 )
|
||||
connect_nodes_bi(self.nodes,0,1)
|
||||
connect_nodes_bi(self.nodes,1,2)
|
||||
connect_nodes_bi(self.nodes,0,2)
|
||||
connect_nodes_bi(self.nodes,0,3)
|
||||
self.is_network_split=False
|
||||
self.sync_all()
|
||||
|
||||
# Returns txid if operation was a success or None
|
||||
def wait_and_assert_operationid_status(self, myopid, in_status='success', in_errormsg=None):
    """Poll node 0 for the result of async operation *myopid* (up to ~300s).

    Asserts the final status equals *in_status*; when the operation failed,
    additionally asserts the error message contains *in_errormsg*.
    Returns the txid on success, otherwise None.
    """
    print('waiting for async operation {}'.format(myopid))
    status = None
    errormsg = None
    txid = None
    for _ in xrange(1, 300):
        results = self.nodes[0].z_getoperationresult([myopid])
        if len(results) == 0:
            # Not finished yet; poll again in a second.
            sleep(1)
            continue
        status = results[0]["status"]
        if status == "failed":
            errormsg = results[0]['error']['message']
        elif status == "success":
            txid = results[0]['result']['txid']
        break
    print('...returned status: {}'.format(status))
    assert_equal(in_status, status)
    if errormsg is not None:
        # Caller must have told us what failure text to expect.
        assert(in_errormsg is not None)
        assert_equal(in_errormsg in errormsg, True)
        print('...returned error: {}'.format(errormsg))
    return txid
|
||||
|
||||
def run_test (self):
|
||||
print "Mining blocks..."
|
||||
|
||||
@@ -67,6 +58,12 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
assert_equal(self.nodes[0].getbalance(), 40)
|
||||
assert_equal(self.nodes[1].getbalance(), 10)
|
||||
assert_equal(self.nodes[2].getbalance(), 0)
|
||||
assert_equal(self.nodes[3].getbalance(), 0)
|
||||
|
||||
check_value_pool(self.nodes[0], 'sprout', 0)
|
||||
check_value_pool(self.nodes[1], 'sprout', 0)
|
||||
check_value_pool(self.nodes[2], 'sprout', 0)
|
||||
check_value_pool(self.nodes[3], 'sprout', 0)
|
||||
|
||||
# Send will fail because we are enforcing the consensus rule that
|
||||
# coinbase utxos can only be sent to a zaddr.
|
||||
@@ -81,6 +78,27 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
mytaddr = self.nodes[0].getnewaddress()
|
||||
myzaddr = self.nodes[0].z_getnewaddress()
|
||||
|
||||
# Node 3 will test that watch only address utxos are not selected
|
||||
self.nodes[3].importaddress(mytaddr)
|
||||
recipients= [{"address":myzaddr, "amount": Decimal('1')}]
|
||||
myopid = self.nodes[3].z_sendmany(mytaddr, recipients)
|
||||
errorString=""
|
||||
status = None
|
||||
opids = [myopid]
|
||||
timeout = 10
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[3].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
errorString = results[0]["error"]["message"]
|
||||
break
|
||||
assert_equal("failed", status)
|
||||
assert_equal("no UTXOs found for taddr from address" in errorString, True)
|
||||
stop_node(self.nodes[3], 3)
|
||||
self.nodes.pop()
|
||||
|
||||
# This send will fail because our wallet does not allow any change when protecting a coinbase utxo,
|
||||
# as it's currently not possible to specify a change address in z_sendmany.
|
||||
recipients = []
|
||||
@@ -94,7 +112,7 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
for x in xrange(1, timeout):
|
||||
results = self.nodes[0].z_getoperationresult(opids)
|
||||
if len(results)==0:
|
||||
sleep(1)
|
||||
time.sleep(1)
|
||||
else:
|
||||
status = results[0]["status"]
|
||||
errorString = results[0]["error"]["message"]
|
||||
@@ -112,10 +130,11 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
assert_equal("wallet does not allow any change" in errorString, True)
|
||||
|
||||
# This send will succeed. We send two coinbase utxos totalling 20.0 less a fee of 0.00010000, with no change.
|
||||
shieldvalue = Decimal('20.0') - Decimal('0.0001')
|
||||
recipients = []
|
||||
recipients.append({"address":myzaddr, "amount": Decimal('20.0') - Decimal('0.0001')})
|
||||
recipients.append({"address":myzaddr, "amount": shieldvalue})
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
|
||||
mytxid = self.wait_and_assert_operationid_status(myopid)
|
||||
mytxid = wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
@@ -140,11 +159,15 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
assert_equal(Decimal(resp["private"]), Decimal('19.9999'))
|
||||
assert_equal(Decimal(resp["total"]), Decimal('39.9999'))
|
||||
|
||||
# The Sprout value pool should reflect the send
|
||||
sproutvalue = shieldvalue
|
||||
check_value_pool(self.nodes[0], 'sprout', sproutvalue)
|
||||
|
||||
# A custom fee of 0 is okay. Here the node will send the note value back to itself.
|
||||
recipients = []
|
||||
recipients.append({"address":myzaddr, "amount": Decimal('19.9999')})
|
||||
myopid = self.nodes[0].z_sendmany(myzaddr, recipients, 1, Decimal('0.0'))
|
||||
mytxid = self.wait_and_assert_operationid_status(myopid)
|
||||
mytxid = wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
@@ -153,11 +176,15 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
assert_equal(Decimal(resp["private"]), Decimal('19.9999'))
|
||||
assert_equal(Decimal(resp["total"]), Decimal('39.9999'))
|
||||
|
||||
# The Sprout value pool should be unchanged
|
||||
check_value_pool(self.nodes[0], 'sprout', sproutvalue)
|
||||
|
||||
# convert note to transparent funds
|
||||
unshieldvalue = Decimal('10.0')
|
||||
recipients = []
|
||||
recipients.append({"address":mytaddr, "amount":Decimal('10.0')})
|
||||
recipients.append({"address":mytaddr, "amount": unshieldvalue})
|
||||
myopid = self.nodes[0].z_sendmany(myzaddr, recipients)
|
||||
mytxid = self.wait_and_assert_operationid_status(myopid)
|
||||
mytxid = wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
assert(mytxid is not None)
|
||||
self.sync_all()
|
||||
|
||||
@@ -169,10 +196,12 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
self.sync_all()
|
||||
|
||||
# check balances
|
||||
sproutvalue -= unshieldvalue + Decimal('0.0001')
|
||||
resp = self.nodes[0].z_gettotalbalance()
|
||||
assert_equal(Decimal(resp["transparent"]), Decimal('30.0'))
|
||||
assert_equal(Decimal(resp["private"]), Decimal('9.9998'))
|
||||
assert_equal(Decimal(resp["total"]), Decimal('39.9998'))
|
||||
check_value_pool(self.nodes[0], 'sprout', sproutvalue)
|
||||
|
||||
# z_sendmany will return an error if there is transparent change output considered dust.
|
||||
# UTXO selection in z_sendmany sorts in ascending order, so smallest utxos are consumed first.
|
||||
@@ -181,7 +210,7 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
amount = Decimal('10.0') - Decimal('0.00010000') - Decimal('0.00000001') # this leaves change at 1 zatoshi less than dust threshold
|
||||
recipients.append({"address":self.nodes[0].getnewaddress(), "amount":amount })
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
|
||||
self.wait_and_assert_operationid_status(myopid, "failed", "Insufficient transparent funds, have 10.00, need 0.00000053 more to avoid creating invalid change output 0.00000001 (dust threshold is 0.00000054)")
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid, "failed", "Insufficient transparent funds, have 10.00, need 0.00000053 more to avoid creating invalid change output 0.00000001 (dust threshold is 0.00000054)")
|
||||
|
||||
# Send will fail because send amount is too big, even when including coinbase utxos
|
||||
errorString = ""
|
||||
@@ -195,9 +224,9 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
recipients = []
|
||||
recipients.append({"address":self.nodes[1].getnewaddress(), "amount":Decimal('10000.0')})
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
|
||||
self.wait_and_assert_operationid_status(myopid, "failed", "Insufficient transparent funds, have 10.00, need 10000.0001")
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid, "failed", "Insufficient transparent funds, have 10.00, need 10000.0001")
|
||||
myopid = self.nodes[0].z_sendmany(myzaddr, recipients)
|
||||
self.wait_and_assert_operationid_status(myopid, "failed", "Insufficient protected funds, have 9.9998, need 10000.0001")
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid, "failed", "Insufficient protected funds, have 9.9998, need 10000.0001")
|
||||
|
||||
# Send will fail because of insufficient funds unless sender uses coinbase utxos
|
||||
try:
|
||||
@@ -216,12 +245,25 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
amount_per_recipient = Decimal('0.00000546') # dust threshold
|
||||
# Note that regtest chainparams does not require standard tx, so setting the amount to be
|
||||
# less than the dust threshold, e.g. 0.00000001 will not result in mempool rejection.
|
||||
start_time = timeit.default_timer()
|
||||
for i in xrange(0,num_t_recipients):
|
||||
newtaddr = self.nodes[2].getnewaddress()
|
||||
recipients.append({"address":newtaddr, "amount":amount_per_recipient})
|
||||
elapsed = timeit.default_timer() - start_time
|
||||
print("...invoked getnewaddress() {} times in {} seconds".format(num_t_recipients, elapsed))
|
||||
|
||||
# Issue #2263 Workaround START
|
||||
# HTTP connection to node 0 may fall into a state, during the few minutes it takes to process
|
||||
# loop above to create new addresses, that when z_sendmany is called with a large amount of
|
||||
# rpc data in recipients, the connection fails with a 'broken pipe' error. Making a RPC call
|
||||
# to node 0 before calling z_sendmany appears to fix this issue, perhaps putting the HTTP
|
||||
# connection into a good state to handle a large amount of data in recipients.
|
||||
self.nodes[0].getinfo()
|
||||
# Issue #2263 Workaround END
|
||||
|
||||
myopid = self.nodes[0].z_sendmany(myzaddr, recipients)
|
||||
try:
|
||||
self.wait_and_assert_operationid_status(myopid)
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
except JSONRPCException as e:
|
||||
print("JSONRPC error: "+e.error['message'])
|
||||
assert(False)
|
||||
@@ -235,7 +277,9 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
|
||||
# check balance
|
||||
node2balance = amount_per_recipient * num_t_recipients
|
||||
sproutvalue -= node2balance + Decimal('0.0001')
|
||||
assert_equal(self.nodes[2].getbalance(), node2balance)
|
||||
check_value_pool(self.nodes[0], 'sprout', sproutvalue)
|
||||
|
||||
# Send will fail because fee is negative
|
||||
try:
|
||||
@@ -284,7 +328,7 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
newzaddr = self.nodes[2].z_getnewaddress()
|
||||
recipients.append({"address":newzaddr, "amount":amount_per_recipient})
|
||||
myopid = self.nodes[0].z_sendmany(myzaddr, recipients, minconf, custom_fee)
|
||||
self.wait_and_assert_operationid_status(myopid)
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
@@ -294,6 +338,8 @@ class WalletProtectCoinbaseTest (BitcoinTestFramework):
|
||||
assert_equal(Decimal(resp["private"]), send_amount)
|
||||
resp = self.nodes[0].z_getbalance(myzaddr)
|
||||
assert_equal(Decimal(resp), zbalance - custom_fee - send_amount)
|
||||
sproutvalue -= custom_fee
|
||||
check_value_pool(self.nodes[0], 'sprout', sproutvalue)
|
||||
|
||||
if __name__ == '__main__':
|
||||
WalletProtectCoinbaseTest().main()
|
||||
|
||||
199
qa/rpc-tests/wallet_shieldcoinbase.py
Executable file
199
qa/rpc-tests/wallet_shieldcoinbase.py
Executable file
@@ -0,0 +1,199 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2017 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_node, connect_nodes_bi, sync_blocks, sync_mempools, \
|
||||
wait_and_assert_operationid_status
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
class WalletShieldCoinbaseTest (BitcoinTestFramework):
|
||||
|
||||
def setup_chain(self):
|
||||
print("Initializing test directory "+self.options.tmpdir)
|
||||
initialize_chain_clean(self.options.tmpdir, 4)
|
||||
|
||||
def setup_network(self, split=False):
|
||||
args = ['-regtestprotectcoinbase', '-debug=zrpcunsafe']
|
||||
self.nodes = []
|
||||
self.nodes.append(start_node(0, self.options.tmpdir, args))
|
||||
self.nodes.append(start_node(1, self.options.tmpdir, args))
|
||||
args2 = ['-regtestprotectcoinbase', '-debug=zrpcunsafe', "-mempooltxinputlimit=7"]
|
||||
self.nodes.append(start_node(2, self.options.tmpdir, args2))
|
||||
connect_nodes_bi(self.nodes,0,1)
|
||||
connect_nodes_bi(self.nodes,1,2)
|
||||
connect_nodes_bi(self.nodes,0,2)
|
||||
self.is_network_split=False
|
||||
self.sync_all()
|
||||
|
||||
def run_test (self):
|
||||
print "Mining blocks..."
|
||||
|
||||
self.nodes[0].generate(1)
|
||||
do_not_shield_taddr = self.nodes[0].getnewaddress()
|
||||
|
||||
self.nodes[0].generate(4)
|
||||
walletinfo = self.nodes[0].getwalletinfo()
|
||||
assert_equal(walletinfo['immature_balance'], 50)
|
||||
assert_equal(walletinfo['balance'], 0)
|
||||
self.sync_all()
|
||||
self.nodes[2].generate(1)
|
||||
self.nodes[2].getnewaddress()
|
||||
self.nodes[2].generate(1)
|
||||
self.nodes[2].getnewaddress()
|
||||
self.nodes[2].generate(1)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(101)
|
||||
self.sync_all()
|
||||
assert_equal(self.nodes[0].getbalance(), 50)
|
||||
assert_equal(self.nodes[1].getbalance(), 10)
|
||||
assert_equal(self.nodes[2].getbalance(), 30)
|
||||
|
||||
# Prepare to send taddr->zaddr
|
||||
mytaddr = self.nodes[0].getnewaddress()
|
||||
myzaddr = self.nodes[0].z_getnewaddress()
|
||||
|
||||
# Shielding will fail when trying to spend from watch-only address
|
||||
self.nodes[2].importaddress(mytaddr)
|
||||
try:
|
||||
self.nodes[2].z_shieldcoinbase(mytaddr, myzaddr)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Could not find any coinbase funds to shield" in errorString, True)
|
||||
|
||||
# Shielding will fail because fee is negative
|
||||
try:
|
||||
self.nodes[0].z_shieldcoinbase("*", myzaddr, -1)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Amount out of range" in errorString, True)
|
||||
|
||||
# Shielding will fail because fee is larger than MAX_MONEY
|
||||
try:
|
||||
self.nodes[0].z_shieldcoinbase("*", myzaddr, Decimal('21000000.00000001'))
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Amount out of range" in errorString, True)
|
||||
|
||||
# Shielding will fail because fee is larger than sum of utxos
|
||||
try:
|
||||
self.nodes[0].z_shieldcoinbase("*", myzaddr, 999)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Insufficient coinbase funds" in errorString, True)
|
||||
|
||||
# Shielding will fail because limit parameter must be at least 0
|
||||
try:
|
||||
self.nodes[0].z_shieldcoinbase("*", myzaddr, Decimal('0.001'), -1)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("Limit on maximum number of utxos cannot be negative" in errorString, True)
|
||||
|
||||
# Shielding will fail because limit parameter is absurdly large
|
||||
try:
|
||||
self.nodes[0].z_shieldcoinbase("*", myzaddr, Decimal('0.001'), 99999999999999)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
assert_equal("JSON integer out of range" in errorString, True)
|
||||
|
||||
# Shield coinbase utxos from node 0 of value 40, standard fee of 0.00010000
|
||||
result = self.nodes[0].z_shieldcoinbase(mytaddr, myzaddr)
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Confirm balances and that do_not_shield_taddr containing funds of 10 was left alone
|
||||
assert_equal(self.nodes[0].getbalance(), 10)
|
||||
assert_equal(self.nodes[0].z_getbalance(do_not_shield_taddr), Decimal('10.0'))
|
||||
assert_equal(self.nodes[0].z_getbalance(myzaddr), Decimal('39.99990000'))
|
||||
assert_equal(self.nodes[1].getbalance(), 20)
|
||||
assert_equal(self.nodes[2].getbalance(), 30)
|
||||
|
||||
# Shield coinbase utxos from any node 2 taddr, and set fee to 0
|
||||
result = self.nodes[2].z_shieldcoinbase("*", myzaddr, 0)
|
||||
wait_and_assert_operationid_status(self.nodes[2], result['opid'])
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
assert_equal(self.nodes[0].getbalance(), 10)
|
||||
assert_equal(self.nodes[0].z_getbalance(myzaddr), Decimal('69.99990000'))
|
||||
assert_equal(self.nodes[1].getbalance(), 30)
|
||||
assert_equal(self.nodes[2].getbalance(), 0)
|
||||
|
||||
# Generate 800 coinbase utxos on node 0, and 20 coinbase utxos on node 2
|
||||
self.nodes[0].generate(800)
|
||||
self.sync_all()
|
||||
self.nodes[2].generate(20)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(100)
|
||||
self.sync_all()
|
||||
mytaddr = self.nodes[0].getnewaddress()
|
||||
|
||||
# Shielding the 800 utxos will occur over two transactions, since max tx size is 100,000 bytes.
|
||||
# We don't verify shieldingValue as utxos are not selected in any specific order, so value can change on each test run.
|
||||
# We set an unrealistically high limit parameter of 99999, to verify that max tx size will constrain the number of utxos.
|
||||
result = self.nodes[0].z_shieldcoinbase(mytaddr, myzaddr, 0, 99999)
|
||||
assert_equal(result["shieldingUTXOs"], Decimal('662'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('138'))
|
||||
remainingValue = result["remainingValue"]
|
||||
opid1 = result['opid']
|
||||
|
||||
# Verify that utxos are locked (not available for selection) by queuing up another shielding operation
|
||||
result = self.nodes[0].z_shieldcoinbase(mytaddr, myzaddr, 0, 0)
|
||||
assert_equal(result["shieldingValue"], Decimal(remainingValue))
|
||||
assert_equal(result["shieldingUTXOs"], Decimal('138'))
|
||||
assert_equal(result["remainingValue"], Decimal('0'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('0'))
|
||||
opid2 = result['opid']
|
||||
|
||||
# wait for both aysnc operations to complete
|
||||
wait_and_assert_operationid_status(self.nodes[0], opid1)
|
||||
wait_and_assert_operationid_status(self.nodes[0], opid2)
|
||||
|
||||
# sync_all() invokes sync_mempool() but node 2's mempool limit will cause tx1 and tx2 to be rejected.
|
||||
# So instead, we sync on blocks and mempool for node 0 and node 1, and after a new block is generated
|
||||
# which mines tx1 and tx2, all nodes will have an empty mempool which can then be synced.
|
||||
sync_blocks(self.nodes[:2])
|
||||
sync_mempools(self.nodes[:2])
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Verify maximum number of utxos which node 2 can shield is limited by option -mempooltxinputlimit
|
||||
# This option is used when the limit parameter is set to 0.
|
||||
mytaddr = self.nodes[2].getnewaddress()
|
||||
result = self.nodes[2].z_shieldcoinbase(mytaddr, myzaddr, Decimal('0.0001'), 0)
|
||||
assert_equal(result["shieldingUTXOs"], Decimal('7'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('13'))
|
||||
wait_and_assert_operationid_status(self.nodes[2], result['opid'])
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Verify maximum number of utxos which node 0 can shield is set by default limit parameter of 50
|
||||
self.nodes[0].generate(200)
|
||||
self.sync_all()
|
||||
mytaddr = self.nodes[0].getnewaddress()
|
||||
result = self.nodes[0].z_shieldcoinbase(mytaddr, myzaddr, Decimal('0.0001'))
|
||||
assert_equal(result["shieldingUTXOs"], Decimal('50'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('50'))
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
|
||||
# Verify maximum number of utxos which node 0 can shield can be set by the limit parameter
|
||||
result = self.nodes[0].z_shieldcoinbase(mytaddr, myzaddr, Decimal('0.0001'), 33)
|
||||
assert_equal(result["shieldingUTXOs"], Decimal('33'))
|
||||
assert_equal(result["remainingUTXOs"], Decimal('17'))
|
||||
wait_and_assert_operationid_status(self.nodes[0], result['opid'])
|
||||
# Don't sync node 2 which rejects the tx due to its mempooltxinputlimit
|
||||
sync_blocks(self.nodes[:2])
|
||||
sync_mempools(self.nodes[:2])
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
if __name__ == '__main__':
    # Script entry point.
    WalletShieldCoinbaseTest().main()
|
||||
@@ -5,10 +5,11 @@
|
||||
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from time import *
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_nodes, connect_nodes_bi, wait_and_assert_operationid_status
|
||||
|
||||
import sys
|
||||
import time
|
||||
from decimal import Decimal
|
||||
|
||||
class WalletTreeStateTest (BitcoinTestFramework):
|
||||
|
||||
@@ -25,30 +26,6 @@ class WalletTreeStateTest (BitcoinTestFramework):
|
||||
self.is_network_split=False
|
||||
self.sync_all()
|
||||
|
||||
def wait_and_assert_operationid_status(self, myopid, in_status='success', in_errormsg=None):
    """Poll node 0 for the result of async operation *myopid* (up to ~300s),
    then assert the final status equals *in_status* and — on failure — that
    the error message contains *in_errormsg*.
    """
    print('waiting for async operation {}'.format(myopid))
    status = None
    errormsg = None
    for _ in xrange(1, 300):
        results = self.nodes[0].z_getoperationresult([myopid])
        if len(results) == 0:
            # Not finished yet; poll again in a second.
            sleep(1)
            continue
        status = results[0]["status"]
        if status == "failed":
            errormsg = results[0]['error']['message']
        break
    print('...returned status: {}'.format(status))
    print('...error msg: {}'.format(errormsg))
    assert_equal(in_status, status)
    if errormsg is not None:
        # Caller must have told us what failure text to expect.
        assert(in_errormsg is not None)
        assert_equal(in_errormsg in errormsg, True)
        print('...returned error: {}'.format(errormsg))
|
||||
|
||||
def run_test (self):
|
||||
print "Mining blocks..."
|
||||
|
||||
@@ -64,17 +41,17 @@ class WalletTreeStateTest (BitcoinTestFramework):
|
||||
recipients = []
|
||||
recipients.append({"address":myzaddr, "amount":Decimal('10.0') - Decimal('0.0001')})
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
|
||||
self.wait_and_assert_operationid_status(myopid)
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
|
||||
self.wait_and_assert_operationid_status(myopid)
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
|
||||
self.wait_and_assert_operationid_status(myopid)
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
@@ -91,7 +68,7 @@ class WalletTreeStateTest (BitcoinTestFramework):
|
||||
recipients = []
|
||||
recipients.append({"address":self.nodes[2].z_getnewaddress(), "amount":Decimal('10.0') - Decimal('0.0001')})
|
||||
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
|
||||
self.wait_and_assert_operationid_status(myopid)
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
|
||||
# Tx 2 will consume all three notes, which must take at least two joinsplits. This is regardless of
|
||||
# the z_sendmany implementation because there are only two inputs per joinsplit.
|
||||
@@ -106,7 +83,7 @@ class WalletTreeStateTest (BitcoinTestFramework):
|
||||
status = results[0]["status"]
|
||||
if status == "executing":
|
||||
break
|
||||
sleep(1)
|
||||
time.sleep(1)
|
||||
|
||||
# Now mine Tx 1 which will change global treestate before Tx 2's second joinsplit begins processing
|
||||
self.sync_all()
|
||||
@@ -114,7 +91,7 @@ class WalletTreeStateTest (BitcoinTestFramework):
|
||||
self.sync_all()
|
||||
|
||||
# Wait for Tx 2 to be created
|
||||
self.wait_and_assert_operationid_status(myopid)
|
||||
wait_and_assert_operationid_status(self.nodes[0], myopid)
|
||||
|
||||
# Note that a bug existed in v1.0.0-1.0.3 where Tx 2 creation would fail with an error:
|
||||
# "Witness for spendable note does not have same anchor as change input"
|
||||
|
||||
@@ -34,9 +34,17 @@ and confirm again balances are correct.
|
||||
"""
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_nodes, start_node, connect_nodes, stop_node, \
|
||||
sync_blocks, sync_mempools
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from random import randint
|
||||
from decimal import Decimal
|
||||
import logging
|
||||
|
||||
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
|
||||
|
||||
class WalletBackupTest(BitcoinTestFramework):
|
||||
@@ -134,6 +142,14 @@ class WalletBackupTest(BitcoinTestFramework):
|
||||
self.nodes[2].backupwallet("walletbak")
|
||||
self.nodes[2].dumpwallet("walletdump")
|
||||
|
||||
# Verify dumpwallet cannot overwrite an existing file
|
||||
try:
|
||||
self.nodes[2].dumpwallet("walletdump")
|
||||
assert(False)
|
||||
except JSONRPCException as e:
|
||||
errorString = e.error['message']
|
||||
assert("Cannot overwrite existing file" in errorString)
|
||||
|
||||
logging.info("More transactions")
|
||||
for i in range(5):
|
||||
self.do_one_round()
|
||||
|
||||
@@ -4,7 +4,9 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, initialize_chain_clean, \
|
||||
start_nodes, start_node, connect_nodes_bi, bitcoind_processes
|
||||
|
||||
|
||||
class ZapWalletTXesTest (BitcoinTestFramework):
|
||||
@@ -27,56 +29,56 @@ class ZapWalletTXesTest (BitcoinTestFramework):
|
||||
self.sync_all()
|
||||
self.nodes[1].generate(101)
|
||||
self.sync_all()
|
||||
|
||||
|
||||
assert_equal(self.nodes[0].getbalance(), 40)
|
||||
|
||||
|
||||
txid0 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
|
||||
txid1 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
|
||||
self.sync_all()
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
|
||||
txid2 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
|
||||
txid3 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5)
|
||||
|
||||
|
||||
tx0 = self.nodes[0].gettransaction(txid0)
|
||||
assert_equal(tx0['txid'], txid0) #tx0 must be available (confirmed)
|
||||
|
||||
assert_equal(tx0['txid'], txid0) # tx0 must be available (confirmed)
|
||||
|
||||
tx1 = self.nodes[0].gettransaction(txid1)
|
||||
assert_equal(tx1['txid'], txid1) #tx1 must be available (confirmed)
|
||||
|
||||
assert_equal(tx1['txid'], txid1) # tx1 must be available (confirmed)
|
||||
|
||||
tx2 = self.nodes[0].gettransaction(txid2)
|
||||
assert_equal(tx2['txid'], txid2) #tx2 must be available (unconfirmed)
|
||||
|
||||
assert_equal(tx2['txid'], txid2) # tx2 must be available (unconfirmed)
|
||||
|
||||
tx3 = self.nodes[0].gettransaction(txid3)
|
||||
assert_equal(tx3['txid'], txid3) #tx3 must be available (unconfirmed)
|
||||
|
||||
#restart bitcoind
|
||||
assert_equal(tx3['txid'], txid3) # tx3 must be available (unconfirmed)
|
||||
|
||||
# restart zcashd
|
||||
self.nodes[0].stop()
|
||||
bitcoind_processes[0].wait()
|
||||
self.nodes[0] = start_node(0,self.options.tmpdir)
|
||||
|
||||
|
||||
tx3 = self.nodes[0].gettransaction(txid3)
|
||||
assert_equal(tx3['txid'], txid3) #tx must be available (unconfirmed)
|
||||
|
||||
assert_equal(tx3['txid'], txid3) # tx must be available (unconfirmed)
|
||||
|
||||
self.nodes[0].stop()
|
||||
bitcoind_processes[0].wait()
|
||||
|
||||
#restart bitcoind with zapwallettxes
|
||||
|
||||
# restart zcashd with zapwallettxes
|
||||
self.nodes[0] = start_node(0,self.options.tmpdir, ["-zapwallettxes=1"])
|
||||
|
||||
|
||||
aException = False
|
||||
try:
|
||||
tx3 = self.nodes[0].gettransaction(txid3)
|
||||
except JSONRPCException,e:
|
||||
print e
|
||||
aException = True
|
||||
|
||||
assert_equal(aException, True) #there must be a expection because the unconfirmed wallettx0 must be gone by now
|
||||
|
||||
assert_equal(aException, True) # there must be a expection because the unconfirmed wallettx0 must be gone by now
|
||||
|
||||
tx0 = self.nodes[0].gettransaction(txid0)
|
||||
assert_equal(tx0['txid'], txid0) #tx0 (confirmed) must still be available because it was confirmed
|
||||
assert_equal(tx0['txid'], txid0) # tx0 (confirmed) must still be available because it was confirmed
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
ZapWalletTXesTest ().main ()
|
||||
ZapWalletTXesTest().main()
|
||||
|
||||
@@ -5,11 +5,9 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from decimal import Decimal
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from test_framework.util import assert_equal, start_node, \
|
||||
gather_inputs
|
||||
|
||||
|
||||
class JoinSplitTest(BitcoinTestFramework):
|
||||
def setup_network(self):
|
||||
|
||||
@@ -5,11 +5,10 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from decimal import Decimal
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.util import assert_equal, connect_nodes, \
|
||||
gather_inputs, sync_blocks
|
||||
|
||||
import time
|
||||
|
||||
class JoinSplitTest(BitcoinTestFramework):
|
||||
@@ -41,7 +40,7 @@ class JoinSplitTest(BitcoinTestFramework):
|
||||
assert_equal(self.cannot_joinsplit(node, txn), True)
|
||||
|
||||
def run_test(self):
|
||||
# All nodes should start with 250 BTC:
|
||||
# All nodes should start with 250 ZEC:
|
||||
starting_balance = 250
|
||||
for i in range(4):
|
||||
assert_equal(self.nodes[i].getbalance(), starting_balance)
|
||||
|
||||
161
qa/rpc-tests/zkey_import_export.py
Executable file
161
qa/rpc-tests/zkey_import_export.py
Executable file
@@ -0,0 +1,161 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2017 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from decimal import Decimal
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import assert_equal, assert_greater_than, start_nodes,\
|
||||
initialize_chain_clean, connect_nodes_bi, wait_and_assert_operationid_status
|
||||
|
||||
import logging
|
||||
import time
|
||||
import math
|
||||
|
||||
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
|
||||
|
||||
|
||||
class ZkeyImportExportTest (BitcoinTestFramework):
|
||||
|
||||
def setup_chain(self):
|
||||
print("Initializing test directory "+self.options.tmpdir)
|
||||
initialize_chain_clean(self.options.tmpdir, 5)
|
||||
|
||||
def setup_network(self, split=False):
|
||||
self.nodes = start_nodes(5, self.options.tmpdir )
|
||||
connect_nodes_bi(self.nodes,0,1)
|
||||
connect_nodes_bi(self.nodes,1,2)
|
||||
connect_nodes_bi(self.nodes,0,2)
|
||||
connect_nodes_bi(self.nodes,0,3)
|
||||
connect_nodes_bi(self.nodes,0,4)
|
||||
self.is_network_split=False
|
||||
self.sync_all()
|
||||
|
||||
def run_test(self):
|
||||
[alice, bob, charlie, david, miner] = self.nodes
|
||||
|
||||
def z_send(from_node, from_addr, to_addr, amount):
|
||||
opid = from_node.z_sendmany(from_addr, [{"address": to_addr, "amount": Decimal(amount)}])
|
||||
wait_and_assert_operationid_status(from_node, opid)
|
||||
self.sync_all()
|
||||
miner.generate(1)
|
||||
self.sync_all()
|
||||
|
||||
def z_getbalance(node, zaddr):
|
||||
bal = node.z_getbalance(zaddr)
|
||||
# Ignore fees for sake of comparison
|
||||
round_balance = math.ceil(bal*100)/100
|
||||
return round_balance
|
||||
|
||||
def verify_utxos(node, amts, zaddr):
|
||||
amts.sort(reverse=True)
|
||||
txs = node.z_listreceivedbyaddress(zaddr)
|
||||
|
||||
def cmp_confirmations_high_to_low(a, b):
|
||||
return cmp(b["amount"], a["amount"])
|
||||
|
||||
txs.sort(cmp_confirmations_high_to_low)
|
||||
print("Sorted txs", txs)
|
||||
print("amts", amts)
|
||||
|
||||
try:
|
||||
assert_equal(amts, [tx["amount"] for tx in txs])
|
||||
except AssertionError:
|
||||
logging.error(
|
||||
'Expected amounts: %r; txs: %r',
|
||||
amts, txs)
|
||||
raise
|
||||
|
||||
def get_private_balance(node):
|
||||
balance = node.z_gettotalbalance()
|
||||
return balance['private']
|
||||
|
||||
def find_imported_key(node, import_zaddr):
|
||||
zaddrs = node.z_listaddresses()
|
||||
assert(import_zaddr in zaddrs)
|
||||
return import_zaddr
|
||||
|
||||
# Seed Alice with some funds
|
||||
alice.generate(10)
|
||||
self.sync_all()
|
||||
miner.generate(100)
|
||||
self.sync_all()
|
||||
# Shield Alice's coinbase funds to her zaddr
|
||||
alice_zaddr = alice.z_getnewaddress()
|
||||
res = alice.z_shieldcoinbase("*", alice_zaddr)
|
||||
wait_and_assert_operationid_status(alice, res['opid'])
|
||||
self.sync_all()
|
||||
miner.generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Now get a pristine z-address for receiving transfers:
|
||||
bob_zaddr = bob.z_getnewaddress()
|
||||
verify_utxos(bob, [], bob_zaddr)
|
||||
# TODO: Verify that charlie doesn't have funds in addr
|
||||
# verify_utxos(charlie, [])
|
||||
|
||||
# the amounts of each txn embodied which generates a single UTXO:
|
||||
amounts = map(Decimal, ['2.3', '3.7', '0.1', '0.5', '1.0', '0.19'])
|
||||
|
||||
# Internal test consistency assertion:
|
||||
assert_greater_than(
|
||||
get_private_balance(alice),
|
||||
reduce(Decimal.__add__, amounts))
|
||||
|
||||
logging.info("Sending pre-export txns...")
|
||||
for amount in amounts[0:2]:
|
||||
z_send(alice, alice_zaddr, bob_zaddr, amount)
|
||||
|
||||
logging.info("Exporting privkey from bob...")
|
||||
privkey = bob.z_exportkey(bob_zaddr)
|
||||
|
||||
logging.info("Sending post-export txns...")
|
||||
for amount in amounts[2:4]:
|
||||
z_send(alice, alice_zaddr, bob_zaddr, amount)
|
||||
|
||||
print("Bob amounts:", amounts[:4])
|
||||
verify_utxos(bob, amounts[:4], bob_zaddr)
|
||||
# verify_utxos(charlie, [])
|
||||
|
||||
logging.info("Importing privkey into charlie...")
|
||||
# z_importkey rescan defaults to "whenkeyisnew", so should rescan here
|
||||
charlie.z_importkey(privkey)
|
||||
ipk_zaddr = find_imported_key(charlie, bob_zaddr)
|
||||
|
||||
# z_importkey should have rescanned for new key, so this should pass:
|
||||
verify_utxos(charlie, amounts[:4], ipk_zaddr)
|
||||
|
||||
# Verify idempotent behavior:
|
||||
charlie.z_importkey(privkey)
|
||||
ipk_zaddr2 = find_imported_key(charlie, bob_zaddr)
|
||||
|
||||
# amounts should be unchanged
|
||||
verify_utxos(charlie, amounts[:4], ipk_zaddr2)
|
||||
|
||||
logging.info("Sending post-import txns...")
|
||||
for amount in amounts[4:]:
|
||||
z_send(alice, alice_zaddr, bob_zaddr, amount)
|
||||
|
||||
verify_utxos(bob, amounts, bob_zaddr)
|
||||
verify_utxos(charlie, amounts, ipk_zaddr)
|
||||
verify_utxos(charlie, amounts, ipk_zaddr2)
|
||||
|
||||
# Try to reproduce zombie balance reported in #1936
|
||||
# At generated zaddr, receive ZEC, and send ZEC back out. bob -> alice
|
||||
for amount in amounts[:2]:
|
||||
print("Sending amount from bob to alice: ", amount)
|
||||
z_send(bob, bob_zaddr, alice_zaddr, amount)
|
||||
|
||||
balance = float(sum(amounts) - sum(amounts[:2]))
|
||||
assert_equal(z_getbalance(bob, bob_zaddr), balance)
|
||||
|
||||
# z_import onto new node "david" (blockchain rescan, default or True?)
|
||||
david.z_importkey(privkey)
|
||||
d_ipk_zaddr = find_imported_key(david, bob_zaddr)
|
||||
|
||||
# Check if amt bob spent is deducted for charlie and david
|
||||
assert_equal(z_getbalance(charlie, ipk_zaddr), balance)
|
||||
assert_equal(z_getbalance(david, d_ipk_zaddr), balance)
|
||||
|
||||
if __name__ == '__main__':
|
||||
ZkeyImportExportTest().main()
|
||||
@@ -8,21 +8,12 @@
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.util import assert_equal, bytes_to_hex_str, start_nodes
|
||||
|
||||
import zmq
|
||||
import binascii
|
||||
import struct
|
||||
|
||||
try:
|
||||
import http.client as httplib
|
||||
except ImportError:
|
||||
import httplib
|
||||
try:
|
||||
import urllib.parse as urlparse
|
||||
except ImportError:
|
||||
import urlparse
|
||||
|
||||
class ZMQTest (BitcoinTestFramework):
|
||||
class ZMQTest(BitcoinTestFramework):
|
||||
|
||||
port = 28332
|
||||
|
||||
@@ -51,8 +42,9 @@ class ZMQTest (BitcoinTestFramework):
|
||||
assert_equal(topic, b"hashtx")
|
||||
body = msg[1]
|
||||
nseq = msg[2]
|
||||
[nseq] # hush pyflakes
|
||||
msgSequence = struct.unpack('<I', msg[-1])[-1]
|
||||
assert_equal(msgSequence, 0) #must be sequence 0 on hashtx
|
||||
assert_equal(msgSequence, 0) # must be sequence 0 on hashtx
|
||||
|
||||
msg = self.zmqSubSocket.recv_multipart()
|
||||
topic = msg[0]
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
REPOROOT="$(readlink -f "$(dirname "$0")"/../../)"
|
||||
|
||||
function test_rpath_runpath {
|
||||
if "${REPOROOT}/qa/zcash/checksec.sh" --file "$1" | grep -q "No RPATH.*No RUNPATH"; then
|
||||
echo PASS: "$1" has no RPATH or RUNPATH.
|
||||
return 0
|
||||
else
|
||||
echo FAIL: "$1" has an RPATH or a RUNPATH.
|
||||
"${REPOROOT}/qa/zcash/checksec.sh" --file "$1"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
function test_fortify_source {
|
||||
if { "${REPOROOT}/qa/zcash/checksec.sh" --fortify-file "$1" | grep -q "FORTIFY_SOURCE support available.*Yes"; } &&
|
||||
{ "${REPOROOT}/qa/zcash/checksec.sh" --fortify-file "$1" | grep -q "Binary compiled with FORTIFY_SOURCE support.*Yes"; }; then
|
||||
echo PASS: "$1" has FORTIFY_SOURCE.
|
||||
return 0
|
||||
else
|
||||
echo FAIL: "$1" is missing FORTIFY_SOURCE.
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# PIE, RELRO, Canary, and NX are tested by make check-security.
|
||||
make -C "$REPOROOT/src" check-security
|
||||
|
||||
test_rpath_runpath "${REPOROOT}/src/zcashd"
|
||||
test_rpath_runpath "${REPOROOT}/src/zcash-cli"
|
||||
test_rpath_runpath "${REPOROOT}/src/zcash-gtest"
|
||||
test_rpath_runpath "${REPOROOT}/src/zcash-tx"
|
||||
test_rpath_runpath "${REPOROOT}/src/test/test_bitcoin"
|
||||
test_rpath_runpath "${REPOROOT}/src/zcash/GenerateParams"
|
||||
|
||||
# NOTE: checksec.sh does not reliably determine whether FORTIFY_SOURCE is
|
||||
# enabled for the entire binary. See issue #915.
|
||||
test_fortify_source "${REPOROOT}/src/zcashd"
|
||||
test_fortify_source "${REPOROOT}/src/zcash-cli"
|
||||
test_fortify_source "${REPOROOT}/src/zcash-gtest"
|
||||
test_fortify_source "${REPOROOT}/src/zcash-tx"
|
||||
test_fortify_source "${REPOROOT}/src/test/test_bitcoin"
|
||||
test_fortify_source "${REPOROOT}/src/zcash/GenerateParams"
|
||||
263
qa/zcash/create_benchmark_archive.py
Normal file
263
qa/zcash/create_benchmark_archive.py
Normal file
@@ -0,0 +1,263 @@
|
||||
import binascii
|
||||
import calendar
|
||||
import json
|
||||
import plyvel
|
||||
import progressbar
|
||||
import os
|
||||
import stat
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
import tarfile
|
||||
import time
|
||||
|
||||
ZCASH_CLI = './src/zcash-cli'
|
||||
USAGE = """
|
||||
Requirements:
|
||||
- find
|
||||
- xz
|
||||
- %s (edit ZCASH_CLI in this script to alter the path)
|
||||
- A running mainnet zcashd using the default datadir with -txindex=1
|
||||
|
||||
Example usage:
|
||||
|
||||
make -C src/leveldb/
|
||||
virtualenv venv
|
||||
. venv/bin/activate
|
||||
pip install --global-option=build_ext --global-option="-L$(pwd)/src/leveldb/" --global-option="-I$(pwd)/src/leveldb/include/" plyvel
|
||||
pip install progressbar2
|
||||
LD_LIBRARY_PATH=src/leveldb python qa/zcash/create_benchmark_archive.py
|
||||
""" % ZCASH_CLI
|
||||
|
||||
def check_deps():
|
||||
if subprocess.call(['which', 'find', 'xz', ZCASH_CLI], stdout=subprocess.PIPE):
|
||||
print USAGE
|
||||
sys.exit()
|
||||
|
||||
def encode_varint(n):
|
||||
v = bytearray()
|
||||
l = 0
|
||||
while True:
|
||||
v.append((n & 0x7F) | (0x80 if l else 0x00))
|
||||
if (n <= 0x7F):
|
||||
break
|
||||
n = (n >> 7) - 1
|
||||
l += 1
|
||||
return bytes(v)[::-1]
|
||||
|
||||
def decode_varint(v):
|
||||
n = 0
|
||||
for ch in range(len(v)):
|
||||
n = (n << 7) | (ord(v[ch]) & 0x7F)
|
||||
if (ord(v[ch]) & 0x80):
|
||||
n += 1
|
||||
else:
|
||||
return n
|
||||
|
||||
def compress_amount(n):
|
||||
if n == 0:
|
||||
return 0
|
||||
e = 0
|
||||
while (((n % 10) == 0) and e < 9):
|
||||
n /= 10
|
||||
e += 1
|
||||
if e < 9:
|
||||
d = (n % 10)
|
||||
assert(d >= 1 and d <= 9)
|
||||
n /= 10
|
||||
return 1 + (n*9 + d - 1)*10 + e
|
||||
else:
|
||||
return 1 + (n - 1)*10 + 9
|
||||
|
||||
OP_DUP = 0x76
|
||||
OP_EQUAL = 0x87
|
||||
OP_EQUALVERIFY = 0x88
|
||||
OP_HASH160 = 0xa9
|
||||
OP_CHECKSIG = 0xac
|
||||
def to_key_id(script):
|
||||
if len(script) == 25 and \
|
||||
script[0] == OP_DUP and \
|
||||
script[1] == OP_HASH160 and \
|
||||
script[2] == 20 and \
|
||||
script[23] == OP_EQUALVERIFY and \
|
||||
script[24] == OP_CHECKSIG:
|
||||
return script[3:23]
|
||||
return bytes()
|
||||
|
||||
def to_script_id(script):
|
||||
if len(script) == 23 and \
|
||||
script[0] == OP_HASH160 and \
|
||||
script[1] == 20 and \
|
||||
script[22] == OP_EQUAL:
|
||||
return script[2:22]
|
||||
return bytes()
|
||||
|
||||
def to_pubkey(script):
|
||||
if len(script) == 35 and \
|
||||
script[0] == 33 and \
|
||||
script[34] == OP_CHECKSIG and \
|
||||
(script[1] == 0x02 or script[1] == 0x03):
|
||||
return script[1:34]
|
||||
if len(script) == 67 and \
|
||||
script[0] == 65 and \
|
||||
script[66] == OP_CHECKSIG and \
|
||||
script[1] == 0x04:
|
||||
return script[1:66] # assuming is fully valid
|
||||
return bytes()
|
||||
|
||||
def compress_script(script):
|
||||
result = bytearray()
|
||||
|
||||
key_id = to_key_id(script)
|
||||
if key_id:
|
||||
result.append(0x00)
|
||||
result.extend(key_id)
|
||||
return bytes(result)
|
||||
|
||||
script_id = to_script_id(script)
|
||||
if script_id:
|
||||
result.append(0x01)
|
||||
result.extend(script_id)
|
||||
return bytes(result)
|
||||
|
||||
pubkey = to_pubkey(script)
|
||||
if pubkey:
|
||||
result.append(0x00)
|
||||
result.extend(pubkey[1:33])
|
||||
if pubkey[0] == 0x02 or pubkey[0] == 0x03:
|
||||
result[0] = pubkey[0]
|
||||
return bytes(result)
|
||||
elif pubkey[0] == 0x04:
|
||||
result[0] = 0x04 | (pubkey[64] & 0x01)
|
||||
return bytes(result)
|
||||
|
||||
size = len(script) + 6
|
||||
result.append(encode_varint(size))
|
||||
result.extend(script)
|
||||
return bytes(result)
|
||||
|
||||
def deterministic_filter(tarinfo):
|
||||
tarinfo.uid = tarinfo.gid = 0
|
||||
tarinfo.uname = tarinfo.gname = "root"
|
||||
tarinfo.mtime = calendar.timegm(time.strptime('2017-05-17', '%Y-%m-%d'))
|
||||
tarinfo.mode |= stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP
|
||||
tarinfo.mode &= ~stat.S_IWGRP
|
||||
if tarinfo.isdir():
|
||||
tarinfo.mode |= \
|
||||
stat.S_IXUSR | \
|
||||
stat.S_IXGRP | \
|
||||
stat.S_IXOTH
|
||||
else:
|
||||
tarinfo.mode &= \
|
||||
~stat.S_IXUSR & \
|
||||
~stat.S_IXGRP & \
|
||||
~stat.S_IXOTH
|
||||
return tarinfo
|
||||
|
||||
def create_benchmark_archive(blk_hash):
|
||||
blk = json.loads(subprocess.check_output([ZCASH_CLI, 'getblock', blk_hash]))
|
||||
print 'Height: %d' % blk['height']
|
||||
print 'Transactions: %d' % len(blk['tx'])
|
||||
|
||||
os.mkdir('benchmark')
|
||||
with open('benchmark/block-%d.dat' % blk['height'], 'wb') as f:
|
||||
f.write(binascii.unhexlify(subprocess.check_output([ZCASH_CLI, 'getblock', blk_hash, 'false']).strip()))
|
||||
|
||||
txs = [json.loads(subprocess.check_output([ZCASH_CLI, 'getrawtransaction', tx, '1'])
|
||||
) for tx in blk['tx']]
|
||||
|
||||
js_txs = len([tx for tx in txs if len(tx['vjoinsplit']) > 0])
|
||||
if js_txs:
|
||||
print 'Block contains %d JoinSplit-containing transactions' % js_txs
|
||||
return
|
||||
|
||||
inputs = [(x['txid'], x['vout']) for tx in txs for x in tx['vin'] if x.has_key('txid')]
|
||||
print 'Total inputs: %d' % len(inputs)
|
||||
|
||||
unique_inputs = {}
|
||||
for i in sorted(inputs):
|
||||
if unique_inputs.has_key(i[0]):
|
||||
unique_inputs[i[0]].append(i[1])
|
||||
else:
|
||||
unique_inputs[i[0]] = [i[1]]
|
||||
print 'Unique input transactions: %d' % len(unique_inputs)
|
||||
|
||||
db_path = 'benchmark/block-%d-inputs' % blk['height']
|
||||
db = plyvel.DB(db_path, create_if_missing=True)
|
||||
wb = db.write_batch()
|
||||
bar = progressbar.ProgressBar(redirect_stdout=True)
|
||||
print 'Collecting input coins for block'
|
||||
for tx in bar(unique_inputs.keys()):
|
||||
rawtx = json.loads(subprocess.check_output([ZCASH_CLI, 'getrawtransaction', tx, '1']))
|
||||
|
||||
mask_size = 0
|
||||
mask_code = 0
|
||||
b = 0
|
||||
while 2+b*8 < len(rawtx['vout']):
|
||||
zero = True
|
||||
i = 0
|
||||
while i < 8 and 2+b*8+i < len(rawtx['vout']):
|
||||
if 2+b*8+i in unique_inputs[tx]:
|
||||
zero = False
|
||||
i += 1
|
||||
if not zero:
|
||||
mask_size = b + 1
|
||||
mask_code += 1
|
||||
b += 1
|
||||
|
||||
coinbase = len(rawtx['vin']) == 1 and 'coinbase' in rawtx['vin'][0]
|
||||
first = len(rawtx['vout']) > 0 and 0 in unique_inputs[tx]
|
||||
second = len(rawtx['vout']) > 1 and 1 in unique_inputs[tx]
|
||||
code = 8*(mask_code - (0 if first or second else 1)) + \
|
||||
(1 if coinbase else 0) + \
|
||||
(2 if first else 0) + \
|
||||
(4 if second else 0)
|
||||
|
||||
coins = bytearray()
|
||||
# Serialized format:
|
||||
# - VARINT(nVersion)
|
||||
coins.extend(encode_varint(rawtx['version']))
|
||||
# - VARINT(nCode)
|
||||
coins.extend(encode_varint(code))
|
||||
# - unspentness bitvector, for vout[2] and further; least significant byte first
|
||||
for b in range(mask_size):
|
||||
avail = 0
|
||||
i = 0
|
||||
while i < 8 and 2+b*8+i < len(rawtx['vout']):
|
||||
if 2+b*8+i in unique_inputs[tx]:
|
||||
avail |= (1 << i)
|
||||
i += 1
|
||||
coins.append(avail)
|
||||
# - the non-spent CTxOuts (via CTxOutCompressor)
|
||||
for i in range(len(rawtx['vout'])):
|
||||
if i in unique_inputs[tx]:
|
||||
coins.extend(encode_varint(compress_amount(int(rawtx['vout'][i]['valueZat']))))
|
||||
coins.extend(compress_script(
|
||||
binascii.unhexlify(rawtx['vout'][i]['scriptPubKey']['hex'])))
|
||||
# - VARINT(nHeight)
|
||||
coins.extend(encode_varint(json.loads(
|
||||
subprocess.check_output([ZCASH_CLI, 'getblockheader', rawtx['blockhash']])
|
||||
)['height']))
|
||||
|
||||
db_key = b'c' + bytes(binascii.unhexlify(tx)[::-1])
|
||||
db_val = bytes(coins)
|
||||
wb.put(db_key, db_val)
|
||||
|
||||
wb.write()
|
||||
db.close()
|
||||
|
||||
# Make reproducible archive
|
||||
os.remove('%s/LOG' % db_path)
|
||||
files = subprocess.check_output(['find', 'benchmark']).strip().split('\n')
|
||||
archive_name = 'block-%d.tar' % blk['height']
|
||||
tar = tarfile.open(archive_name, 'w')
|
||||
for name in sorted(files):
|
||||
tar.add(name, recursive=False, filter=deterministic_filter)
|
||||
tar.close()
|
||||
subprocess.check_call(['xz', '-6', archive_name])
|
||||
print 'Created archive %s.xz' % archive_name
|
||||
subprocess.call(['rm', '-r', 'benchmark'])
|
||||
|
||||
if __name__ == '__main__':
|
||||
check_deps()
|
||||
create_benchmark_archive('0000000007cdb809e48e51dd0b530e8f5073e0a9e9bd7ae920fe23e874658c74')
|
||||
60
qa/zcash/create_wallet_200k_utxos.py
Executable file
60
qa/zcash/create_wallet_200k_utxos.py
Executable file
@@ -0,0 +1,60 @@
|
||||
#!/usr/bin/env python2
|
||||
# Copyright (c) 2017 The Zcash developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
#
|
||||
# Create a large wallet
|
||||
#
|
||||
# To use:
|
||||
# - Copy to qa/rpc-tests/wallet_large.py
|
||||
# - Add wallet_large.py to RPC tests list
|
||||
# - ./qa/pull-tester/rpc-tests.sh wallet_large --nocleanup
|
||||
# - Archive the resulting /tmp/test###### directory
|
||||
#
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import (
|
||||
assert_equal,
|
||||
connect_nodes_bi,
|
||||
initialize_chain_clean,
|
||||
start_nodes,
|
||||
)
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
|
||||
class LargeWalletTest(BitcoinTestFramework):
|
||||
|
||||
def setup_chain(self):
|
||||
print("Initializing test directory "+self.options.tmpdir)
|
||||
initialize_chain_clean(self.options.tmpdir, 2)
|
||||
|
||||
def setup_network(self):
|
||||
self.nodes = start_nodes(2, self.options.tmpdir)
|
||||
connect_nodes_bi(self.nodes, 0, 1)
|
||||
self.is_network_split = False
|
||||
self.sync_all()
|
||||
|
||||
def run_test(self):
|
||||
self.nodes[1].generate(103)
|
||||
self.sync_all()
|
||||
|
||||
inputs = []
|
||||
for i in range(200000):
|
||||
taddr = self.nodes[0].getnewaddress()
|
||||
inputs.append(self.nodes[1].sendtoaddress(taddr, Decimal("0.001")))
|
||||
if i % 1000 == 0:
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
self.nodes[1].generate(1)
|
||||
self.sync_all()
|
||||
print('Node 0: %d transactions, %d UTXOs' %
|
||||
(len(self.nodes[0].listtransactions()), len(self.nodes[0].listunspent())))
|
||||
print('Node 1: %d transactions, %d UTXOs' %
|
||||
(len(self.nodes[1].listtransactions()), len(self.nodes[1].listunspent())))
|
||||
assert_equal(len(self.nodes[0].listunspent()), len(inputs))
|
||||
|
||||
if __name__ == '__main__':
|
||||
LargeWalletTest().main()
|
||||
@@ -1,41 +0,0 @@
|
||||
#! /usr/bin/env python2
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
def main():
|
||||
this_script = os.path.abspath(sys.argv[0])
|
||||
basedir = os.path.dirname(this_script)
|
||||
arch_dir = os.path.join(
|
||||
basedir,
|
||||
'..',
|
||||
'..',
|
||||
'depends',
|
||||
'x86_64-unknown-linux-gnu',
|
||||
)
|
||||
|
||||
exit_code = 0
|
||||
|
||||
if os.path.isdir(arch_dir):
|
||||
lib_dir = os.path.join(arch_dir, 'lib')
|
||||
libraries = os.listdir(lib_dir)
|
||||
|
||||
for lib in libraries:
|
||||
if lib.find(".so") != -1:
|
||||
print lib
|
||||
exit_code = 1
|
||||
else:
|
||||
exit_code = 2
|
||||
print "arch-specific build dir not present: {}".format(arch_dir)
|
||||
print "Did you build the ./depends tree?"
|
||||
print "Are you on a currently unsupported architecture?"
|
||||
|
||||
if exit_code == 0:
|
||||
print "PASS."
|
||||
else:
|
||||
print "FAIL."
|
||||
|
||||
sys.exit(exit_code)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,47 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Execute all of the automated tests related to Zcash.
|
||||
#
|
||||
|
||||
set -eu
|
||||
|
||||
SUITE_EXIT_STATUS=0
|
||||
REPOROOT="$(readlink -f "$(dirname "$0")"/../../)"
|
||||
|
||||
function run_test_phase
|
||||
{
|
||||
echo "===== BEGIN: $*"
|
||||
set +e
|
||||
eval "$@"
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
echo "===== PASSED: $*"
|
||||
else
|
||||
echo "===== FAILED: $*"
|
||||
SUITE_EXIT_STATUS=1
|
||||
fi
|
||||
set -e
|
||||
}
|
||||
|
||||
cd "${REPOROOT}"
|
||||
|
||||
# Test phases:
|
||||
run_test_phase "${REPOROOT}/qa/zcash/check-security-hardening.sh"
|
||||
run_test_phase "${REPOROOT}/qa/zcash/ensure-no-dot-so-in-depends.py"
|
||||
|
||||
# If make check fails, show test-suite.log as part of our run_test_phase
|
||||
# output (and fail the phase with false):
|
||||
run_test_phase make check '||' \
|
||||
'{' \
|
||||
echo '=== ./src/test-suite.log ===' ';' \
|
||||
cat './src/test-suite.log' ';' \
|
||||
false ';' \
|
||||
'}'
|
||||
|
||||
exit $SUITE_EXIT_STATUS
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
201
qa/zcash/full_test_suite.py
Executable file
201
qa/zcash/full_test_suite.py
Executable file
@@ -0,0 +1,201 @@
|
||||
#!/usr/bin/env python2
|
||||
#
|
||||
# Execute all of the automated tests related to Zcash.
|
||||
#
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
REPOROOT = os.path.dirname(
|
||||
os.path.dirname(
|
||||
os.path.dirname(
|
||||
os.path.abspath(__file__)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
def repofile(filename):
|
||||
return os.path.join(REPOROOT, filename)
|
||||
|
||||
|
||||
#
|
||||
# Custom test runners
|
||||
#
|
||||
|
||||
RE_RPATH_RUNPATH = re.compile('No RPATH.*No RUNPATH')
|
||||
RE_FORTIFY_AVAILABLE = re.compile('FORTIFY_SOURCE support available.*Yes')
|
||||
RE_FORTIFY_USED = re.compile('Binary compiled with FORTIFY_SOURCE support.*Yes')
|
||||
|
||||
def test_rpath_runpath(filename):
|
||||
output = subprocess.check_output(
|
||||
[repofile('qa/zcash/checksec.sh'), '--file', repofile(filename)]
|
||||
)
|
||||
if RE_RPATH_RUNPATH.search(output):
|
||||
print('PASS: %s has no RPATH or RUNPATH.' % filename)
|
||||
return True
|
||||
else:
|
||||
print('FAIL: %s has an RPATH or a RUNPATH.' % filename)
|
||||
print(output)
|
||||
return False
|
||||
|
||||
def test_fortify_source(filename):
|
||||
proc = subprocess.Popen(
|
||||
[repofile('qa/zcash/checksec.sh'), '--fortify-file', repofile(filename)],
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
line1 = proc.stdout.readline()
|
||||
line2 = proc.stdout.readline()
|
||||
proc.terminate()
|
||||
if RE_FORTIFY_AVAILABLE.search(line1) and RE_FORTIFY_USED.search(line2):
|
||||
print('PASS: %s has FORTIFY_SOURCE.' % filename)
|
||||
return True
|
||||
else:
|
||||
print('FAIL: %s is missing FORTIFY_SOURCE.' % filename)
|
||||
return False
|
||||
|
||||
def check_security_hardening():
|
||||
ret = True
|
||||
|
||||
# PIE, RELRO, Canary, and NX are tested by make check-security.
|
||||
ret &= subprocess.call(['make', '-C', repofile('src'), 'check-security']) == 0
|
||||
|
||||
ret &= test_rpath_runpath('src/zcashd')
|
||||
ret &= test_rpath_runpath('src/zcash-cli')
|
||||
ret &= test_rpath_runpath('src/zcash-gtest')
|
||||
ret &= test_rpath_runpath('src/zcash-tx')
|
||||
ret &= test_rpath_runpath('src/test/test_bitcoin')
|
||||
ret &= test_rpath_runpath('src/zcash/GenerateParams')
|
||||
|
||||
# NOTE: checksec.sh does not reliably determine whether FORTIFY_SOURCE
|
||||
# is enabled for the entire binary. See issue #915.
|
||||
ret &= test_fortify_source('src/zcashd')
|
||||
ret &= test_fortify_source('src/zcash-cli')
|
||||
ret &= test_fortify_source('src/zcash-gtest')
|
||||
ret &= test_fortify_source('src/zcash-tx')
|
||||
ret &= test_fortify_source('src/test/test_bitcoin')
|
||||
ret &= test_fortify_source('src/zcash/GenerateParams')
|
||||
|
||||
return ret
|
||||
|
||||
def ensure_no_dot_so_in_depends():
    """Check that the depends tree contains no shared (.so) libraries.

    A static-only build must not ship shared objects. Prints each
    offending library name, then PASS/FAIL, and returns True only when
    the arch-specific lib dir exists and contains no ".so" entries.
    """
    arch_dir = os.path.join(
        REPOROOT,
        'depends',
        'x86_64-unknown-linux-gnu',
    )

    exit_code = 0

    if os.path.isdir(arch_dir):
        lib_dir = os.path.join(arch_dir, 'lib')
        libraries = os.listdir(lib_dir)

        for lib in libraries:
            # Was `lib.find(".so") != -1`; `in` is the idiomatic test.
            if ".so" in lib:
                print(lib)
                exit_code = 1
    else:
        exit_code = 2
        # print() form (was py2-only print statements) for consistency
        # with the rest of this file; identical output on Python 2.
        print("arch-specific build dir not present: {}".format(arch_dir))
        print("Did you build the ./depends tree?")
        print("Are you on a currently unsupported architecture?")

    if exit_code == 0:
        print("PASS.")
    else:
        print("FAIL.")

    return exit_code == 0
def util_test():
    """Run the bitcoin-util driven tests; return True on success."""
    src_dir = repofile('src')
    test_env = {
        'PYTHONPATH': repofile('src/test'),
        'srcdir': src_dir,
    }
    result = subprocess.call(
        [repofile('src/test/bitcoin-util-test.py')],
        cwd=src_dir,
        env=test_env,
    )
    return result == 0
|
||||
#
|
||||
# Tests
|
||||
#
|
||||
|
||||
STAGES = [
|
||||
'btest',
|
||||
'gtest',
|
||||
'sec-hard',
|
||||
'no-dot-so',
|
||||
'util-test',
|
||||
'secp256k1',
|
||||
'libsnark',
|
||||
'univalue',
|
||||
'rpc',
|
||||
]
|
||||
|
||||
STAGE_COMMANDS = {
|
||||
'btest': [repofile('src/test/test_bitcoin'), '-p'],
|
||||
'gtest': [repofile('src/zcash-gtest')],
|
||||
'sec-hard': check_security_hardening,
|
||||
'no-dot-so': ensure_no_dot_so_in_depends,
|
||||
'util-test': util_test,
|
||||
'secp256k1': ['make', '-C', repofile('src/secp256k1'), 'check'],
|
||||
'libsnark': ['make', '-C', repofile('src'), 'libsnark-tests'],
|
||||
'univalue': ['make', '-C', repofile('src/univalue'), 'check'],
|
||||
'rpc': [repofile('qa/pull-tester/rpc-tests.sh')],
|
||||
}
|
||||
|
||||
|
||||
#
|
||||
# Test driver
|
||||
#
|
||||
|
||||
def run_stage(stage):
    """Run a single named test stage and return True if it passed.

    A stage command (from STAGE_COMMANDS) is either an argv list, run as
    a subprocess, or a zero-argument callable returning a bool.
    """
    print('Running stage %s' % stage)
    print('=' * (len(stage) + 14))
    # Was a bare py2 `print` statement (a silent no-op under py3);
    # print('') emits the same blank line on both.
    print('')

    cmd = STAGE_COMMANDS[stage]
    # isinstance is the idiomatic type test (was: type(cmd) == type([])).
    if isinstance(cmd, list):
        ret = subprocess.call(cmd) == 0
    else:
        ret = cmd()

    print('')
    print('-' * (len(stage) + 15))
    print('Finished stage %s' % stage)
    print('')

    return ret
def main():
    """Parse command-line arguments and run the requested test stages."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--list-stages', dest='list', action='store_true')
    parser.add_argument('stage', nargs='*', default=STAGES,
                        help='One of %s'%STAGES)
    args = parser.parse_args()

    # --list-stages: print the known stage names and stop.
    if args.list:
        for name in STAGES:
            print(name)
        sys.exit(0)

    # Reject unknown stage names before running anything.
    unknown = [name for name in args.stage if name not in STAGES]
    if unknown:
        print("Invalid stage '%s' (choose from %s)" % (unknown[0], STAGES))
        sys.exit(1)

    # Run every requested stage; remember whether all of them passed.
    passed = True
    for name in args.stage:
        passed &= run_stage(name)

    if not passed:
        print("!!! One or more test stages failed !!!")
        sys.exit(1)
# Script entry point: run main() only when executed directly.
if __name__ == '__main__':
    main()
@@ -1,37 +1,113 @@
|
||||
#!/bin/bash
|
||||
set -u
|
||||
|
||||
set -e
|
||||
|
||||
DATADIR=./benchmark-datadir
|
||||
SHA256CMD="$(command -v sha256sum || echo shasum)"
|
||||
SHA256ARGS="$(command -v sha256sum >/dev/null || echo '-a 256')"
|
||||
|
||||
function zcash_rpc {
    # Issue one RPC call against the benchmark node.
    # Fix: a stale pre-merge duplicate of this line (with -rpcwait) was
    # left in place, so every call invoked zcash-cli twice. -rpcwait is
    # now applied only in zcash_rpc_wait_for_start.
    ./src/zcash-cli -datadir="$DATADIR" -rpcuser=user -rpcpassword=password -rpcport=5983 "$@"
}
|
||||
|
||||
# RPC wrapper for benchmarks that legitimately take a long time.
function zcash_rpc_slow {
    # Timeout of 1 hour
    zcash_rpc -rpcclienttimeout=3600 "$@"
}
|
||||
|
||||
# RPC wrapper for the very slowest benchmarks.
function zcash_rpc_veryslow {
    # Timeout of 2.5 hours
    zcash_rpc -rpcclienttimeout=9000 "$@"
}
|
||||
|
||||
# Block until the node's RPC interface is up (-rpcwait retries until the
# server answers); discard the getinfo output.
function zcash_rpc_wait_for_start {
    zcash_rpc -rpcwait getinfo > /dev/null
}
|
||||
|
||||
# Mine 101 regtest blocks so the first coinbase matures and is spendable.
function zcashd_generate {
    zcash_rpc generate 101 > /dev/null
}
|
||||
|
||||
# Verify and unpack a prepared benchmark datadir archive.
#   $1 = archive basename (without .tar.xz)
#   $2 = expected SHA-256 digest of the archive
function extract_benchmark_datadir {
    if [ -f "$1.tar.xz" ]; then
        # Check the hash of the archive:
        # NOTE(review): with `set -e` active at the top of this script, a
        # failing checksum command may abort before ARCHIVE_RESULT is
        # inspected below — confirm intended behavior.
        "$SHA256CMD" $SHA256ARGS -c <<EOF
$2 $1.tar.xz
EOF
        ARCHIVE_RESULT=$?
    else
        echo "$1.tar.xz not found."
        ARCHIVE_RESULT=1
    fi
    if [ $ARCHIVE_RESULT -ne 0 ]; then
        zcashd_stop
        echo
        echo "Please download it and place it in the base directory of the repository."
        exit 1
    fi
    xzcat "$1.tar.xz" | tar x
}
|
||||
|
||||
# Unpack the 200k-UTXO benchmark datadir and point DATADIR at node $1
# (0 = receiving wallet, 1 = sending wallet, per the archive layout).
function use_200k_benchmark {
    rm -rf benchmark-200k-UTXOs
    extract_benchmark_datadir benchmark-200k-UTXOs dc8ab89eaa13730da57d9ac373c1f4e818a37181c1443f61fd11327e49fbcc5e
    DATADIR="./benchmark-200k-UTXOs/node$1"
}
|
||||
|
||||
function zcashd_start {
    # Start a regtest zcashd for benchmark "$1"; "$2" may select a
    # prepared 200k-UTXO datadir for the wallet benchmarks.
    # Fix: removed a pre-merge unconditional rm/mkdir/touch preamble that
    # was left in front of this case by a merge — it wiped and recreated
    # the default datadir even when use_200k_benchmark was about to
    # repoint DATADIR at an unpacked archive.
    case "$1" in
        sendtoaddress|loadwallet|listunspent)
            case "$2" in
                200k-recv)
                    use_200k_benchmark 0
                    ;;
                200k-send)
                    use_200k_benchmark 1
                    ;;
                *)
                    echo "Bad arguments to zcashd_start."
                    exit 1
            esac
            ;;
        *)
            rm -rf "$DATADIR"
            mkdir -p "$DATADIR/regtest"
            touch "$DATADIR/zcash.conf"
    esac
    ./src/zcashd -regtest -datadir="$DATADIR" -rpcuser=user -rpcpassword=password -rpcport=5983 -showmetrics=0 &
    ZCASHD_PID=$!
    zcash_rpc_wait_for_start
}
|
||||
|
||||
function zcashd_stop {
    # Ask the node to shut down, then reap the background process.
    # Fix: removed a stale duplicate `wait $ZCASH_PID` (merge leftover
    # with a misspelled variable) — it expanded empty and waited on all
    # background jobs before the correct wait below.
    zcash_rpc stop > /dev/null
    wait $ZCASHD_PID
}
|
||||
|
||||
function zcashd_massif_start {
    # Start zcashd under valgrind --tool=massif for heap profiling;
    # arguments as for zcashd_start.
    # Fix: removed the same pre-merge unconditional rm/mkdir/touch
    # preamble that was left in front of this case by a merge.
    case "$1" in
        sendtoaddress|loadwallet|listunspent)
            case "$2" in
                200k-recv)
                    use_200k_benchmark 0
                    ;;
                200k-send)
                    use_200k_benchmark 1
                    ;;
                *)
                    echo "Bad arguments to zcashd_massif_start."
                    exit 1
            esac
            ;;
        *)
            rm -rf "$DATADIR"
            mkdir -p "$DATADIR/regtest"
            touch "$DATADIR/zcash.conf"
    esac
    rm -f massif.out
    valgrind --tool=massif --time-unit=ms --massif-out-file=massif.out ./src/zcashd -regtest -datadir="$DATADIR" -rpcuser=user -rpcpassword=password -rpcport=5983 -showmetrics=0 &
    ZCASHD_PID=$!
    zcash_rpc_wait_for_start
}
|
||||
|
||||
function zcashd_massif_stop {
|
||||
@@ -42,11 +118,12 @@ function zcashd_massif_stop {
|
||||
|
||||
# Start zcashd under valgrind's leak checker, logging to valgrind.out.
function zcashd_valgrind_start {
    # Fresh default datadir for every run.
    rm -rf "$DATADIR"
    mkdir -p "$DATADIR"
    mkdir -p "$DATADIR/regtest"
    touch "$DATADIR/zcash.conf"
    # Remove any log from a previous run before starting.
    rm -f valgrind.out
    valgrind --leak-check=yes -v --error-limit=no --log-file="valgrind.out" ./src/zcashd -regtest -datadir="$DATADIR" -rpcuser=user -rpcpassword=password -rpcport=5983 -showmetrics=0 &
    ZCASHD_PID=$!
    zcash_rpc_wait_for_start
}
|
||||
|
||||
function zcashd_valgrind_stop {
|
||||
@@ -55,12 +132,41 @@ function zcashd_valgrind_stop {
|
||||
cat valgrind.out
|
||||
}
|
||||
|
||||
# Verify and unpack the block-107134 benchmark data into the node's
# regtest directory (used by the connectblockslow benchmarks).
function extract_benchmark_data {
    if [ -f "block-107134.tar.xz" ]; then
        # Check the hash of the archive:
        # NOTE(review): with `set -e` active at the top of this script, a
        # failing checksum command may abort before ARCHIVE_RESULT is
        # inspected below — confirm intended behavior.
        "$SHA256CMD" $SHA256ARGS -c <<EOF
4bd5ad1149714394e8895fa536725ed5d6c32c99812b962bfa73f03b5ffad4bb block-107134.tar.xz
EOF
        ARCHIVE_RESULT=$?
    else
        echo "block-107134.tar.xz not found."
        ARCHIVE_RESULT=1
    fi
    if [ $ARCHIVE_RESULT -ne 0 ]; then
        zcashd_stop
        echo
        echo "Please generate it using qa/zcash/create_benchmark_archive.py"
        echo "and place it in the base directory of the repository."
        echo "Usage details are inside the Python script."
        exit 1
    fi
    xzcat block-107134.tar.xz | tar x -C "$DATADIR/regtest"
}
|
||||
|
||||
|
||||
if [ $# -lt 2 ]
|
||||
then
|
||||
echo "$0 : At least two arguments are required!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Precomputation
|
||||
case "$1" in
|
||||
*)
|
||||
case "$2" in
|
||||
verifyjoinsplit)
|
||||
zcashd_start
|
||||
zcashd_start "${@:2}"
|
||||
RAWJOINSPLIT=$(zcash_rpc zcsamplejoinsplit)
|
||||
zcashd_stop
|
||||
esac
|
||||
@@ -68,7 +174,7 @@ esac
|
||||
|
||||
case "$1" in
|
||||
time)
|
||||
zcashd_start
|
||||
zcashd_start "${@:2}"
|
||||
case "$2" in
|
||||
sleep)
|
||||
zcash_rpc zcbenchmark sleep 10
|
||||
@@ -83,13 +189,13 @@ case "$1" in
|
||||
zcash_rpc zcbenchmark verifyjoinsplit 1000 "\"$RAWJOINSPLIT\""
|
||||
;;
|
||||
solveequihash)
|
||||
zcash_rpc zcbenchmark solveequihash 50 "${@:3}"
|
||||
zcash_rpc_slow zcbenchmark solveequihash 50 "${@:3}"
|
||||
;;
|
||||
verifyequihash)
|
||||
zcash_rpc zcbenchmark verifyequihash 1000
|
||||
;;
|
||||
validatelargetx)
|
||||
zcash_rpc zcbenchmark validatelargetx 5
|
||||
zcash_rpc zcbenchmark validatelargetx 10 "${@:3}"
|
||||
;;
|
||||
trydecryptnotes)
|
||||
zcash_rpc zcbenchmark trydecryptnotes 1000 "${@:3}"
|
||||
@@ -97,15 +203,28 @@ case "$1" in
|
||||
incnotewitnesses)
|
||||
zcash_rpc zcbenchmark incnotewitnesses 100 "${@:3}"
|
||||
;;
|
||||
connectblockslow)
|
||||
extract_benchmark_data
|
||||
zcash_rpc zcbenchmark connectblockslow 10
|
||||
;;
|
||||
sendtoaddress)
|
||||
zcash_rpc zcbenchmark sendtoaddress 10 "${@:4}"
|
||||
;;
|
||||
loadwallet)
|
||||
zcash_rpc zcbenchmark loadwallet 10
|
||||
;;
|
||||
listunspent)
|
||||
zcash_rpc zcbenchmark listunspent 10
|
||||
;;
|
||||
*)
|
||||
zcashd_stop
|
||||
echo "Bad arguments."
|
||||
echo "Bad arguments to time."
|
||||
exit 1
|
||||
esac
|
||||
zcashd_stop
|
||||
;;
|
||||
memory)
|
||||
zcashd_massif_start
|
||||
zcashd_massif_start "${@:2}"
|
||||
case "$2" in
|
||||
sleep)
|
||||
zcash_rpc zcbenchmark sleep 1
|
||||
@@ -114,26 +233,42 @@ case "$1" in
|
||||
zcash_rpc zcbenchmark parameterloading 1
|
||||
;;
|
||||
createjoinsplit)
|
||||
zcash_rpc zcbenchmark createjoinsplit 1 "${@:3}"
|
||||
zcash_rpc_slow zcbenchmark createjoinsplit 1 "${@:3}"
|
||||
;;
|
||||
verifyjoinsplit)
|
||||
zcash_rpc zcbenchmark verifyjoinsplit 1 "\"$RAWJOINSPLIT\""
|
||||
;;
|
||||
solveequihash)
|
||||
zcash_rpc zcbenchmark solveequihash 1 "${@:3}"
|
||||
zcash_rpc_slow zcbenchmark solveequihash 1 "${@:3}"
|
||||
;;
|
||||
verifyequihash)
|
||||
zcash_rpc zcbenchmark verifyequihash 1
|
||||
;;
|
||||
validatelargetx)
|
||||
zcash_rpc zcbenchmark validatelargetx 1
|
||||
;;
|
||||
trydecryptnotes)
|
||||
zcash_rpc zcbenchmark trydecryptnotes 1 "${@:3}"
|
||||
;;
|
||||
incnotewitnesses)
|
||||
zcash_rpc zcbenchmark incnotewitnesses 1 "${@:3}"
|
||||
;;
|
||||
connectblockslow)
|
||||
extract_benchmark_data
|
||||
zcash_rpc zcbenchmark connectblockslow 1
|
||||
;;
|
||||
sendtoaddress)
|
||||
zcash_rpc zcbenchmark sendtoaddress 1 "${@:4}"
|
||||
;;
|
||||
loadwallet)
|
||||
# The initial load is sufficient for measurement
|
||||
;;
|
||||
listunspent)
|
||||
zcash_rpc zcbenchmark listunspent 1
|
||||
;;
|
||||
*)
|
||||
zcashd_massif_stop
|
||||
echo "Bad arguments."
|
||||
echo "Bad arguments to memory."
|
||||
exit 1
|
||||
esac
|
||||
zcashd_massif_stop
|
||||
@@ -149,13 +284,13 @@ case "$1" in
|
||||
zcash_rpc zcbenchmark parameterloading 1
|
||||
;;
|
||||
createjoinsplit)
|
||||
zcash_rpc zcbenchmark createjoinsplit 1 "${@:3}"
|
||||
zcash_rpc_veryslow zcbenchmark createjoinsplit 1 "${@:3}"
|
||||
;;
|
||||
verifyjoinsplit)
|
||||
zcash_rpc zcbenchmark verifyjoinsplit 1 "\"$RAWJOINSPLIT\""
|
||||
;;
|
||||
solveequihash)
|
||||
zcash_rpc zcbenchmark solveequihash 1 "${@:3}"
|
||||
zcash_rpc_veryslow zcbenchmark solveequihash 1 "${@:3}"
|
||||
;;
|
||||
verifyequihash)
|
||||
zcash_rpc zcbenchmark verifyequihash 1
|
||||
@@ -166,9 +301,13 @@ case "$1" in
|
||||
incnotewitnesses)
|
||||
zcash_rpc zcbenchmark incnotewitnesses 1 "${@:3}"
|
||||
;;
|
||||
connectblockslow)
|
||||
extract_benchmark_data
|
||||
zcash_rpc zcbenchmark connectblockslow 1
|
||||
;;
|
||||
*)
|
||||
zcashd_valgrind_stop
|
||||
echo "Bad arguments."
|
||||
echo "Bad arguments to valgrind."
|
||||
exit 1
|
||||
esac
|
||||
zcashd_valgrind_stop
|
||||
@@ -189,12 +328,12 @@ case "$1" in
|
||||
rm -f valgrind.out
|
||||
;;
|
||||
*)
|
||||
echo "Bad arguments."
|
||||
echo "Bad arguments to valgrind-tests."
|
||||
exit 1
|
||||
esac
|
||||
;;
|
||||
*)
|
||||
echo "Bad arguments."
|
||||
echo "Invalid benchmark type."
|
||||
exit 1
|
||||
esac
|
||||
|
||||
|
||||
Reference in New Issue
Block a user