2 # Copyright (c) 2014-2016 The Bitcoin Core developers
3 # Distributed under the MIT software license, see the accompanying
4 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
5 """Helpful routines for regression testing."""
from base64 import b64encode
from binascii import hexlify, unhexlify
from decimal import Decimal, ROUND_DOWN
import json
import logging
import os
import random
import re
import time

from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
# Module-wide logger for the test framework utility helpers.
logger = logging.getLogger("TestFramework.utils")
def assert_fee_amount(fee, tx_size, fee_per_kB):
    """Assert the fee was in range.

    The fee must be at least the target (tx_size * fee_per_kB / 1000) and at
    most the target computed with the size padded by 2 bytes, since the
    wallet's size estimation may be slightly off.
    """
    target_fee = tx_size * fee_per_kB / 1000
    # BUG FIX: the low-fee guard was missing, so this raised unconditionally.
    if fee < target_fee:
        raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
    # allow the wallet's estimation to be at most 2 bytes off
    if fee > (tx_size + 2) * fee_per_kB / 1000:
        raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))
def assert_equal(thing1, thing2, *args):
    """Raise AssertionError unless every supplied value compares equal."""
    items = (thing1, thing2) + args
    if any(item != thing1 for item in items):
        raise AssertionError("not(%s)" % " == ".join(str(item) for item in items))
def assert_greater_than(thing1, thing2):
    """Assert that thing1 > thing2."""
    # BUG FIX: the comparison guard was missing, so this raised unconditionally.
    if thing1 <= thing2:
        raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))
def assert_greater_than_or_equal(thing1, thing2):
    """Assert that thing1 >= thing2."""
    # BUG FIX: the comparison guard was missing, so this raised unconditionally.
    if thing1 < thing2:
        raise AssertionError("%s < %s" % (str(thing1), str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
    """Assert that fun(*args, **kwds) raises an exception of type exc.

    Thin wrapper around assert_raises_message with no message check.
    """
    assert_raises_message(exc, None, fun, *args, **kwds)
def assert_raises_message(exc, message, fun, *args, **kwds):
    """Assert that fun(*args, **kwds) raises exc, optionally checking the message.

    If message is not None it must be a substring of the raised exception's
    e.error['message'] field (JSON-RPC style errors).
    Raises AssertionError if no exception, the wrong exception type, or a
    non-matching message is observed.
    """
    # BUG FIX: the try/call/except-exc structure was missing from this block.
    try:
        fun(*args, **kwds)
    except exc as e:
        if message is not None and message not in e.error['message']:
            raise AssertionError("Expected substring not found:" + e.error['message'])
    except Exception as e:
        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
    else:
        raise AssertionError("No exception raised")
def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
    """Run an RPC and verify that a specific JSONRPC exception code and message is raised.

    Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
    and verifies that the error code and message are as expected. Throws AssertionError if
    no JSONRPCException was raised or if the error code/message are not as expected.

    Args:
        code (int), optional: the error code returned by the RPC call (defined
            in src/rpc/protocol.h). Set to None if checking the error code is not required.
        message (string), optional: [a substring of] the error string returned by the
            RPC call. Set to None if checking the error string is not required.
        fun (function): the function to call. This should be the name of an RPC.
        args*: positional arguments for the function.
        kwds**: named arguments for the function.
    """
    # BUG FIX: the try/call and the no-exception else branch were missing.
    try:
        fun(*args, **kwds)
    except JSONRPCException as e:
        # JSONRPCException was thrown as expected. Check the code and message values are correct.
        if (code is not None) and (code != e.error["code"]):
            raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
        if (message is not None) and (message not in e.error['message']):
            raise AssertionError("Expected substring not found:" + e.error['message'])
    except Exception as e:
        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
    else:
        raise AssertionError("No exception raised")
def assert_is_hex_string(string):
    """Assert that *string* parses as a hexadecimal number."""
    # BUG FIX: the try/int-parse and the raise line were missing from this block.
    try:
        int(string, 16)
    except Exception as e:
        raise AssertionError(
            "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
def assert_is_hash_string(string, length=64):
    """Assert that *string* looks like a lowercase hex hash of the given length.

    Pass length=None (or 0) to skip the length check.
    """
    if not isinstance(string, str):
        raise AssertionError("Expected a string, got type %r" % type(string))
    if length and len(string) != length:
        raise AssertionError(
            "String of length %d expected; got %d" % (length, len(string)))
    if not re.match('[abcdef0-9]+$', string):
        raise AssertionError(
            "String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find=False):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    If the should_not_find flag is true, to_match should not be found
    in object_array
    """
    if should_not_find:
        # expected must be empty when asserting absence
        assert_equal(expected, {})
    # BUG FIX: num_matched init and the all_match filter were missing.
    num_matched = 0
    for item in object_array:
        all_match = True
        for key, value in to_match.items():
            if item[key] != value:
                all_match = False
        if not all_match:
            continue
        elif should_not_find:
            num_matched = num_matched + 1
        for key, value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
            num_matched = num_matched + 1
    if num_matched == 0 and not should_not_find:
        raise AssertionError("No objects matched %s" % (str(to_match)))
    if num_matched > 0 and should_not_find:
        raise AssertionError("Objects were found %s" % (str(to_match)))
def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    value = Decimal("20000000.00000003")
    round_tripped = json.loads(json.dumps(float(value)))
    satoshis = int(round_tripped * 1.0e8)
    if satoshis != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
    """Return the number of bytes encoded by a hex string."""
    decoded = bytes.fromhex(hex_string)
    return len(decoded)
def bytes_to_hex_str(byte_str):
    """Convert a bytes object into a lowercase ASCII hex string."""
    hex_bytes = hexlify(byte_str)
    return hex_bytes.decode('ascii')
def hex_str_to_bytes(hex_str):
    """Convert a hex string into the bytes it encodes."""
    ascii_bytes = hex_str.encode('ascii')
    return unhexlify(ascii_bytes)
def str_to_b64str(string):
    """Base64-encode a text string; return the result as an ASCII string."""
    utf8_bytes = string.encode('utf-8')
    return b64encode(utf8_bytes).decode('ascii')
def satoshi_round(amount):
    """Round *amount* down to eight decimal places (one satoshi)."""
    quantum = Decimal('0.00000001')
    return Decimal(amount).quantize(quantum, rounding=ROUND_DOWN)
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None):
    """Poll *predicate* until it returns true, or fail.

    Gives up after *attempts* polls or *timeout* seconds, whichever comes
    first (default: 60 seconds when neither is given). If *lock* is provided,
    it is held while calling the predicate. Raises AssertionError (via the
    assert_greater_than helpers, to print the cause) on timeout.
    """
    if attempts == float('inf') and timeout == float('inf'):
        timeout = 60
    # BUG FIX: the attempt counter and the polling loop body were missing.
    attempt = 0
    timeout += time.time()

    while attempt < attempts and time.time() < timeout:
        if lock:
            with lock:
                if predicate():
                    return
        else:
            if predicate():
                return
        attempt += 1
        time.sleep(0.05)

    # Print the cause of the timeout
    assert_greater_than(attempts, attempt)
    assert_greater_than(timeout, time.time())
    raise RuntimeError('Unreachable')
182 # RPC/P2P connection constants and functions
183 ############################################
185 # The maximum number of nodes a single test can spawn
187 # Don't assign rpc or p2p ports lower than this
189 # The number of ports to "reserve" for p2p and rpc, each
193 # Must be initialized with a unique integer for each process
def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None):
    """
    Args:
        url (str): URL of the RPC server to call
        node_number (int): the node number (or id) that this calls to

    Kwargs:
        timeout (int): HTTP timeout in seconds
        coveragedir (str): directory for coverage logs (None disables coverage)

    Returns:
        AuthServiceProxy. convenience object for making RPC calls.
    """
    # BUG FIX: proxy_kwargs dict initialization was missing.
    proxy_kwargs = {}
    if timeout is not None:
        proxy_kwargs['timeout'] = timeout

    proxy = AuthServiceProxy(url, **proxy_kwargs)
    proxy.url = url  # store URL on proxy for info

    coverage_logfile = coverage.get_filename(
        coveragedir, node_number) if coveragedir else None

    return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def p2p_port(n):
    """Return the P2P listening port for node number n.

    Offset by PortSeed.n so concurrent test processes don't collide.
    NOTE(review): MAX_NODES/PORT_MIN/PORT_RANGE/PortSeed are module-level
    constants not visible in this chunk — confirm they are defined above.
    """
    # BUG FIX: the `def` header line was lost; a bare assert/return is
    # invalid at module level.
    assert(n <= MAX_NODES)
    return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_port(n):
    """Return the RPC port for node number n (p2p range shifted by PORT_RANGE).

    NOTE(review): MAX_NODES/PORT_MIN/PORT_RANGE/PortSeed are module-level
    constants not visible in this chunk — confirm they are defined above.
    """
    # BUG FIX: the `def` header line was lost; a bare return is invalid at
    # module level.
    return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_url(datadir, i, rpchost=None):
    """Build the http RPC URL for node i using credentials from its datadir.

    rpchost may be "host" or "host:port"; defaults to 127.0.0.1 and the
    standard rpc_port(i).
    """
    rpc_u, rpc_p = get_auth_cookie(datadir)
    # BUG FIX: the host/port defaults and the rpchost branch were missing.
    host = '127.0.0.1'
    port = rpc_port(i)
    if rpchost:
        parts = rpchost.split(':')
        if len(parts) == 2:
            host, port = parts
        else:
            host = rpchost
    return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
def initialize_datadir(dirname, n):
    """Create the datadir for node n and write a minimal regtest bitcoin.conf.

    Returns the path of the created datadir.
    """
    datadir = os.path.join(dirname, "node" + str(n))
    # BUG FIX: directory creation and the return statement were missing.
    if not os.path.isdir(datadir):
        os.makedirs(datadir)
    with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
        f.write("regtest=1\n")
        f.write("port=" + str(p2p_port(n)) + "\n")
        f.write("rpcport=" + str(rpc_port(n)) + "\n")
        f.write("listenonion=0\n")
    return datadir
def get_datadir_path(dirname, n):
    """Return the datadir path for node number n under dirname."""
    node_dir = "node" + str(n)
    return os.path.join(dirname, node_dir)
def get_auth_cookie(datadir):
    """Return (user, password) RPC credentials for the node in *datadir*.

    Reads rpcuser/rpcpassword from bitcoin.conf if present; a regtest/.cookie
    file (user:password) overrides both. Raises ValueError if no credentials
    are found.
    """
    # BUG FIX: variable initialization, the conf-file line loop and the
    # cookie read were missing from this block.
    user = None
    password = None
    if os.path.isfile(os.path.join(datadir, "bitcoin.conf")):
        with open(os.path.join(datadir, "bitcoin.conf"), 'r', encoding='utf8') as f:
            for line in f:
                if line.startswith("rpcuser="):
                    assert user is None  # Ensure that there is only one rpcuser line
                    user = line.split("=")[1].strip("\n")
                if line.startswith("rpcpassword="):
                    assert password is None  # Ensure that there is only one rpcpassword line
                    password = line.split("=")[1].strip("\n")
    if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")):
        with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f:
            userpass = f.read()
            split_userpass = userpass.split(':')
            user = split_userpass[0]
            password = split_userpass[1]
    if user is None or password is None:
        raise ValueError("No RPC credentials")
    return user, password
def log_filename(dirname, n_node, logname):
    """Return the path of *logname* inside node n_node's regtest directory."""
    node_dir = "node" + str(n_node)
    return os.path.join(dirname, node_dir, "regtest", logname)
def get_bip9_status(node, key):
    """Return the bip9_softforks entry for *key* from node's getblockchaininfo."""
    chain_info = node.getblockchaininfo()
    return chain_info['bip9_softforks'][key]
def set_node_times(nodes, t):
    """Set the mock time of every node in *nodes* to t.

    BUG FIX: the function body was missing entirely from this block.
    """
    for node in nodes:
        node.setmocktime(t)
def disconnect_nodes(from_connection, node_num):
    """Disconnect peers of *from_connection* whose subver tags node_num.

    Polls getpeerinfo (up to ~5s) until the matching peers are gone;
    raises AssertionError on timeout.
    """
    for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
        from_connection.disconnectnode(nodeid=peer_id)

    # BUG FIX: the poll/sleep loop around the disconnect wait was missing.
    for _ in range(50):
        if [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == []:
            break
        time.sleep(0.1)
    else:
        raise AssertionError("timed out waiting for disconnect")
def connect_nodes(from_connection, node_num):
    """Connect *from_connection* to the node listening on p2p_port(node_num)."""
    ip_port = "127.0.0.1:" + str(p2p_port(node_num))
    from_connection.addnode(ip_port, "onetry")
    # poll until version handshake complete to avoid race conditions
    # with transaction relaying
    while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
        # BUG FIX: the sleep in the handshake poll loop was missing
        # (the loop would otherwise busy-spin).
        time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
    """Connect nodes[a] and nodes[b] to each other, in both directions."""
    for src, dst in ((a, b), (b, a)):
        connect_nodes(nodes[src], dst)
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same tip.

    sync_blocks needs to be called with an rpc_connections set that has least
    one node already synced to the latest, stable tip, otherwise there's a
    chance it might return before all nodes are stably synced.
    """
    # Use getblockcount() instead of waitforblockheight() to determine the
    # initial max height because the two RPCs look at different internal global
    # variables (chainActive vs latestBlock) and the former gets updated
    # earlier.
    maxheight = max(x.getblockcount() for x in rpc_connections)
    start_time = cur_time = time.time()
    while cur_time <= start_time + timeout:
        tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
        if all(t["height"] == maxheight for t in tips):
            if all(t["hash"] == tips[0]["hash"] for t in tips):
                # BUG FIX: the success return was missing, so a successful
                # sync fell through to the mismatch error.
                return
            raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
                "".join("\n  {!r}".format(tip) for tip in tips)))
        cur_time = time.time()
    raise AssertionError("Block sync to height {} timed out:{}".format(
        maxheight, "".join("\n  {!r}".format(tip) for tip in tips)))
def sync_chain(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same best block
    """
    # BUG FIX: the retry loop, sleep and success return were missing.
    while timeout > 0:
        best_hash = [x.getbestblockhash() for x in rpc_connections]
        if best_hash == [best_hash[0]] * len(best_hash):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Chain sync failed: Best block hashes don't match")
def sync_mempools(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same transactions in their memory
    pools
    """
    # BUG FIX: the retry loop, num_match init, sleep and success return
    # were missing.
    while timeout > 0:
        pool = set(rpc_connections[0].getrawmempool())
        num_match = 1
        for i in range(1, len(rpc_connections)):
            if set(rpc_connections[i].getrawmempool()) == pool:
                num_match = num_match + 1
        if num_match == len(rpc_connections):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Mempool sync failed")
367 # Transaction/Block functions
368 #############################
def find_output(node, txid, amount):
    """
    Return index to output of txid with value amount
    Raises exception if there is none.
    """
    txdata = node.getrawtransaction(txid, 1)
    for i in range(len(txdata["vout"])):
        if txdata["vout"][i]["value"] == amount:
            # BUG FIX: the `return i` on a match was missing.
            return i
    raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
    """
    Return a random set of unspent txouts that are enough to pay amount_needed
    """
    assert(confirmations_required >= 0)
    utxo = from_node.listunspent(confirmations_required)
    # BUG FIX: the shuffle, inputs init and utxo.pop() were missing.
    random.shuffle(utxo)
    inputs = []
    total_in = Decimal("0.00000000")
    while total_in < amount_needed and len(utxo) > 0:
        t = utxo.pop()
        total_in += t["amount"]
        inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
    if total_in < amount_needed:
        raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
    return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
    """
    Create change output(s), return them
    """
    # BUG FIX: the outputs dict init, the change>0 guard and the return
    # were missing.
    outputs = {}
    amount = amount_out + fee
    change = amount_in - amount
    if change > amount * 2:
        # Create an extra change output to break up big inputs
        change_address = from_node.getnewaddress()
        # Split change in two, being careful of rounding:
        outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
        change = amount_in - amount - outputs[change_address]
    if change > 0:
        outputs[from_node.getnewaddress()] = change
    return outputs
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a random transaction.
    Returns (txid, hex-encoded-transaction-data, fee)
    """
    sender = random.choice(nodes)
    recipient = random.choice(nodes)
    fee = min_fee + fee_increment * random.randint(0, fee_variants)

    (total_in, inputs) = gather_inputs(sender, amount + fee)
    outputs = make_change(sender, total_in, amount, fee)
    outputs[recipient.getnewaddress()] = float(amount)

    rawtx = sender.createrawtransaction(inputs, outputs)
    signresult = sender.signrawtransaction(rawtx)
    txid = sender.sendrawtransaction(signresult["hex"], True)

    return (txid, signresult["hex"], fee)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
    """Split the node's coins until it owns at least *count* confirmed utxos."""
    to_generate = int(0.5 * count) + 101
    while to_generate > 0:
        node.generate(min(25, to_generate))
        # BUG FIX: the counter decrement was missing (infinite loop).
        to_generate -= 25
    utxos = node.listunspent()
    iterations = count - len(utxos)
    addr1 = node.getnewaddress()
    addr2 = node.getnewaddress()
    if iterations <= 0:
        return utxos
    for i in range(iterations):
        # Split one utxo into two halves, paying the given fee.
        t = utxos.pop()
        inputs = []
        inputs.append({"txid": t["txid"], "vout": t["vout"]})
        outputs = {}
        send_value = t['amount'] - fee
        outputs[addr1] = satoshi_round(send_value / 2)
        outputs[addr2] = satoshi_round(send_value / 2)
        raw_tx = node.createrawtransaction(inputs, outputs)
        signed_tx = node.signrawtransaction(raw_tx)["hex"]
        node.sendrawtransaction(signed_tx)

    # Confirm everything created above.
    while (node.getmempoolinfo()['size'] > 0):
        node.generate(1)

    utxos = node.listunspent()
    assert(len(utxos) >= count)
    return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
    """Return a hex string of 128 large OP_RETURN txouts for padding txs."""
    # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
    # So we have big transactions (and therefore can't fit very many into each block)
    # create one script_pubkey
    script_pubkey = "6a4d0200"  # OP_RETURN OP_PUSH2 512 bytes
    # BUG FIX: both construction loops and the return were missing.
    for i in range(512):
        script_pubkey = script_pubkey + "01"
    # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
    txouts = "81"
    for k in range(128):
        # add txout value
        txouts = txouts + "0000000000000000"
        # add length of script_pubkey
        txouts = txouts + "fd0402"
        # add script_pubkey
        txouts = txouts + script_pubkey
    return txouts
def create_tx(node, coinbase, to_address, amount):
    """Create and sign a 1-in/1-out tx spending output 0 of *coinbase*.

    Returns the signed transaction hex; asserts signing completed.
    """
    tx_inputs = [{"txid": coinbase, "vout": 0}]
    tx_outputs = {to_address: amount}
    raw = node.createrawtransaction(tx_inputs, tx_outputs)
    signed = node.signrawtransaction(raw)
    assert_equal(signed["complete"], True)
    return signed["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
    """Send *num* padded transactions spending popped utxos; return their txids."""
    addr = node.getnewaddress()
    # BUG FIX: the txids accumulator, the per-utxo loop and the raw-tx
    # splice were missing from this block.
    txids = []
    for _ in range(num):
        t = utxos.pop()
        inputs = [{"txid": t["txid"], "vout": t["vout"]}]
        outputs = {}
        change = t['amount'] - fee
        outputs[addr] = satoshi_round(change)
        rawtx = node.createrawtransaction(inputs, outputs)
        # Splice the big OP_RETURN txouts into the raw tx before the change
        # output (offsets 92/94 in the serialized hex).
        newtx = rawtx[0:92]
        newtx = newtx + txouts
        newtx = newtx + rawtx[94:]
        signresult = node.signrawtransaction(newtx, None, None, "NONE")
        txid = node.sendrawtransaction(signresult["hex"], True)
        txids.append(txid)
    return txids
def mine_large_block(node, utxos=None):
    """Fill a block with large transactions and mine it."""
    # generate a 66k transaction,
    # and 14 of them is close to the 1MB block limit
    # BUG FIX: num init, the utxo refill guard and the final generate(1)
    # were missing from this block.
    num = 14
    txouts = gen_return_txouts()
    utxos = utxos if utxos is not None else []
    if len(utxos) < num:
        utxos.clear()
        utxos.extend(node.listunspent())
    fee = 100 * node.getnetworkinfo()["relayfee"]
    create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
    node.generate(1)