[tests] fix flake8 warnings in test_framework.py and util.py
test/functional/test_framework/util.py
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Helpful routines for regression testing."""

from base64 import b64encode
from binascii import hexlify, unhexlify
from decimal import Decimal, ROUND_DOWN
import errno
import http.client
import json
import logging
import os
import random
import re
import subprocess
import tempfile
import time

from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException

COVERAGE_DIR = None

logger = logging.getLogger("TestFramework.utils")

# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000

BITCOIND_PROC_WAIT_TIMEOUT = 60


class PortSeed:
    # Must be initialized with a unique integer for each process
    n = None

# Set Mocktime default to OFF.
# MOCKTIME is only needed for scripts that use the
# cached version of the blockchain. If the cached
# version of the blockchain is used without MOCKTIME
# then the mempools will not sync due to IBD.
MOCKTIME = 0

def enable_mocktime():
    # For backward compatibility of the python scripts
    # with previous versions of the cache, set MOCKTIME
    # to Jan 1, 2014 + (201 * 10 * 60)
    global MOCKTIME
    MOCKTIME = 1388534400 + (201 * 10 * 60)

def disable_mocktime():
    global MOCKTIME
    MOCKTIME = 0

def get_mocktime():
    return MOCKTIME

def enable_coverage(dirname):
    """Maintain a log of which RPC calls are made during testing."""
    global COVERAGE_DIR
    COVERAGE_DIR = dirname


def get_rpc_proxy(url, node_number, timeout=None):
    """
    Args:
        url (str): URL of the RPC server to call
        node_number (int): the node number (or id) that this RPC connection is for

    Kwargs:
        timeout (int): HTTP timeout in seconds

    Returns:
        AuthServiceProxy: a convenience object for making RPC calls.

    """
    proxy_kwargs = {}
    if timeout is not None:
        proxy_kwargs['timeout'] = timeout

    proxy = AuthServiceProxy(url, **proxy_kwargs)
    proxy.url = url  # store URL on proxy for info

    coverage_logfile = coverage.get_filename(
        COVERAGE_DIR, node_number) if COVERAGE_DIR else None

    return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)


def p2p_port(n):
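    """Return the p2p listening port for node n.

    The per-process PortSeed.n offset keeps port assignments from colliding
    when multiple test processes run in parallel.
    """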
    assert(n <= MAX_NODES)
    return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)

def rpc_port(n):
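    """Return the RPC port for node n (one PORT_RANGE above its p2p port)."""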
    return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)

def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    n = Decimal("20000000.00000003")
    satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8)
    if satoshis != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")

def count_bytes(hex_string):
    return len(bytearray.fromhex(hex_string))

def bytes_to_hex_str(byte_str):
    return hexlify(byte_str).decode('ascii')

def hex_str_to_bytes(hex_str):
    return unhexlify(hex_str.encode('ascii'))

def str_to_b64str(string):
    return b64encode(string.encode('utf-8')).decode('ascii')

def sync_blocks(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same tip.

    sync_blocks needs to be called with an rpc_connections set that has at least
    one node already synced to the latest, stable tip, otherwise there's a
    chance it might return before all nodes are stably synced.
    """
    # Use getblockcount() instead of waitforblockheight() to determine the
    # initial max height because the two RPCs look at different internal global
    # variables (chainActive vs latestBlock) and the former gets updated
    # earlier.
    maxheight = max(x.getblockcount() for x in rpc_connections)
    start_time = cur_time = time.time()
    while cur_time <= start_time + timeout:
        tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
        if all(t["height"] == maxheight for t in tips):
            if all(t["hash"] == tips[0]["hash"] for t in tips):
                return
            raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
                "".join("\n {!r}".format(tip) for tip in tips)))
        cur_time = time.time()
    raise AssertionError("Block sync to height {} timed out:{}".format(
        maxheight, "".join("\n {!r}".format(tip) for tip in tips)))

def sync_chain(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same best block
    """
    while timeout > 0:
        best_hash = [x.getbestblockhash() for x in rpc_connections]
        if best_hash == [best_hash[0]] * len(best_hash):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Chain sync failed: Best block hashes don't match")

def sync_mempools(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same transactions in their memory
    pools
    """
    while timeout > 0:
        pool = set(rpc_connections[0].getrawmempool())
        num_match = 1
        for i in range(1, len(rpc_connections)):
            if set(rpc_connections[i].getrawmempool()) == pool:
                num_match = num_match + 1
        if num_match == len(rpc_connections):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Mempool sync failed")

bitcoind_processes = {}

def initialize_datadir(dirname, n):
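    """Create the datadir for node n and write a minimal regtest bitcoin.conf into it."""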
    datadir = os.path.join(dirname, "node" + str(n))
    if not os.path.isdir(datadir):
        os.makedirs(datadir)
    with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
        f.write("regtest=1\n")
        f.write("port=" + str(p2p_port(n)) + "\n")
        f.write("rpcport=" + str(rpc_port(n)) + "\n")
        f.write("listenonion=0\n")
    return datadir

def get_datadir_path(dirname, n):
    return os.path.join(dirname, "node" + str(n))

def get_auth_cookie(datadir, n):
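    """Return the (user, password) RPC credentials for a node.

    Reads rpcuser/rpcpassword from bitcoin.conf and, if a regtest .cookie file
    exists, uses the credentials from that file instead. Raises ValueError if
    no credentials are found.
    """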
    user = None
    password = None
    if os.path.isfile(os.path.join(datadir, "bitcoin.conf")):
        with open(os.path.join(datadir, "bitcoin.conf"), 'r') as f:
            for line in f:
                if line.startswith("rpcuser="):
                    assert user is None  # Ensure that there is only one rpcuser line
                    user = line.split("=")[1].strip("\n")
                if line.startswith("rpcpassword="):
                    assert password is None  # Ensure that there is only one rpcpassword line
                    password = line.split("=")[1].strip("\n")
    if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")):
        with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f:
            userpass = f.read()
            split_userpass = userpass.split(':')
            user = split_userpass[0]
            password = split_userpass[1]
    if user is None or password is None:
        raise ValueError("No RPC credentials")
    return user, password

def rpc_url(datadir, i, rpchost=None):
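    """Build the http://user:password@host:port URL for node i's RPC interface."""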
    rpc_u, rpc_p = get_auth_cookie(datadir, i)
    host = '127.0.0.1'
    port = rpc_port(i)
    if rpchost:
        parts = rpchost.split(':')
        if len(parts) == 2:
            host, port = parts
        else:
            host = rpchost
    return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))

def wait_for_bitcoind_start(process, datadir, i, rpchost=None):
    """
    Wait for bitcoind to start. This means that RPC is accessible and fully initialized.
    Raise an exception if bitcoind exits during initialization.
    """
    while True:
        if process.poll() is not None:
            raise Exception('bitcoind exited with status %i during initialization' % process.returncode)
        try:
            # Check whether the .cookie file has been created yet
            rpc = get_rpc_proxy(rpc_url(datadir, i, rpchost), i)
            rpc.getblockcount()
            break  # break out of loop on success
        except IOError as e:
            if e.errno != errno.ECONNREFUSED:  # Port not yet open?
                raise  # unknown IO error
        except JSONRPCException as e:  # Initialization phase
            if e.error['code'] != -28:  # RPC in warmup?
                raise  # unknown JSON RPC exception
        except ValueError as e:  # cookie file not found and no rpcuser or rpcpassword. bitcoind still starting
            if "No RPC credentials" not in str(e):
                raise
        time.sleep(0.25)

def wait_for_node_exit(node_index, timeout):
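    """Wait up to timeout seconds for the bitcoind process of the given node to exit."""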
    bitcoind_processes[node_index].wait(timeout)

def _start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, stderr=None):
    """Start a bitcoind and return RPC connection to it

    This function should only be called from within test_framework, not by individual test scripts."""

    datadir = os.path.join(dirname, "node" + str(i))
    if binary is None:
        binary = os.getenv("BITCOIND", "bitcoind")
    args = [binary, "-datadir=" + datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(get_mocktime()), "-uacomment=testnode%d" % i]
    if extra_args is not None:
        args.extend(extra_args)
    bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr)
    logger.debug("initialize_chain: bitcoind started, waiting for RPC to come up")
    wait_for_bitcoind_start(bitcoind_processes[i], datadir, i, rpchost)
    logger.debug("initialize_chain: RPC successfully started")
    proxy = get_rpc_proxy(rpc_url(datadir, i, rpchost), i, timeout=timewait)

    if COVERAGE_DIR:
        coverage.write_all_rpc_commands(COVERAGE_DIR, proxy)

    return proxy

def assert_start_raises_init_error(i, dirname, extra_args=None, expected_msg=None):
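    """Start a node that is expected to fail during initialization.

    Asserts that bitcoind exits with an error and, if expected_msg is given,
    that the message appears in its stderr output.
    """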
    with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr:
        try:
            node = _start_node(i, dirname, extra_args, stderr=log_stderr)
            _stop_node(node, i)
        except Exception as e:
            assert 'bitcoind exited' in str(e)  # the node must have shut down
            if expected_msg is not None:
                log_stderr.seek(0)
                stderr = log_stderr.read().decode('utf-8')
                if expected_msg not in stderr:
                    raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr)
        else:
            if expected_msg is None:
                assert_msg = "bitcoind should have exited with an error"
            else:
                assert_msg = "bitcoind should have exited with expected error " + expected_msg
            raise AssertionError(assert_msg)

def _start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
    """Start multiple bitcoinds, return RPC connections to them

    This function should only be called from within test_framework, not by individual test scripts."""

    if extra_args is None:
        extra_args = [None] * num_nodes
    if binary is None:
        binary = [None] * num_nodes
    assert_equal(len(extra_args), num_nodes)
    assert_equal(len(binary), num_nodes)
    rpcs = []
    try:
        for i in range(num_nodes):
            rpcs.append(_start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i]))
    except:
        # If one node failed to start, stop the others
        _stop_nodes(rpcs)
        raise
    return rpcs

def log_filename(dirname, n_node, logname):
    return os.path.join(dirname, "node" + str(n_node), "regtest", logname)

def _stop_node(node, i):
    """Stop a bitcoind test node

    This function should only be called from within test_framework, not by individual test scripts."""

    logger.debug("Stopping node %d" % i)
    try:
        node.stop()
    except http.client.CannotSendRequest:
        logger.exception("Unable to stop node")
    return_code = bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
    del bitcoind_processes[i]
    assert_equal(return_code, 0)

def _stop_nodes(nodes):
    """Stop multiple bitcoind test nodes

    This function should only be called from within test_framework, not by individual test scripts."""

    for i, node in enumerate(nodes):
        _stop_node(node, i)
    assert not bitcoind_processes.values()  # All connections must be gone now

def set_node_times(nodes, t):
    for node in nodes:
        node.setmocktime(t)

def disconnect_nodes(from_connection, node_num):
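    """Disconnect node_num's peers from from_connection.

    Peers are identified by the "testnode<n>" -uacomment in their subversion
    string; waits until the disconnection has completed.
    """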
    for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
        from_connection.disconnectnode(nodeid=peer_id)

    for _ in range(50):
        if [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == []:
            break
        time.sleep(0.1)
    else:
        raise AssertionError("timed out waiting for disconnect")

def connect_nodes(from_connection, node_num):
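    """Open an outbound connection from from_connection to node node_num's p2p port."""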
    ip_port = "127.0.0.1:" + str(p2p_port(node_num))
    from_connection.addnode(ip_port, "onetry")
    # poll until version handshake complete to avoid race conditions
    # with transaction relaying
    while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
        time.sleep(0.1)

def connect_nodes_bi(nodes, a, b):
    connect_nodes(nodes[a], b)
    connect_nodes(nodes[b], a)

def find_output(node, txid, amount):
    """
    Return index to output of txid with value amount
    Raises exception if there is none.
    """
    txdata = node.getrawtransaction(txid, 1)
    for i in range(len(txdata["vout"])):
        if txdata["vout"][i]["value"] == amount:
            return i
    raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))

def gather_inputs(from_node, amount_needed, confirmations_required=1):
    """
    Return a random set of unspent txouts that are enough to pay amount_needed
    """
    assert(confirmations_required >= 0)
    utxo = from_node.listunspent(confirmations_required)
    random.shuffle(utxo)
    inputs = []
    total_in = Decimal("0.00000000")
    while total_in < amount_needed and len(utxo) > 0:
        t = utxo.pop()
        total_in += t["amount"]
        inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
    if total_in < amount_needed:
        raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
    return (total_in, inputs)

def make_change(from_node, amount_in, amount_out, fee):
    """
    Create change output(s), return them
    """
    outputs = {}
    amount = amount_out + fee
    change = amount_in - amount
    if change > amount * 2:
        # Create an extra change output to break up big inputs
        change_address = from_node.getnewaddress()
        # Split change in two, being careful of rounding:
        outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
        change = amount_in - amount - outputs[change_address]
    if change > 0:
        outputs[from_node.getnewaddress()] = change
    return outputs

def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a random transaction.
    Returns (txid, hex-encoded-transaction-data, fee)
    """
    from_node = random.choice(nodes)
    to_node = random.choice(nodes)
    fee = min_fee + fee_increment * random.randint(0, fee_variants)

    (total_in, inputs) = gather_inputs(from_node, amount + fee)
    outputs = make_change(from_node, total_in, amount, fee)
    outputs[to_node.getnewaddress()] = float(amount)

    rawtx = from_node.createrawtransaction(inputs, outputs)
    signresult = from_node.signrawtransaction(rawtx)
    txid = from_node.sendrawtransaction(signresult["hex"], True)

    return (txid, signresult["hex"], fee)

def assert_fee_amount(fee, tx_size, fee_per_kB):
    """Assert the fee was in range"""
    target_fee = tx_size * fee_per_kB / 1000
    if fee < target_fee:
        raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
    # allow the wallet's estimation to be at most 2 bytes off
    if fee > (tx_size + 2) * fee_per_kB / 1000:
        raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))

def assert_equal(thing1, thing2, *args):
    if thing1 != thing2 or any(thing1 != arg for arg in args):
        raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))

def assert_greater_than(thing1, thing2):
    if thing1 <= thing2:
        raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))

def assert_greater_than_or_equal(thing1, thing2):
    if thing1 < thing2:
        raise AssertionError("%s < %s" % (str(thing1), str(thing2)))

def assert_raises(exc, fun, *args, **kwds):
    assert_raises_message(exc, None, fun, *args, **kwds)

def assert_raises_message(exc, message, fun, *args, **kwds):
    try:
        fun(*args, **kwds)
    except exc as e:
        if message is not None and message not in e.error['message']:
            raise AssertionError("Expected substring not found:" + e.error['message'])
    except Exception as e:
        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
    else:
        raise AssertionError("No exception raised")

def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
    """Run an RPC and verify that a specific JSONRPC exception code and message is raised.

    Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
    and verifies that the error code and message are as expected. Throws AssertionError if
    no JSONRPCException was raised or if the error code/message are not as expected.

    Args:
        code (int), optional: the error code returned by the RPC call (defined
            in src/rpc/protocol.h). Set to None if checking the error code is not required.
        message (string), optional: [a substring of] the error string returned by the
            RPC call. Set to None if checking the error string is not required.
        fun (function): the function to call. This should be the name of an RPC.
        args*: positional arguments for the function.
        kwds**: named arguments for the function.
    """
    try:
        fun(*args, **kwds)
    except JSONRPCException as e:
        # JSONRPCException was thrown as expected. Check the code and message values are correct.
        if (code is not None) and (code != e.error["code"]):
            raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
        if (message is not None) and (message not in e.error['message']):
            raise AssertionError("Expected substring not found:" + e.error['message'])
    except Exception as e:
        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
    else:
        raise AssertionError("No exception raised")

def assert_is_hex_string(string):
    try:
        int(string, 16)
    except Exception as e:
        raise AssertionError(
            "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))

def assert_is_hash_string(string, length=64):
    if not isinstance(string, str):
        raise AssertionError("Expected a string, got type %r" % type(string))
    elif length and len(string) != length:
        raise AssertionError(
            "String of length %d expected; got %d" % (length, len(string)))
    elif not re.match('[abcdef0-9]+$', string):
        raise AssertionError(
            "String %r contains invalid characters for a hash." % string)

def assert_array_result(object_array, to_match, expected, should_not_find=False):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    If the should_not_find flag is true, to_match should not be found
    in object_array
    """
    if should_not_find:
        assert_equal(expected, {})
    num_matched = 0
    for item in object_array:
        all_match = True
        for key, value in to_match.items():
            if item[key] != value:
                all_match = False
        if not all_match:
            continue
        elif should_not_find:
            num_matched = num_matched + 1
        for key, value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
            num_matched = num_matched + 1
    if num_matched == 0 and not should_not_find:
        raise AssertionError("No objects matched %s" % (str(to_match)))
    if num_matched > 0 and should_not_find:
        raise AssertionError("Objects were found %s" % (str(to_match)))

def satoshi_round(amount):
    return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)

# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
    node.generate(int(0.5 * count) + 101)
    utxos = node.listunspent()
    iterations = count - len(utxos)
    addr1 = node.getnewaddress()
    addr2 = node.getnewaddress()
    if iterations <= 0:
        return utxos
    for i in range(iterations):
        t = utxos.pop()
        inputs = []
        inputs.append({"txid": t["txid"], "vout": t["vout"]})
        outputs = {}
        send_value = t['amount'] - fee
        outputs[addr1] = satoshi_round(send_value / 2)
        outputs[addr2] = satoshi_round(send_value / 2)
        raw_tx = node.createrawtransaction(inputs, outputs)
        signed_tx = node.signrawtransaction(raw_tx)["hex"]
        node.sendrawtransaction(signed_tx)

    while (node.getmempoolinfo()['size'] > 0):
        node.generate(1)

    utxos = node.listunspent()
    assert(len(utxos) >= count)
    return utxos

# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
    # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
    # So we have big transactions (and therefore can't fit very many into each block)
    # create one script_pubkey
    script_pubkey = "6a4d0200"  # OP_RETURN OP_PUSH2 512 bytes
    for i in range(512):
        script_pubkey = script_pubkey + "01"
    # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
    txouts = "81"
    for k in range(128):
        # add txout value
        txouts = txouts + "0000000000000000"
        # add length of script_pubkey
        txouts = txouts + "fd0402"
        # add script_pubkey
        txouts = txouts + script_pubkey
    return txouts

def create_tx(node, coinbase, to_address, amount):
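    """Create and sign a transaction spending output 0 of the given coinbase txid to to_address."""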
    inputs = [{"txid": coinbase, "vout": 0}]
    outputs = {to_address: amount}
    rawtx = node.createrawtransaction(inputs, outputs)
    signresult = node.signrawtransaction(rawtx)
    assert_equal(signresult["complete"], True)
    return signresult["hex"]

# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
    addr = node.getnewaddress()
    txids = []
    for _ in range(num):
        t = utxos.pop()
        inputs = [{"txid": t["txid"], "vout": t["vout"]}]
        outputs = {}
        change = t['amount'] - fee
        outputs[addr] = satoshi_round(change)
        rawtx = node.createrawtransaction(inputs, outputs)
        newtx = rawtx[0:92]
        newtx = newtx + txouts
        newtx = newtx + rawtx[94:]
        signresult = node.signrawtransaction(newtx, None, None, "NONE")
        txid = node.sendrawtransaction(signresult["hex"], True)
        txids.append(txid)
    return txids

def mine_large_block(node, utxos=None):
    # Generate 14 transactions of roughly 66k each, which together come
    # close to the 1MB block limit
    num = 14
    txouts = gen_return_txouts()
    utxos = utxos if utxos is not None else []
    if len(utxos) < num:
        utxos.clear()
        utxos.extend(node.listunspent())
    fee = 100 * node.getnetworkinfo()["relayfee"]
    create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
    node.generate(1)

def get_bip9_status(node, key):
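    """Return the getblockchaininfo() bip9_softforks entry for the named deployment."""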
    info = node.getblockchaininfo()
    return info['bip9_softforks'][key]