changeset 27006:a7038e5de3c7

Rename rpc-tests directory to functional
author John Newbery <john@johnnewbery.com>
date Thu, 09 Mar 2017 09:44:57 -0500
parents f1682de901e4
children a3c629c82625
files .travis.yml Makefile.am README.md configure.ac contrib/devtools/copyright_header.py doc/developer-notes.md test/README.md test/functional/.gitignore test/functional/README.md test/functional/abandonconflict.py test/functional/assumevalid.py test/functional/bip65-cltv-p2p.py test/functional/bip65-cltv.py test/functional/bip68-112-113-p2p.py test/functional/bip68-sequence.py test/functional/bip9-softforks.py test/functional/bipdersig-p2p.py test/functional/bipdersig.py test/functional/blockchain.py test/functional/bumpfee.py test/functional/create_cache.py test/functional/decodescript.py test/functional/disablewallet.py test/functional/forknotify.py test/functional/fundrawtransaction.py test/functional/getblocktemplate_longpoll.py test/functional/getblocktemplate_proposals.py test/functional/getchaintips.py test/functional/httpbasics.py test/functional/import-rescan.py test/functional/importmulti.py test/functional/importprunedfunds.py test/functional/invalidateblock.py test/functional/invalidblockrequest.py test/functional/invalidtxrequest.py test/functional/keypool.py test/functional/listsinceblock.py test/functional/listtransactions.py test/functional/maxblocksinflight.py test/functional/maxuploadtarget.py test/functional/mempool_limit.py test/functional/mempool_packages.py test/functional/mempool_reorg.py test/functional/mempool_resurrect_test.py test/functional/mempool_spendcoinbase.py test/functional/merkle_blocks.py test/functional/multi_rpc.py test/functional/nodehandling.py test/functional/nulldummy.py test/functional/p2p-acceptblock.py test/functional/p2p-compactblocks.py test/functional/p2p-feefilter.py test/functional/p2p-fullblocktest.py test/functional/p2p-leaktests.py test/functional/p2p-mempool.py test/functional/p2p-segwit.py test/functional/p2p-timeouts.py test/functional/p2p-versionbits-warning.py test/functional/preciousblock.py test/functional/prioritise_transaction.py test/functional/proxy_test.py test/functional/pruning.py 
test/functional/rawtransactions.py test/functional/receivedby.py test/functional/reindex.py test/functional/replace-by-fee.py test/functional/rest.py test/functional/rpcbind_test.py test/functional/rpcnamedargs.py test/functional/segwit.py test/functional/sendheaders.py test/functional/signmessages.py test/functional/signrawtransactions.py test/functional/smartfees.py test/functional/test_framework/__init__.py test/functional/test_framework/address.py test/functional/test_framework/authproxy.py test/functional/test_framework/bignum.py test/functional/test_framework/blockstore.py test/functional/test_framework/blocktools.py test/functional/test_framework/comptool.py test/functional/test_framework/coverage.py test/functional/test_framework/key.py test/functional/test_framework/mininode.py test/functional/test_framework/netutil.py test/functional/test_framework/script.py test/functional/test_framework/siphash.py test/functional/test_framework/socks5.py test/functional/test_framework/test_framework.py test/functional/test_framework/util.py test/functional/txn_clone.py test/functional/txn_doublespend.py test/functional/wallet-accounts.py test/functional/wallet-dump.py test/functional/wallet-hd.py test/functional/wallet.py test/functional/walletbackup.py test/functional/zapwallettxes.py test/functional/zmq_test.py test/pull-tester/rpc-tests.py test/rpc-tests/.gitignore test/rpc-tests/README.md test/rpc-tests/abandonconflict.py test/rpc-tests/assumevalid.py test/rpc-tests/bip65-cltv-p2p.py test/rpc-tests/bip65-cltv.py test/rpc-tests/bip68-112-113-p2p.py test/rpc-tests/bip68-sequence.py test/rpc-tests/bip9-softforks.py test/rpc-tests/bipdersig-p2p.py test/rpc-tests/bipdersig.py test/rpc-tests/blockchain.py test/rpc-tests/bumpfee.py test/rpc-tests/create_cache.py test/rpc-tests/decodescript.py test/rpc-tests/disablewallet.py test/rpc-tests/forknotify.py test/rpc-tests/fundrawtransaction.py test/rpc-tests/getblocktemplate_longpoll.py 
test/rpc-tests/getblocktemplate_proposals.py test/rpc-tests/getchaintips.py test/rpc-tests/httpbasics.py test/rpc-tests/import-rescan.py test/rpc-tests/importmulti.py test/rpc-tests/importprunedfunds.py test/rpc-tests/invalidateblock.py test/rpc-tests/invalidblockrequest.py test/rpc-tests/invalidtxrequest.py test/rpc-tests/keypool.py test/rpc-tests/listsinceblock.py test/rpc-tests/listtransactions.py test/rpc-tests/maxblocksinflight.py test/rpc-tests/maxuploadtarget.py test/rpc-tests/mempool_limit.py test/rpc-tests/mempool_packages.py test/rpc-tests/mempool_reorg.py test/rpc-tests/mempool_resurrect_test.py test/rpc-tests/mempool_spendcoinbase.py test/rpc-tests/merkle_blocks.py test/rpc-tests/multi_rpc.py test/rpc-tests/nodehandling.py test/rpc-tests/nulldummy.py test/rpc-tests/p2p-acceptblock.py test/rpc-tests/p2p-compactblocks.py test/rpc-tests/p2p-feefilter.py test/rpc-tests/p2p-fullblocktest.py test/rpc-tests/p2p-leaktests.py test/rpc-tests/p2p-mempool.py test/rpc-tests/p2p-segwit.py test/rpc-tests/p2p-timeouts.py test/rpc-tests/p2p-versionbits-warning.py test/rpc-tests/preciousblock.py test/rpc-tests/prioritise_transaction.py test/rpc-tests/proxy_test.py test/rpc-tests/pruning.py test/rpc-tests/rawtransactions.py test/rpc-tests/receivedby.py test/rpc-tests/reindex.py test/rpc-tests/replace-by-fee.py test/rpc-tests/rest.py test/rpc-tests/rpcbind_test.py test/rpc-tests/rpcnamedargs.py test/rpc-tests/segwit.py test/rpc-tests/sendheaders.py test/rpc-tests/signmessages.py test/rpc-tests/signrawtransactions.py test/rpc-tests/smartfees.py test/rpc-tests/test_framework/__init__.py test/rpc-tests/test_framework/address.py test/rpc-tests/test_framework/authproxy.py test/rpc-tests/test_framework/bignum.py test/rpc-tests/test_framework/blockstore.py test/rpc-tests/test_framework/blocktools.py test/rpc-tests/test_framework/comptool.py test/rpc-tests/test_framework/coverage.py test/rpc-tests/test_framework/key.py test/rpc-tests/test_framework/mininode.py 
test/rpc-tests/test_framework/netutil.py test/rpc-tests/test_framework/script.py test/rpc-tests/test_framework/siphash.py test/rpc-tests/test_framework/socks5.py test/rpc-tests/test_framework/test_framework.py test/rpc-tests/test_framework/util.py test/rpc-tests/txn_clone.py test/rpc-tests/txn_doublespend.py test/rpc-tests/wallet-accounts.py test/rpc-tests/wallet-dump.py test/rpc-tests/wallet-hd.py test/rpc-tests/wallet.py test/rpc-tests/walletbackup.py test/rpc-tests/zapwallettxes.py test/rpc-tests/zmq_test.py
diffstat 190 files changed, 22690 insertions(+), 22691 deletions(-) [+]
line wrap: on
line diff
--- a/.travis.yml	Wed Mar 08 15:56:30 2017 -0500
+++ b/.travis.yml	Thu Mar 09 09:44:57 2017 -0500
@@ -70,7 +70,7 @@
     - make $MAKEJOBS $GOAL || ( echo "Build failure. Verbose build follows." && make $GOAL V=1 ; false )
     - export LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib
     - if [ "$RUN_TESTS" = "true" ]; then make $MAKEJOBS check VERBOSE=1; fi
-    - if [ "$RUN_TESTS" = "true" ]; then qa/pull-tester/rpc-tests.py --coverage; fi
+    - if [ "$RUN_TESTS" = "true" ]; then test/pull-tester/rpc-tests.py --coverage; fi
 after_script:
     - echo $TRAVIS_COMMIT_RANGE
     - echo $TRAVIS_COMMIT_LOG
--- a/Makefile.am	Wed Mar 08 15:56:30 2017 -0500
+++ b/Makefile.am	Thu Mar 09 09:44:57 2017 -0500
@@ -61,7 +61,7 @@
 
 COVERAGE_INFO = baseline_filtered_combined.info baseline.info \
   leveldb_baseline.info test_bitcoin_filtered.info total_coverage.info \
-  baseline_filtered.info rpc_test.info rpc_test_filtered.info \
+  baseline_filtered.info functional_test.info functional_test_filtered.info \
   leveldb_baseline_filtered.info test_bitcoin_coverage.info test_bitcoin.info
 
 dist-hook:
@@ -194,20 +194,20 @@
 test_bitcoin_filtered.info: test_bitcoin.info
 	$(LCOV) -r $< "/usr/include/*" -o $@
 
-rpc_test.info: test_bitcoin_filtered.info
+functional_test.info: test_bitcoin_filtered.info
 	-@TIMEOUT=15 python test/pull-tester/rpc-tests.py $(EXTENDED_RPC_TESTS)
-	$(LCOV) -c -d $(abs_builddir)/src --t rpc-tests -o $@
+	$(LCOV) -c -d $(abs_builddir)/src --t functional-tests -o $@
 	$(LCOV) -z -d $(abs_builddir)/src
 	$(LCOV) -z -d $(abs_builddir)/src/leveldb
 
-rpc_test_filtered.info: rpc_test.info
+functional_test_filtered.info: functional_test.info
 	$(LCOV) -r $< "/usr/include/*" -o $@
 
 test_bitcoin_coverage.info: baseline_filtered_combined.info test_bitcoin_filtered.info
 	$(LCOV) -a baseline_filtered.info -a leveldb_baseline_filtered.info -a test_bitcoin_filtered.info -o $@
 
-total_coverage.info: baseline_filtered_combined.info test_bitcoin_filtered.info rpc_test_filtered.info
-	$(LCOV) -a baseline_filtered.info -a leveldb_baseline_filtered.info -a test_bitcoin_filtered.info -a rpc_test_filtered.info -o $@ | $(GREP) "\%" | $(AWK) '{ print substr($$3,2,50) "/" $$5 }' > coverage_percent.txt
+total_coverage.info: baseline_filtered_combined.info test_bitcoin_filtered.info functional_test_filtered.info
+	$(LCOV) -a baseline_filtered.info -a leveldb_baseline_filtered.info -a test_bitcoin_filtered.info -a functional_test_filtered.info -o $@ | $(GREP) "\%" | $(AWK) '{ print substr($$3,2,50) "/" $$5 }' > coverage_percent.txt
 
 test_bitcoin.coverage/.dirstamp:  test_bitcoin_coverage.info
 	$(GENHTML) -s $< -o $(@D)
@@ -223,7 +223,7 @@
 
 dist_noinst_SCRIPTS = autogen.sh
 
-EXTRA_DIST = $(top_srcdir)/share/genbuild.sh test/pull-tester/rpc-tests.py test/rpc-tests $(DIST_CONTRIB) $(DIST_DOCS) $(WINDOWS_PACKAGING) $(OSX_PACKAGING) $(BIN_CHECKS)
+EXTRA_DIST = $(top_srcdir)/share/genbuild.sh test/pull-tester/rpc-tests.py test/functional $(DIST_CONTRIB) $(DIST_DOCS) $(WINDOWS_PACKAGING) $(OSX_PACKAGING) $(BIN_CHECKS)
 
 CLEANFILES = $(OSX_DMG) $(BITCOIN_WIN_INSTALLER)
 
--- a/README.md	Wed Mar 08 15:56:30 2017 -0500
+++ b/README.md	Thu Mar 09 09:44:57 2017 -0500
@@ -54,7 +54,7 @@
 (assuming they weren't disabled in configure) with: `make check`. Further details on running
 and extending unit tests can be found in [/src/test/README.md](/src/test/README.md).
 
-There are also [regression and integration tests](/test) of the RPC interface, written
+There are also [regression and integration tests](/test), written
 in Python, that are run automatically on the build server.
 These tests can be run (if the [test dependencies](/test) are installed) with: `test/pull-tester/rpc-tests.py`
 
--- a/configure.ac	Wed Mar 08 15:56:30 2017 -0500
+++ b/configure.ac	Thu Mar 09 09:44:57 2017 -0500
@@ -1147,8 +1147,7 @@
 AC_SUBST(ZMQ_LIBS)
 AC_SUBST(PROTOBUF_LIBS)
 AC_SUBST(QR_LIBS)
-AC_CONFIG_FILES([Makefile src/Makefile doc/man/Makefile share/setup.nsi share/qt/Info.plist src/test/buildenv.py])
-AC_CONFIG_FILES([test/pull-tester/tests_config.ini],[chmod +x test/pull-tester/tests_config.ini])
+AC_CONFIG_FILES([Makefile src/Makefile doc/man/Makefile share/setup.nsi share/qt/Info.plist src/test/buildenv.py test/pull-tester/tests_config.ini])
 AC_CONFIG_FILES([contrib/devtools/split-debug.sh],[chmod +x contrib/devtools/split-debug.sh])
 AC_CONFIG_LINKS([test/pull-tester/rpc-tests.py:test/pull-tester/rpc-tests.py])
 
--- a/contrib/devtools/copyright_header.py	Wed Mar 08 15:56:30 2017 -0500
+++ b/contrib/devtools/copyright_header.py	Thu Mar 09 09:44:57 2017 -0500
@@ -32,7 +32,7 @@
     'src/tinyformat.h',
     'src/leveldb/util/env_win.cc',
     'src/crypto/ctaes/bench.c',
-    'test/rpc-tests/test_framework/bignum.py',
+    'test/functional/test_framework/bignum.py',
     # python init:
     '*__init__.py',
 ]
--- a/doc/developer-notes.md	Wed Mar 08 15:56:30 2017 -0500
+++ b/doc/developer-notes.md	Thu Mar 09 09:44:57 2017 -0500
@@ -132,7 +132,7 @@
 are testing multi-machine code that needs to operate across the internet.
 
 If you are testing something that can run on one machine, run with the -regtest option.
-In regression test mode, blocks can be created on-demand; see test/rpc-tests/ for tests
+In regression test mode, blocks can be created on-demand; see test/functional/ for tests
 that run in -regtest mode.
 
 **DEBUG_LOCKORDER**
@@ -252,7 +252,7 @@
 
   - *Rationale*: In RPC code that conditionally uses the wallet (such as
     `validateaddress`) it is easy to forget that global pointer `pwalletMain`
-    can be NULL. See `test/rpc-tests/disablewallet.py` for functional tests
+    can be NULL. See `test/functional/disablewallet.py` for functional tests
     exercising the API with `-disablewallet`
 
 - Include `db_cxx.h` (BerkeleyDB header) only when `ENABLE_WALLET` is set
--- a/test/README.md	Wed Mar 08 15:56:30 2017 -0500
+++ b/test/README.md	Thu Mar 09 09:44:57 2017 -0500
@@ -1,5 +1,5 @@
 The [pull-tester](/test/pull-tester/) folder contains a script to call
-multiple tests from the [rpc-tests](/test/rpc-tests/) folder.
+multiple tests from the [functional](/test/functional/) folder.
 
 Every pull request to the bitcoin repository is built and run through
 the regression test suite. You can also run all or only individual
@@ -83,5 +83,5 @@
 Writing tests
 =============
 You are encouraged to write tests for new or existing features.
-Further information about the test framework and individual RPC
-tests is found in [test/rpc-tests](/test/rpc-tests).
+Further information about the test framework and individual functional
+tests is found in [test/functional](/test/functional).
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/.gitignore	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,2 @@
+*.pyc
+cache
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/README.md	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,108 @@
+Regression tests
+================
+
+### [test_framework/authproxy.py](test_framework/authproxy.py)
+Taken from the [python-bitcoinrpc repository](https://github.com/jgarzik/python-bitcoinrpc).
+
+### [test_framework/test_framework.py](test_framework/test_framework.py)
+Base class for new regression tests.
+
+### [test_framework/util.py](test_framework/util.py)
+Generally useful functions.
+
+### [test_framework/mininode.py](test_framework/mininode.py)
+Basic code to support p2p connectivity to a bitcoind.
+
+### [test_framework/comptool.py](test_framework/comptool.py)
+Framework for comparison-tool style, p2p tests.
+
+### [test_framework/script.py](test_framework/script.py)
+Utilities for manipulating transaction scripts (originally from python-bitcoinlib)
+
+### [test_framework/blockstore.py](test_framework/blockstore.py)
+Implements disk-backed block and tx storage.
+
+### [test_framework/key.py](test_framework/key.py)
+Wrapper around OpenSSL EC_Key (originally from python-bitcoinlib)
+
+### [test_framework/bignum.py](test_framework/bignum.py)
+Helpers for script.py
+
+### [test_framework/blocktools.py](test_framework/blocktools.py)
+Helper functions for creating blocks and transactions.
+
+P2P test design notes
+---------------------
+
+## Mininode
+
+* ```mininode.py``` contains all the definitions for objects that pass
+over the network (```CBlock```, ```CTransaction```, etc, along with the network-level
+wrappers for them, ```msg_block```, ```msg_tx```, etc).
+
+* P2P tests have two threads.  One thread handles all network communication
+with the bitcoind(s) being tested (using python's asyncore package); the other
+implements the test logic.
+
+* ```NodeConn``` is the class used to connect to a bitcoind.  If you implement
+a callback class that derives from ```NodeConnCB``` and pass that to the
+```NodeConn``` object, your code will receive the appropriate callbacks when
+events of interest arrive.
+
+* You can pass the same handler to multiple ```NodeConn```'s if you like, or pass
+different ones to each -- whatever makes the most sense for your test.
+
+* Call ```NetworkThread.start()``` after all ```NodeConn``` objects are created to
+start the networking thread.  (Continue with the test logic in your existing
+thread.)
+
+* RPC calls are available in p2p tests.
+
+* Can be used to write free-form tests, where specific p2p-protocol behavior
+is tested.  Examples: ```p2p-acceptblock.py```, ```maxblocksinflight.py```.
+
+## Comptool
+
+* Testing framework for writing tests that compare the block/tx acceptance
+behavior of a bitcoind against 1 or more other bitcoind instances, or against
+known outcomes, or both.
+
+* Set the ```num_nodes``` variable (defined in ```ComparisonTestFramework```) to start up
+1 or more nodes.  If using 1 node, then ```--testbinary``` can be used as a command line
+option to change the bitcoind binary used by the test.  If using 2 or more nodes,
+then ```--refbinary``` can be optionally used to change the bitcoind that will be used
+on nodes 2 and up.
+
+* Implement a (generator) function called ```get_tests()``` which yields ```TestInstance```s.
+Each ```TestInstance``` consists of:
+  - a list of ```[object, outcome, hash]``` entries
+    * ```object``` is a ```CBlock```, ```CTransaction```, or
+    ```CBlockHeader```.  ```CBlock```'s and ```CTransaction```'s are tested for
+    acceptance.  ```CBlockHeader```s can be used so that the test runner can deliver
+    complete headers-chains when requested from the bitcoind, to allow writing
+    tests where blocks can be delivered out of order but still processed by
+    headers-first bitcoind's.
+    * ```outcome``` is ```True```, ```False```, or ```None```.  If ```True```
+    or ```False```, the tip is compared with the expected tip -- either the
+    block passed in, or the hash specified as the optional 3rd entry.  If
+    ```None``` is specified, then the test will compare all the bitcoind's
+    being tested to see if they all agree on what the best tip is.
+    * ```hash``` is the block hash of the tip to compare against. Optional to
+    specify; if left out then the hash of the block passed in will be used as
+    the expected tip.  This allows for specifying an expected tip while testing
+    the handling of either invalid blocks or blocks delivered out of order,
+    which complete a longer chain.
+  - ```sync_every_block```: ```True/False```.  If ```False```, then all blocks
+    are inv'ed together, and the test runner waits until the node receives the
+    last one, and tests only the last block for tip acceptance using the
+    outcome and specified tip.  If ```True```, then each block is tested in
+    sequence and synced (this is slower when processing many blocks).
+  - ```sync_every_transaction```: ```True/False```.  Analogous to
+    ```sync_every_block```, except if the outcome on the last tx is "None",
+    then the contents of the entire mempool are compared across all bitcoind
+    connections.  If ```True``` or ```False```, then only the last tx's
+    acceptance is tested against the given outcome.
+
+* For examples of tests written in this framework, see
+  ```invalidblockrequest.py``` and ```p2p-fullblocktest.py```.
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/abandonconflict.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,166 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the abandontransaction RPC.
+
+ The abandontransaction RPC marks a transaction and all its in-wallet
+ descendants as abandoned which allows their inputs to be respent. It can be
+ used to replace "stuck" or evicted transactions. It only works on transactions
+ which are not included in a block and are not currently in the mempool. It has
+ no effect on transactions which are already conflicted or abandoned.
+"""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import urllib.parse
+
+class AbandonConflictTest(BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 2
+        self.setup_clean_chain = False
+
+    def setup_network(self):
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir, ["-minrelaytxfee=0.00001"]))
+        self.nodes.append(start_node(1, self.options.tmpdir))
+        connect_nodes(self.nodes[0], 1)
+
+    def run_test(self):
+        self.nodes[1].generate(100)
+        sync_blocks(self.nodes)
+        balance = self.nodes[0].getbalance()
+        txA = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
+        txB = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
+        txC = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
+        sync_mempools(self.nodes)
+        self.nodes[1].generate(1)
+
+        sync_blocks(self.nodes)
+        newbalance = self.nodes[0].getbalance()
+        assert(balance - newbalance < Decimal("0.001")) #no more than fees lost
+        balance = newbalance
+
+        url = urllib.parse.urlparse(self.nodes[1].url)
+        self.nodes[0].disconnectnode(url.hostname+":"+str(p2p_port(1)))
+
+        # Identify the 10btc outputs
+        nA = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txA, 1)["vout"]) if vout["value"] == Decimal("10"))
+        nB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txB, 1)["vout"]) if vout["value"] == Decimal("10"))
+        nC = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txC, 1)["vout"]) if vout["value"] == Decimal("10"))
+
+        inputs =[]
+        # spend 10btc outputs from txA and txB
+        inputs.append({"txid":txA, "vout":nA})
+        inputs.append({"txid":txB, "vout":nB})
+        outputs = {}
+
+        outputs[self.nodes[0].getnewaddress()] = Decimal("14.99998")
+        outputs[self.nodes[1].getnewaddress()] = Decimal("5")
+        signed = self.nodes[0].signrawtransaction(self.nodes[0].createrawtransaction(inputs, outputs))
+        txAB1 = self.nodes[0].sendrawtransaction(signed["hex"])
+
+        # Identify the 14.99998btc output
+        nAB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txAB1, 1)["vout"]) if vout["value"] == Decimal("14.99998"))
+
+        #Create a child tx spending AB1 and C
+        inputs = []
+        inputs.append({"txid":txAB1, "vout":nAB})
+        inputs.append({"txid":txC, "vout":nC})
+        outputs = {}
+        outputs[self.nodes[0].getnewaddress()] = Decimal("24.9996")
+        signed2 = self.nodes[0].signrawtransaction(self.nodes[0].createrawtransaction(inputs, outputs))
+        txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"])
+
+        # In mempool txs from self should increase balance from change
+        newbalance = self.nodes[0].getbalance()
+        assert_equal(newbalance, balance - Decimal("30") + Decimal("24.9996"))
+        balance = newbalance
+
+        # Restart the node with a higher min relay fee so the parent tx is no longer in mempool
+        # TODO: redo with eviction
+        stop_node(self.nodes[0],0)
+        self.nodes[0]=start_node(0, self.options.tmpdir, ["-minrelaytxfee=0.0001"])
+
+        # Verify txs no longer in mempool
+        assert_equal(len(self.nodes[0].getrawmempool()), 0)
+
+        # Not in mempool txs from self should only reduce balance
+        # inputs are still spent, but change not received
+        newbalance = self.nodes[0].getbalance()
+        assert_equal(newbalance, balance - Decimal("24.9996"))
+        # Unconfirmed received funds that are not in mempool, also shouldn't show
+        # up in unconfirmed balance
+        unconfbalance = self.nodes[0].getunconfirmedbalance() + self.nodes[0].getbalance()
+        assert_equal(unconfbalance, newbalance)
+        # Also shouldn't show up in listunspent
+        assert(not txABC2 in [utxo["txid"] for utxo in self.nodes[0].listunspent(0)])
+        balance = newbalance
+
+        # Abandon original transaction and verify inputs are available again
+        # including that the child tx was also abandoned
+        self.nodes[0].abandontransaction(txAB1)
+        newbalance = self.nodes[0].getbalance()
+        assert_equal(newbalance, balance + Decimal("30"))
+        balance = newbalance
+
+        # Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned
+        stop_node(self.nodes[0],0)
+        self.nodes[0]=start_node(0, self.options.tmpdir, ["-minrelaytxfee=0.00001"])
+        assert_equal(len(self.nodes[0].getrawmempool()), 0)
+        assert_equal(self.nodes[0].getbalance(), balance)
+
+        # But if its received again then it is unabandoned
+        # And since now in mempool, the change is available
+        # But its child tx remains abandoned
+        self.nodes[0].sendrawtransaction(signed["hex"])
+        newbalance = self.nodes[0].getbalance()
+        assert_equal(newbalance, balance - Decimal("20") + Decimal("14.99998"))
+        balance = newbalance
+
+        # Send child tx again so its unabandoned
+        self.nodes[0].sendrawtransaction(signed2["hex"])
+        newbalance = self.nodes[0].getbalance()
+        assert_equal(newbalance, balance - Decimal("10") - Decimal("14.99998") + Decimal("24.9996"))
+        balance = newbalance
+
+        # Remove using high relay fee again
+        stop_node(self.nodes[0],0)
+        self.nodes[0]=start_node(0, self.options.tmpdir, ["-minrelaytxfee=0.0001"])
+        assert_equal(len(self.nodes[0].getrawmempool()), 0)
+        newbalance = self.nodes[0].getbalance()
+        assert_equal(newbalance, balance - Decimal("24.9996"))
+        balance = newbalance
+
+        # Create a double spend of AB1 by spending again from only A's 10 output
+        # Mine double spend from node 1
+        inputs =[]
+        inputs.append({"txid":txA, "vout":nA})
+        outputs = {}
+        outputs[self.nodes[1].getnewaddress()] = Decimal("9.9999")
+        tx = self.nodes[0].createrawtransaction(inputs, outputs)
+        signed = self.nodes[0].signrawtransaction(tx)
+        self.nodes[1].sendrawtransaction(signed["hex"])
+        self.nodes[1].generate(1)
+
+        connect_nodes(self.nodes[0], 1)
+        sync_blocks(self.nodes)
+
+        # Verify that B and C's 10 BTC outputs are available for spending again because AB1 is now conflicted
+        newbalance = self.nodes[0].getbalance()
+        assert_equal(newbalance, balance + Decimal("20"))
+        balance = newbalance
+
+        # There is currently a minor bug around this and so this test doesn't work.  See Issue #7315
+        # Invalidate the block with the double spend and B's 10 BTC output should no longer be available
+        # Don't think C's should either
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+        newbalance = self.nodes[0].getbalance()
+        #assert_equal(newbalance, balance - Decimal("10"))
+        self.log.info("If balance has not declined after invalidateblock then out of mempool wallet tx which is no longer")
+        self.log.info("conflicted has not resumed causing its inputs to be seen as spent.  See Issue #7315")
+        self.log.info(str(balance) + " -> " + str(newbalance) + " ?")
+
+if __name__ == '__main__':
+    AbandonConflictTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/assumevalid.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,190 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test logic for skipping signature validation on old blocks.
+
+Test logic for skipping signature validation on blocks which we've assumed
+valid (https://github.com/bitcoin/bitcoin/pull/9484)
+
+We build a chain that includes an invalid signature for one of the
+transactions:
+
+    0:        genesis block
+    1:        block 1 with coinbase transaction output.
+    2-101:    bury that block with 100 blocks so the coinbase transaction
+              output can be spent
+    102:      a block containing a transaction spending the coinbase
+              transaction output. The transaction has an invalid signature. 
+    103-2202: bury the bad block with just over two weeks' worth of blocks
+              (2100 blocks)
+
+Start three nodes:
+
+    - node0 has no -assumevalid parameter. Try to sync to block 2202. It will
+      reject block 102 and only sync as far as block 101
+    - node1 has -assumevalid set to the hash of block 102. Try to sync to
+      block 2202. node1 will sync all the way to block 2202.
+    - node2 has -assumevalid set to the hash of block 102. Try to sync to
+      block 200. node2 will reject block 102 since it's assumed valid, but it
+      isn't buried by at least two weeks' work.
+"""
+
+from test_framework.mininode import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.blocktools import create_block, create_coinbase
+from test_framework.key import CECKey
+from test_framework.script import *
+
+class BaseNode(SingleNodeConnCB):
+    def __init__(self):
+        SingleNodeConnCB.__init__(self)
+        self.last_inv = None
+        self.last_headers = None
+        self.last_block = None
+        self.last_getdata = None
+        self.block_announced = False
+        self.last_getheaders = None
+        self.disconnected = False
+        self.last_blockhash_announced = None
+
+    def on_close(self, conn):
+        self.disconnected = True
+
+    def wait_for_disconnect(self, timeout=60):
+        test_function = lambda: self.disconnected
+        assert(wait_until(test_function, timeout=timeout))
+        return
+
+    def send_header_for_blocks(self, new_blocks):
+        headers_message = msg_headers()
+        headers_message.headers = [ CBlockHeader(b) for b in new_blocks ]
+        self.send_message(headers_message)
+
+class SendHeadersTest(BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 3
+
+    def setup_network(self):
+        # Start node0. We don't start the other nodes yet since
+        # we need to pre-mine a block with an invalid transaction
+        # signature so we can pass in the block hash as assumevalid.
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir))
+
+    def run_test(self):
+
+        # Connect to node0
+        node0 = BaseNode()
+        connections = []
+        connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0))
+        node0.add_connection(connections[0])
+
+        NetworkThread().start() # Start up network handling in another thread
+        node0.wait_for_verack()
+
+        # Build the blockchain
+        self.tip = int(self.nodes[0].getbestblockhash(), 16)
+        self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1
+
+        self.blocks = []
+
+        # Get a pubkey for the coinbase TXO
+        coinbase_key = CECKey()
+        coinbase_key.set_secretbytes(b"horsebattery")
+        coinbase_pubkey = coinbase_key.get_pubkey()
+
+        # Create the first block with a coinbase output to our key
+        height = 1
+        block = create_block(self.tip, create_coinbase(height, coinbase_pubkey), self.block_time)
+        self.blocks.append(block)
+        self.block_time += 1
+        block.solve()
+        # Save the coinbase for later
+        self.block1 = block
+        self.tip = block.sha256
+        height += 1
+
+        # Bury the block 100 deep so the coinbase output is spendable
+        for i in range(100):
+            block = create_block(self.tip, create_coinbase(height), self.block_time)
+            block.solve()
+            self.blocks.append(block)
+            self.tip = block.sha256
+            self.block_time += 1
+            height += 1
+
+        # Create a transaction spending the coinbase output with an invalid (null) signature
+        tx = CTransaction()
+        tx.vin.append(CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b""))
+        tx.vout.append(CTxOut(49*100000000, CScript([OP_TRUE])))
+        tx.calc_sha256()
+
+        block102 = create_block(self.tip, create_coinbase(height), self.block_time)
+        self.block_time += 1
+        block102.vtx.extend([tx])
+        block102.hashMerkleRoot = block102.calc_merkle_root()
+        block102.rehash()
+        block102.solve()
+        self.blocks.append(block102)
+        self.tip = block102.sha256
+        self.block_time += 1
+        height += 1
+
+        # Bury the assumed valid block 2100 deep
+        for i in range(2100):
+            block = create_block(self.tip, create_coinbase(height), self.block_time)
+            block.nVersion = 4
+            block.solve()
+            self.blocks.append(block)
+            self.tip = block.sha256
+            self.block_time += 1
+            height += 1
+
+        # Start node1 and node2 with assumevalid so they accept a block with a bad signature.
+        self.nodes.append(start_node(1, self.options.tmpdir,
+                                     ["-assumevalid=" + hex(block102.sha256)]))
+        node1 = BaseNode()  # connects to node1
+        connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], node1))
+        node1.add_connection(connections[1])
+        node1.wait_for_verack()
+
+        self.nodes.append(start_node(2, self.options.tmpdir,
+                                     ["-assumevalid=" + hex(block102.sha256)]))
+        node2 = BaseNode()  # connects to node2
+        connections.append(NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], node2))
+        node2.add_connection(connections[2])
+        node2.wait_for_verack()
+
+        # send header lists to all three nodes
+        node0.send_header_for_blocks(self.blocks[0:2000])
+        node0.send_header_for_blocks(self.blocks[2000:])
+        node1.send_header_for_blocks(self.blocks[0:2000])
+        node1.send_header_for_blocks(self.blocks[2000:])
+        node2.send_header_for_blocks(self.blocks[0:200])
+
+        # Send 102 blocks to node0. Block 102 will be rejected.
+        for i in range(101):
+            node0.send_message(msg_block(self.blocks[i]))
+        node0.sync_with_ping() # make sure the most recent block is synced
+        node0.send_message(msg_block(self.blocks[101]))
+        assert_equal(self.nodes[0].getblock(self.nodes[0].getbestblockhash())['height'], 101)
+
+        # Send 2202 blocks to node1. All blocks will be accepted.
+        for i in range(2202):
+            node1.send_message(msg_block(self.blocks[i]))
+        node1.sync_with_ping() # make sure the most recent block is synced
+        assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202)
+
+        # Send 102 blocks to node2. Block 102 will be rejected.
+        for i in range(101):
+            node2.send_message(msg_block(self.blocks[i]))
+        node2.sync_with_ping() # make sure the most recent block is synced
+        node2.send_message(msg_block(self.blocks[101]))
+        assert_equal(self.nodes[2].getblock(self.nodes[2].getbestblockhash())['height'], 101)
+
+if __name__ == '__main__':
+    SendHeadersTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/bip65-cltv-p2p.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,181 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test BIP65 (CHECKLOCKTIMEVERIFY).
+
+Connect to a single node.
+Mine 2 (version 3) blocks (save the coinbases for later).
+Generate 398 more version 3 blocks, verify the node accepts.
+Mine 749 version 4 blocks, verify the node accepts.
+Check that the new CLTV rules are not enforced on the 750th version 4 block.
+Mine 199 new version blocks.
+Mine 1 old-version block.
+Mine 1 new version block.
+Check that the new CLTV rules are enforced on the 951st version 4 block.
+Mine 1 old version block, see that the node rejects.
+"""
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.mininode import CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.comptool import TestInstance, TestManager
+from test_framework.script import CScript, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP
+from io import BytesIO
+import time
+
+def cltv_invalidate(tx):
+    '''Modify the signature in vin 0 of the tx to fail CLTV
+
+    Prepends -1 CLTV DROP in the scriptSig itself.
+    '''
+    tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP] +
+                                  list(CScript(tx.vin[0].scriptSig)))
+
+
+class BIP65Test(ComparisonTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 1
+
+    def setup_network(self):
+        # Must set the blockversion for this test
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
+                                 extra_args=[['-whitelist=127.0.0.1', '-blockversion=3']],
+                                 binary=[self.options.testbinary])
+
+    def run_test(self):
+        test = TestManager(self, self.options.tmpdir)
+        test.add_all_connections(self.nodes)
+        NetworkThread().start() # Start up network handling in another thread
+        test.run()
+
+    def create_transaction(self, node, coinbase, to_address, amount):
+        from_txid = node.getblock(coinbase)['tx'][0]
+        inputs = [{ "txid" : from_txid, "vout" : 0}]
+        outputs = { to_address : amount }
+        rawtx = node.createrawtransaction(inputs, outputs)
+        signresult = node.signrawtransaction(rawtx)
+        tx = CTransaction()
+        f = BytesIO(hex_str_to_bytes(signresult['hex']))
+        tx.deserialize(f)
+        return tx
+
+    def get_tests(self):
+
+        self.coinbase_blocks = self.nodes[0].generate(2)
+        height = 3  # height of the next block to build
+        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+        self.nodeaddress = self.nodes[0].getnewaddress()
+        self.last_block_time = int(time.time())
+
+        ''' 398 more version 3 blocks '''
+        test_blocks = []
+        for i in range(398):
+            block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+            block.nVersion = 3
+            block.rehash()
+            block.solve()
+            test_blocks.append([block, True])
+            self.last_block_time += 1
+            self.tip = block.sha256
+            height += 1
+        yield TestInstance(test_blocks, sync_every_block=False)
+
+        ''' Mine 749 version 4 blocks '''
+        test_blocks = []
+        for i in range(749):
+            block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+            block.nVersion = 4
+            block.rehash()
+            block.solve()
+            test_blocks.append([block, True])
+            self.last_block_time += 1
+            self.tip = block.sha256
+            height += 1
+        yield TestInstance(test_blocks, sync_every_block=False)
+
+        '''
+        Check that the new CLTV rules are not enforced in the 750th
+        version 4 block.
+        '''
+        spendtx = self.create_transaction(self.nodes[0],
+                self.coinbase_blocks[0], self.nodeaddress, 1.0)
+        cltv_invalidate(spendtx)
+        spendtx.rehash()
+
+        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+        block.nVersion = 4
+        block.vtx.append(spendtx)
+        block.hashMerkleRoot = block.calc_merkle_root()
+        block.rehash()
+        block.solve()
+
+        self.last_block_time += 1
+        self.tip = block.sha256
+        height += 1
+        yield TestInstance([[block, True]])
+
+        ''' Mine 199 new version blocks on last valid tip '''
+        test_blocks = []
+        for i in range(199):
+            block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+            block.nVersion = 4
+            block.rehash()
+            block.solve()
+            test_blocks.append([block, True])
+            self.last_block_time += 1
+            self.tip = block.sha256
+            height += 1
+        yield TestInstance(test_blocks, sync_every_block=False)
+
+        ''' Mine 1 old version block '''
+        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+        block.nVersion = 3
+        block.rehash()
+        block.solve()
+        self.last_block_time += 1
+        self.tip = block.sha256
+        height += 1
+        yield TestInstance([[block, True]])
+
+        ''' Mine 1 new version block '''
+        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+        block.nVersion = 4
+        block.rehash()
+        block.solve()
+        self.last_block_time += 1
+        self.tip = block.sha256
+        height += 1
+        yield TestInstance([[block, True]])
+
+        '''
+        Check that the new CLTV rules are enforced in the 951st version 4
+        block.
+        '''
+        spendtx = self.create_transaction(self.nodes[0],
+                self.coinbase_blocks[1], self.nodeaddress, 1.0)
+        cltv_invalidate(spendtx)
+        spendtx.rehash()
+
+        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+        block.nVersion = 4
+        block.vtx.append(spendtx)
+        block.hashMerkleRoot = block.calc_merkle_root()
+        block.rehash()
+        block.solve()
+        self.last_block_time += 1
+        yield TestInstance([[block, False]])
+
+        ''' Mine 1 old version block, should be invalid '''
+        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+        block.nVersion = 3
+        block.rehash()
+        block.solve()
+        self.last_block_time += 1
+        yield TestInstance([[block, False]])
+
+if __name__ == '__main__':
+    BIP65Test().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/bip65-cltv.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,85 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the CHECKLOCKTIMEVERIFY (BIP65) soft-fork logic."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class BIP65Test(BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 3
+        self.setup_clean_chain = False
+
+    def setup_network(self):
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir, []))
+        self.nodes.append(start_node(1, self.options.tmpdir, ["-blockversion=3"]))
+        self.nodes.append(start_node(2, self.options.tmpdir, ["-blockversion=4"]))
+        connect_nodes(self.nodes[1], 0)
+        connect_nodes(self.nodes[2], 0)
+        self.is_network_split = False
+        self.sync_all()
+
+    def run_test(self):
+        cnt = self.nodes[0].getblockcount()
+
+        # Mine some old-version blocks
+        self.nodes[1].generate(200)
+        cnt += 100
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 100):
+            raise AssertionError("Failed to mine 100 version=3 blocks")
+
+        # Mine 750 new-version blocks
+        for i in range(15):
+            self.nodes[2].generate(50)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 850):
+            raise AssertionError("Failed to mine 750 version=4 blocks")
+
+        # TODO: check that new CHECKLOCKTIMEVERIFY rules are not enforced
+
+        # Mine 1 new-version block
+        self.nodes[2].generate(1)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 851):
+            raise AssertionError("Failed to mine a version=4 blocks")
+
+        # TODO: check that new CHECKLOCKTIMEVERIFY rules are enforced
+
+        # Mine 198 new-version blocks
+        for i in range(2):
+            self.nodes[2].generate(99)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 1049):
+            raise AssertionError("Failed to mine 198 version=4 blocks")
+
+        # Mine 1 old-version block
+        self.nodes[1].generate(1)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 1050):
+            raise AssertionError("Failed to mine a version=3 block after 949 version=4 blocks")
+
+        # Mine 1 new-version block
+        self.nodes[2].generate(1)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 1051):
+            raise AssertionError("Failed to mine a version=4 block")
+
+        # Mine 1 old-version block. This should fail
+        assert_raises_jsonrpc(-1,"CreateNewBlock: TestBlockValidity failed: bad-version(0x00000003)", self.nodes[1].generate, 1)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 1051):
+            raise AssertionError("Accepted a version=3 block after 950 version=4 blocks")
+
+        # Mine 1 new-version block
+        self.nodes[2].generate(1)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 1052):
+            raise AssertionError("Failed to mine a version=4 block")
+
+if __name__ == '__main__':
+    BIP65Test().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/bip68-112-113-p2p.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,539 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test activation of the first version bits soft fork.
+
+This soft fork will activate the following BIPS:
+BIP 68  - nSequence relative lock times
+BIP 112 - CHECKSEQUENCEVERIFY
+BIP 113 - MedianTimePast semantics for nLockTime
+
+regtest lock-in with 108/144 block signalling
+activation after a further 144 blocks
+
+mine 82 blocks whose coinbases will be used to generate inputs for our tests
+mine 61 blocks to transition from DEFINED to STARTED
+mine 144 blocks only 100 of which are signaling readiness in order to fail to change state this period
+mine 144 blocks with 108 signaling and verify STARTED->LOCKED_IN
+mine 140 blocks and seed block chain with the 82 inputs we will use for our tests at height 572
+mine 3 blocks and verify still at LOCKED_IN and test that enforcement has not triggered
+mine 1 block and test that enforcement has triggered (which triggers ACTIVE)
+Test BIP 113 is enforced
+Mine 4 blocks so next height is 580 and test BIP 68 is enforced for time and height
+Mine 1 block so next height is 581 and test BIP 68 now passes time but not height
+Mine 1 block so next height is 582 and test BIP 68 now passes time and height
+Test that BIP 112 is enforced
+
+Various transactions will be used to test that the BIPs rules are not enforced before the soft fork activates
+And that after the soft fork activates transactions pass and fail as they should according to the rules.
+For each BIP, transactions of versions 1 and 2 will be tested.
+----------------
+BIP 113:
+bip113tx - modify the nLocktime variable
+
+BIP 68:
+bip68txs - 16 txs with nSequence relative locktime of 10 with various bits set as per the relative_locktimes below
+
+BIP 112:
+bip112txs_vary_nSequence - 16 txs with nSequence relative_locktimes of 10 evaluated against 10 OP_CSV OP_DROP
+bip112txs_vary_nSequence_9 - 16 txs with nSequence relative_locktimes of 9 evaluated against 10 OP_CSV OP_DROP
+bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
+bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
+bip112tx_special - test negative argument to OP_CSV
+"""
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.mininode import ToHex, CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.comptool import TestInstance, TestManager
+from test_framework.script import *
+from io import BytesIO
+import time
+
+base_relative_locktime = 10
+seq_disable_flag = 1<<31
+seq_random_high_bit = 1<<25
+seq_type_flag = 1<<22
+seq_random_low_bit = 1<<18
+
+# b31,b25,b22,b18 represent the 31st, 25th, 22nd and 18th bits respectively in the nSequence field
+# relative_locktimes[b31][b25][b22][b18] is a base_relative_locktime with the indicated bits set if their indices are 1
+relative_locktimes = []
+for b31 in range(2):
+    b25times = []
+    for b25 in range(2):
+        b22times = []
+        for b22 in range(2):
+            b18times = []
+            for b18 in range(2):
+                rlt = base_relative_locktime
+                if (b31):
+                    rlt = rlt | seq_disable_flag
+                if (b25):
+                    rlt = rlt | seq_random_high_bit
+                if (b22):
+                    rlt = rlt | seq_type_flag
+                if (b18):
+                    rlt = rlt | seq_random_low_bit
+                b18times.append(rlt)
+            b22times.append(b18times)
+        b25times.append(b22times)
+    relative_locktimes.append(b25times)
+
+def all_rlt_txs(txarray):
+    txs = []
+    for b31 in range(2):
+        for b25 in range(2):
+            for b22 in range(2):
+                for b18 in range(2):
+                    txs.append(txarray[b31][b25][b22][b18])
+    return txs
+
+class BIP68_112_113Test(ComparisonTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 1
+
+    def setup_network(self):
+        # Must set the blockversion for this test
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
+                                 extra_args=[['-whitelist=127.0.0.1', '-blockversion=4']],
+                                 binary=[self.options.testbinary])
+
+    def run_test(self):
+        test = TestManager(self, self.options.tmpdir)
+        test.add_all_connections(self.nodes)
+        NetworkThread().start() # Start up network handling in another thread
+        test.run()
+
+    def send_generic_input_tx(self, node, coinbases):
+        amount = Decimal("49.99")
+        return node.sendrawtransaction(ToHex(self.sign_transaction(node, self.create_transaction(node, node.getblock(coinbases.pop())['tx'][0], self.nodeaddress, amount))))
+
+    def create_transaction(self, node, txid, to_address, amount):
+        inputs = [{ "txid" : txid, "vout" : 0}]
+        outputs = { to_address : amount }
+        rawtx = node.createrawtransaction(inputs, outputs)
+        tx = CTransaction()
+        f = BytesIO(hex_str_to_bytes(rawtx))
+        tx.deserialize(f)
+        return tx
+
+    def sign_transaction(self, node, unsignedtx):
+        rawtx = ToHex(unsignedtx)
+        signresult = node.signrawtransaction(rawtx)
+        tx = CTransaction()
+        f = BytesIO(hex_str_to_bytes(signresult['hex']))
+        tx.deserialize(f)
+        return tx
+
+    def generate_blocks(self, number, version, test_blocks = []):
+        for i in range(number):
+            block = self.create_test_block([], version)
+            test_blocks.append([block, True])
+            self.last_block_time += 600
+            self.tip = block.sha256
+            self.tipheight += 1
+        return test_blocks
+
+    def create_test_block(self, txs, version = 536870912):
+        block = create_block(self.tip, create_coinbase(self.tipheight + 1), self.last_block_time + 600)
+        block.nVersion = version
+        block.vtx.extend(txs)
+        block.hashMerkleRoot = block.calc_merkle_root()
+        block.rehash()
+        block.solve()
+        return block
+
+    def create_bip68txs(self, bip68inputs, txversion, locktime_delta = 0):
+        txs = []
+        assert(len(bip68inputs) >= 16)
+        i = 0
+        for b31 in range(2):
+            b25txs = []
+            for b25 in range(2):
+                b22txs = []
+                for b22 in range(2):
+                    b18txs = []
+                    for b18 in range(2):
+                        tx =  self.create_transaction(self.nodes[0], bip68inputs[i], self.nodeaddress, Decimal("49.98"))
+                        i += 1
+                        tx.nVersion = txversion
+                        tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
+                        b18txs.append(self.sign_transaction(self.nodes[0], tx))
+                    b22txs.append(b18txs)
+                b25txs.append(b22txs)
+            txs.append(b25txs)
+        return txs
+
+    def create_bip112special(self, input, txversion):
+        tx = self.create_transaction(self.nodes[0], input, self.nodeaddress, Decimal("49.98"))
+        tx.nVersion = txversion
+        signtx = self.sign_transaction(self.nodes[0], tx)
+        signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+        return signtx
+
+    def create_bip112txs(self, bip112inputs, varyOP_CSV, txversion, locktime_delta = 0):
+        txs = []
+        assert(len(bip112inputs) >= 16)
+        i = 0
+        for b31 in range(2):
+            b25txs = []
+            for b25 in range(2):
+                b22txs = []
+                for b22 in range(2):
+                    b18txs = []
+                    for b18 in range(2):
+                        tx =  self.create_transaction(self.nodes[0], bip112inputs[i], self.nodeaddress, Decimal("49.98"))
+                        i += 1
+                        if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed
+                            tx.vin[0].nSequence = base_relative_locktime + locktime_delta
+                        else: # vary nSequence instead, OP_CSV is fixed
+                            tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
+                        tx.nVersion = txversion
+                        signtx = self.sign_transaction(self.nodes[0], tx)
+                        if (varyOP_CSV):
+                            signtx.vin[0].scriptSig = CScript([relative_locktimes[b31][b25][b22][b18], OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+                        else:
+                            signtx.vin[0].scriptSig = CScript([base_relative_locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+                        b18txs.append(signtx)
+                    b22txs.append(b18txs)
+                b25txs.append(b22txs)
+            txs.append(b25txs)
+        return txs
+
+    def get_tests(self):
+        long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
+        self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
+        self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2*32 + 1) # 82 blocks generated for inputs
+        self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time
+        self.tipheight = 82 # height of the next block to build
+        self.last_block_time = long_past_time
+        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+        self.nodeaddress = self.nodes[0].getnewaddress()
+
+        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
+        test_blocks = self.generate_blocks(61, 4)
+        yield TestInstance(test_blocks, sync_every_block=False) # 1
+        # Advanced from DEFINED to STARTED, height = 143
+        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')
+
+        # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0
+        # using a variety of bits to simulate multiple parallel softforks
+        test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready)
+        test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
+        test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+        test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
+        yield TestInstance(test_blocks, sync_every_block=False) # 2
+        # Failed to advance past STARTED, height = 287
+        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')
+
+        # 108 out of 144 signal bit 0 to achieve lock-in
+        # using a variety of bits to simulate multiple parallel softforks
+        test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready)
+        test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
+        test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+        test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
+        yield TestInstance(test_blocks, sync_every_block=False) # 3
+        # Advanced from STARTED to LOCKED_IN, height = 431
+        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')
+
+        # 140 more version 4 blocks
+        test_blocks = self.generate_blocks(140, 4)
+        yield TestInstance(test_blocks, sync_every_block=False) # 4
+
+        ### Inputs at height = 572
+        # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
+        # Note we reuse inputs for v1 and v2 txs so must test these separately
+        # 16 normal inputs
+        bip68inputs = []
+        for i in range(16):
+            bip68inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+        # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
+        bip112basicinputs = []
+        for j in range(2):
+            inputs = []
+            for i in range(16):
+                inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+            bip112basicinputs.append(inputs)
+        # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
+        bip112diverseinputs = []
+        for j in range(2):
+            inputs = []
+            for i in range(16):
+                inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+            bip112diverseinputs.append(inputs)
+        # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
+        bip112specialinput = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+        # 1 normal input
+        bip113input = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+
+        self.nodes[0].setmocktime(self.last_block_time + 600)
+        inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572
+        self.nodes[0].setmocktime(0)
+        self.tip = int("0x" + inputblockhash, 0)
+        self.tipheight += 1
+        self.last_block_time += 600
+        assert_equal(len(self.nodes[0].getblock(inputblockhash,True)["tx"]), 82+1)
+
+        # 2 more version 4 blocks
+        test_blocks = self.generate_blocks(2, 4)
+        yield TestInstance(test_blocks, sync_every_block=False) # 5
+        # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)
+        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')
+
+        # Test both version 1 and version 2 transactions for all tests
+        # BIP113 test transaction will be modified before each use to put in appropriate block time
+        bip113tx_v1 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
+        bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
+        bip113tx_v1.nVersion = 1
+        bip113tx_v2 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
+        bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
+        bip113tx_v2.nVersion = 2
+
+        # For BIP68 test all 16 relative sequence locktimes
+        bip68txs_v1 = self.create_bip68txs(bip68inputs, 1)
+        bip68txs_v2 = self.create_bip68txs(bip68inputs, 2)
+
+        # For BIP112 test:
+        # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
+        bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1)
+        bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2)
+        # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
+        bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1)
+        bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(bip112basicinputs[1], False, 2, -1)
+        # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
+        bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1)
+        bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2)
+        # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
+        bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1)
+        bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1)
+        # -1 OP_CSV OP_DROP input
+        bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1)
+        bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2)
+
+
+        ### TESTING ###
+        ##################################
+        ### Before Soft Forks Activate ###
+        ##################################
+        # All txs should pass
+        ### Version 1 txs ###
+        success_txs = []
+        # add BIP113 tx and -1 CSV tx
+        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+        success_txs.append(bip113signed1)
+        success_txs.append(bip112tx_special_v1)
+        # add BIP 68 txs
+        success_txs.extend(all_rlt_txs(bip68txs_v1))
+        # add BIP 112 with seq=10 txs
+        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
+        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
+        # try BIP 112 with seq=9 txs
+        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
+        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
+        yield TestInstance([[self.create_test_block(success_txs), True]]) # 6
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+        ### Version 2 txs ###
+        success_txs = []
+        # add BIP113 tx and -1 CSV tx
+        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+        success_txs.append(bip113signed2)
+        success_txs.append(bip112tx_special_v2)
+        # add BIP 68 txs
+        success_txs.extend(all_rlt_txs(bip68txs_v2))
+        # add BIP 112 with seq=10 txs
+        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
+        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
+        # try BIP 112 with seq=9 txs
+        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
+        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
+        yield TestInstance([[self.create_test_block(success_txs), True]]) # 7
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+
+        # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
+        test_blocks = self.generate_blocks(1, 4)
+        yield TestInstance(test_blocks, sync_every_block=False) # 8
+        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')
+
+
+        #################################
+        ### After Soft Forks Activate ###
+        #################################
+        ### BIP 113 ###
+        # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
+        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+        for bip113tx in [bip113signed1, bip113signed2]:
+            yield TestInstance([[self.create_test_block([bip113tx]), False]]) # 9,10
+        # BIP 113 tests should now pass if the locktime is < MTP
+        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
+        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
+        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+        for bip113tx in [bip113signed1, bip113signed2]:
+            yield TestInstance([[self.create_test_block([bip113tx]), True]]) # 11,12
+            self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+        # Next block height = 580 after 4 blocks of random version
+        test_blocks = self.generate_blocks(4, 1234)
+        yield TestInstance(test_blocks, sync_every_block=False) # 13
+
+        ### BIP 68 ###
+        ### Version 1 txs ###
+        # All still pass
+        success_txs = []
+        success_txs.extend(all_rlt_txs(bip68txs_v1))
+        yield TestInstance([[self.create_test_block(success_txs), True]]) # 14
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+        ### Version 2 txs ###
+        bip68success_txs = []
+        # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
+        for b25 in range(2):
+            for b22 in range(2):
+                for b18 in range(2):
+                    bip68success_txs.append(bip68txs_v2[1][b25][b22][b18])
+        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 15
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+        # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
+        bip68timetxs = []
+        for b25 in range(2):
+            for b18 in range(2):
+                bip68timetxs.append(bip68txs_v2[0][b25][1][b18])
+        for tx in bip68timetxs:
+            yield TestInstance([[self.create_test_block([tx]), False]]) # 16 - 19
+        bip68heighttxs = []
+        for b25 in range(2):
+            for b18 in range(2):
+                bip68heighttxs.append(bip68txs_v2[0][b25][0][b18])
+        for tx in bip68heighttxs:
+            yield TestInstance([[self.create_test_block([tx]), False]]) # 20 - 23
+
+        # Advance one block to 581
+        test_blocks = self.generate_blocks(1, 1234)
+        yield TestInstance(test_blocks, sync_every_block=False) # 24
+
+        # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
+        bip68success_txs.extend(bip68timetxs)
+        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 25
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+        for tx in bip68heighttxs:
+            yield TestInstance([[self.create_test_block([tx]), False]]) # 26 - 29
+
+        # Advance one block to 582
+        test_blocks = self.generate_blocks(1, 1234)
+        yield TestInstance(test_blocks, sync_every_block=False) # 30
+
+        # All BIP 68 txs should pass
+        bip68success_txs.extend(bip68heighttxs)
+        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 31
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+
+        ### BIP 112 ###
+        ### Version 1 txs ###
+        # -1 OP_CSV tx should fail
+        yield TestInstance([[self.create_test_block([bip112tx_special_v1]), False]]) #32
+        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
+        success_txs = []
+        for b25 in range(2):
+            for b22 in range(2):
+                for b18 in range(2):
+                    success_txs.append(bip112txs_vary_OP_CSV_v1[1][b25][b22][b18])
+                    success_txs.append(bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18])
+        yield TestInstance([[self.create_test_block(success_txs), True]]) # 33
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
+        fail_txs = []
+        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
+        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
+        for b25 in range(2):
+            for b22 in range(2):
+                for b18 in range(2):
+                    fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18])
+                    fail_txs.append(bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18])
+
+        for tx in fail_txs:
+            yield TestInstance([[self.create_test_block([tx]), False]]) # 34 - 81
+
+        ### Version 2 txs ###
+        # -1 OP_CSV tx should fail
+        yield TestInstance([[self.create_test_block([bip112tx_special_v2]), False]]) #82
+
+        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
+        success_txs = []
+        for b25 in range(2):
+            for b22 in range(2):
+                for b18 in range(2):
+                    success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV
+                    success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV_9
+
+        yield TestInstance([[self.create_test_block(success_txs), True]]) # 83
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+        ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
+        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
+        fail_txs = []
+        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) # 16/16 of vary_nSequence_9
+        for b25 in range(2):
+            for b22 in range(2):
+                for b18 in range(2):
+                    fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22][b18]) # 16/16 of vary_OP_CSV_9
+
+        for tx in fail_txs:
+            yield TestInstance([[self.create_test_block([tx]), False]]) # 84 - 107
+
+        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
+        fail_txs = []
+        for b25 in range(2):
+            for b22 in range(2):
+                for b18 in range(2):
+                    fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22][b18]) # 8/16 of vary_nSequence
+        for tx in fail_txs:
+            yield TestInstance([[self.create_test_block([tx]), False]]) # 108-115
+
+        # If sequencelock types mismatch, tx should fail
+        fail_txs = []
+        for b25 in range(2):
+            for b18 in range(2):
+                fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1][b18]) # 12/16 of vary_nSequence
+                fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1][b18]) # 12/16 of vary_OP_CSV
+        for tx in fail_txs:
+            yield TestInstance([[self.create_test_block([tx]), False]]) # 116-123
+
+        # Remaining txs should pass, just test masking works properly
+        success_txs = []
+        for b25 in range(2):
+            for b18 in range(2):
+                success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0][b18]) # 16/16 of vary_nSequence
+                success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0][b18]) # 16/16 of vary_OP_CSV
+        yield TestInstance([[self.create_test_block(success_txs), True]]) # 124
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+        # Additional test, of checking that comparison of two time types works properly
+        time_txs = []
+        for b25 in range(2):
+            for b18 in range(2):
+                tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18]
+                tx.vin[0].nSequence = base_relative_locktime | seq_type_flag
+                signtx = self.sign_transaction(self.nodes[0], tx)
+                time_txs.append(signtx)
+        yield TestInstance([[self.create_test_block(time_txs), True]]) # 125
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+        ### Missing aspects of test
+        ##  Testing empty stack fails
+
+
+if __name__ == '__main__':
+    BIP68_112_113Test().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/bip68-sequence.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,398 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test BIP68 implementation."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.blocktools import *
+
+SEQUENCE_LOCKTIME_DISABLE_FLAG = (1<<31)
+SEQUENCE_LOCKTIME_TYPE_FLAG = (1<<22) # this means use time (0 means height)
+SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift
+SEQUENCE_LOCKTIME_MASK = 0x0000ffff
+
+# RPC error for non-BIP68 final transactions
+NOT_FINAL_ERROR = "64: non-BIP68-final"
+
+class BIP68Test(BitcoinTestFramework):
+    def __init__(self):
+        """Set test parameters: two nodes, reusing the cached chain (setup_clean_chain=False)."""
+        super().__init__()
+        self.num_nodes = 2
+        self.setup_clean_chain = False
+
+    def setup_network(self):
+        # Node 1 is started with -acceptnonstdtxn=0 so test_version2_relay can
+        # verify that nVersion=2 transactions count as standard.
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir))
+        self.nodes.append(start_node(1, self.options.tmpdir, ["-acceptnonstdtxn=0"]))
+        self.is_network_split = False
+        # Cache node 0's relay fee; reused throughout to size outputs/fees.
+        self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
+        connect_nodes(self.nodes[0], 1)
+
+    def run_test(self):
+        """Run the BIP68 sub-tests in order.
+
+        Ordering matters: test_bip68_not_consensus must run before
+        activateCSV() mines up to the soft-fork activation height.
+        """
+        # Generate some coins
+        self.nodes[0].generate(110)
+
+        self.log.info("Running test disable flag")
+        self.test_disable_flag()
+
+        self.log.info("Running test sequence-lock-confirmed-inputs")
+        self.test_sequence_lock_confirmed_inputs()
+
+        self.log.info("Running test sequence-lock-unconfirmed-inputs")
+        self.test_sequence_lock_unconfirmed_inputs()
+
+        self.log.info("Running test BIP68 not consensus before versionbits activation")
+        self.test_bip68_not_consensus()
+
+        self.log.info("Activating BIP68 (and 112/113)")
+        self.activateCSV()
+
+        self.log.info("Verifying nVersion=2 transactions are standard.")
+        self.log.info("Note that nVersion=2 transactions are always standard (independent of BIP68 activation status).")
+        self.test_version2_relay()
+
+        self.log.info("Passed")
+
+    # Test that BIP68 is not in effect if tx version is 1, or if
+    # the first sequence bit is set.
+    def test_disable_flag(self):
+        """Spend an unconfirmed input with SEQUENCE_LOCKTIME_DISABLE_FLAG set
+        (accepted), then clear the flag on a v2 spend (rejected as
+        non-BIP68-final) and finally retry the same spend as v1 (accepted)."""
+        # Create some unconfirmed inputs
+        new_addr = self.nodes[0].getnewaddress()
+        self.nodes[0].sendtoaddress(new_addr, 2) # send 2 BTC
+
+        utxos = self.nodes[0].listunspent(0, 0)
+        assert(len(utxos) > 0)
+
+        utxo = utxos[0]
+
+        tx1 = CTransaction()
+        value = int(satoshi_round(utxo["amount"] - self.relayfee)*COIN)
+
+        # Check that the disable flag disables relative locktime.
+        # If sequence locks were used, this would require 1 block for the
+        # input to mature.
+        sequence_value = SEQUENCE_LOCKTIME_DISABLE_FLAG | 1
+        tx1.vin = [CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), nSequence=sequence_value)] 
+        tx1.vout = [CTxOut(value, CScript([b'a']))]
+
+        tx1_signed = self.nodes[0].signrawtransaction(ToHex(tx1))["hex"]
+        tx1_id = self.nodes[0].sendrawtransaction(tx1_signed)
+        tx1_id = int(tx1_id, 16)
+
+        # This transaction will enable sequence-locks, so this transaction should
+        # fail
+        tx2 = CTransaction()
+        tx2.nVersion = 2
+        # Clear the disable flag (keep the low bits), enabling the lock.
+        sequence_value = sequence_value & 0x7fffffff
+        tx2.vin = [CTxIn(COutPoint(tx1_id, 0), nSequence=sequence_value)]
+        tx2.vout = [CTxOut(int(value-self.relayfee*COIN), CScript([b'a']))]
+        tx2.rehash()
+
+        assert_raises_jsonrpc(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx2))
+
+        # Setting the version back down to 1 should disable the sequence lock,
+        # so this should be accepted.
+        tx2.nVersion = 1
+
+        self.nodes[0].sendrawtransaction(ToHex(tx2))
+
+    # Calculate the median time past of a prior block ("confirmations" before
+    # the current tip).
+    def get_median_time_past(self, confirmations):
+        """Return the 'mediantime' of the block `confirmations` blocks before
+        the tip (confirmations=0 gives the tip's own MTP)."""
+        block_hash = self.nodes[0].getblockhash(self.nodes[0].getblockcount()-confirmations)
+        return self.nodes[0].getblockheader(block_hash)["mediantime"]
+
+    # Test that sequence locks are respected for transactions spending confirmed inputs.
+    def test_sequence_lock_confirmed_inputs(self):
+        """Fuzz-style check: build 400 random v2 transactions over confirmed
+        inputs, randomly applying height- or time-based sequence locks, and
+        assert the node accepts/rejects each one according to BIP68."""
+        # NOTE: 'import random' previously lived inside the utxo-creation loop
+        # below; if the wallet already held >= 200 utxos that loop body never
+        # ran and the later random.* calls raised NameError. Import it
+        # unconditionally up front instead.
+        import random
+        # Create lots of confirmed utxos, and use them to generate lots of random
+        # transactions.
+        max_outputs = 50
+        addresses = []
+        while len(addresses) < max_outputs:
+            addresses.append(self.nodes[0].getnewaddress())
+        while len(self.nodes[0].listunspent()) < 200:
+            random.shuffle(addresses)
+            num_outputs = random.randint(1, max_outputs)
+            outputs = {}
+            for i in range(num_outputs):
+                outputs[addresses[i]] = random.randint(1, 20)*0.01
+            self.nodes[0].sendmany("", outputs)
+            self.nodes[0].generate(1)
+
+        utxos = self.nodes[0].listunspent()
+
+        # Try creating a lot of random transactions.
+        # Each time, choose a random number of inputs, and randomly set
+        # some of those inputs to be sequence locked (and randomly choose
+        # between height/time locking). Small random chance of making the locks
+        # all pass.
+        for i in range(400):
+            # Randomly choose up to 10 inputs
+            num_inputs = random.randint(1, 10)
+            random.shuffle(utxos)
+
+            # Track whether any sequence locks used should fail
+            should_pass = True
+
+            # Track whether this transaction was built with sequence locks
+            using_sequence_locks = False
+
+            tx = CTransaction()
+            tx.nVersion = 2
+            value = 0
+            for j in range(num_inputs):
+                sequence_value = 0xfffffffe # this disables sequence locks
+
+                # 50% chance we enable sequence locks
+                if random.randint(0,1):
+                    using_sequence_locks = True
+
+                    # 10% of the time, make the input sequence value pass
+                    input_will_pass = (random.randint(1,10) == 1)
+                    sequence_value = utxos[j]["confirmations"]
+                    if not input_will_pass:
+                        sequence_value += 1
+                        should_pass = False
+
+                    # Figure out what the median-time-past was for the confirmed input
+                    # Note that if an input has N confirmations, we're going back N blocks
+                    # from the tip so that we're looking up MTP of the block
+                    # PRIOR to the one the input appears in, as per the BIP68 spec.
+                    orig_time = self.get_median_time_past(utxos[j]["confirmations"])
+                    cur_time = self.get_median_time_past(0) # MTP of the tip
+
+                    # can only timelock this input if it's not too old -- otherwise use height
+                    can_time_lock = True
+                    if ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) >= SEQUENCE_LOCKTIME_MASK:
+                        can_time_lock = False
+
+                    # if time-lockable, then 50% chance we make this a time lock
+                    if random.randint(0,1) and can_time_lock:
+                        # Find first time-lock value that fails, or latest one that succeeds
+                        time_delta = sequence_value << SEQUENCE_LOCKTIME_GRANULARITY
+                        if input_will_pass and time_delta > cur_time - orig_time:
+                            sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)
+                        elif (not input_will_pass and time_delta <= cur_time - orig_time):
+                            sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)+1
+                        sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
+                tx.vin.append(CTxIn(COutPoint(int(utxos[j]["txid"], 16), utxos[j]["vout"]), nSequence=sequence_value))
+                value += utxos[j]["amount"]*COIN
+            # Overestimate the size of the tx - signatures should be less than 120 bytes, and leave 50 for the output
+            tx_size = len(ToHex(tx))//2 + 120*num_inputs + 50
+            tx.vout.append(CTxOut(int(value-self.relayfee*tx_size*COIN/1000), CScript([b'a'])))
+            rawtx = self.nodes[0].signrawtransaction(ToHex(tx))["hex"]
+
+            if (using_sequence_locks and not should_pass):
+                # This transaction should be rejected
+                assert_raises_jsonrpc(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, rawtx)
+            else:
+                # This raw transaction should be accepted
+                self.nodes[0].sendrawtransaction(rawtx)
+                utxos = self.nodes[0].listunspent()
+
+    # Test that sequence locks on unconfirmed inputs must have nSequence
+    # height or time of 0 to be accepted.
+    # Then test that BIP68-invalid transactions are removed from the mempool
+    # after a reorg.
+    def test_sequence_lock_unconfirmed_inputs(self):
+        """Check that a nonzero sequence lock on an unconfirmed input is
+        rejected, becomes valid once the input confirms, and that reorgs
+        evict mempool transactions that turn BIP68-invalid."""
+        # Store height so we can easily reset the chain at the end of the test
+        cur_height = self.nodes[0].getblockcount()
+
+        # Create a mempool tx.
+        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
+        tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
+        tx1.rehash()
+
+        # Anyone-can-spend mempool tx.
+        # Sequence lock of 0 should pass.
+        tx2 = CTransaction()
+        tx2.nVersion = 2
+        tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
+        tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
+        tx2_raw = self.nodes[0].signrawtransaction(ToHex(tx2))["hex"]
+        tx2 = FromHex(tx2, tx2_raw)
+        tx2.rehash()
+
+        self.nodes[0].sendrawtransaction(tx2_raw)
+
+        # Create a spend of the 0th output of orig_tx with a sequence lock
+        # of 1, and test what happens when submitting.
+        # orig_tx.vout[0] must be an anyone-can-spend output
+        def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock):
+            sequence_value = 1
+            if not use_height_lock:
+                sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
+
+            tx = CTransaction()
+            tx.nVersion = 2
+            tx.vin = [CTxIn(COutPoint(orig_tx.sha256, 0), nSequence=sequence_value)]
+            tx.vout = [CTxOut(int(orig_tx.vout[0].nValue - relayfee*COIN), CScript([b'a']))]
+            tx.rehash()
+
+            if (orig_tx.hash in node.getrawmempool()):
+                # sendrawtransaction should fail if the tx is in the mempool
+                assert_raises_jsonrpc(-26, NOT_FINAL_ERROR, node.sendrawtransaction, ToHex(tx))
+            else:
+                # sendrawtransaction should succeed if the tx is not in the mempool
+                node.sendrawtransaction(ToHex(tx))
+
+            return tx
+
+        test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True)
+        test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
+
+        # Now mine some blocks, but make sure tx2 doesn't get mined.
+        # Use prioritisetransaction to lower the effective feerate to 0
+        self.nodes[0].prioritisetransaction(tx2.hash, int(-self.relayfee*COIN))
+        cur_time = int(time.time())
+        for i in range(10):
+            self.nodes[0].setmocktime(cur_time + 600)
+            self.nodes[0].generate(1)
+            cur_time += 600
+
+        assert(tx2.hash in self.nodes[0].getrawmempool())
+
+        # tx2 is still unconfirmed, so nonzero locks must still be rejected.
+        test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True)
+        test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
+
+        # Mine tx2, and then try again
+        self.nodes[0].prioritisetransaction(tx2.hash, int(self.relayfee*COIN))
+
+        # Advance the time on the node so that we can test timelocks
+        self.nodes[0].setmocktime(cur_time+600)
+        self.nodes[0].generate(1)
+        assert(tx2.hash not in self.nodes[0].getrawmempool())
+
+        # Now that tx2 is not in the mempool, a sequence locked spend should
+        # succeed
+        tx3 = test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
+        assert(tx3.hash in self.nodes[0].getrawmempool())
+
+        self.nodes[0].generate(1)
+        assert(tx3.hash not in self.nodes[0].getrawmempool())
+
+        # One more test, this time using height locks
+        tx4 = test_nonzero_locks(tx3, self.nodes[0], self.relayfee, use_height_lock=True)
+        assert(tx4.hash in self.nodes[0].getrawmempool())
+
+        # Now try combining confirmed and unconfirmed inputs
+        tx5 = test_nonzero_locks(tx4, self.nodes[0], self.relayfee, use_height_lock=True)
+        assert(tx5.hash not in self.nodes[0].getrawmempool())
+
+        utxos = self.nodes[0].listunspent()
+        tx5.vin.append(CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]), nSequence=1))
+        tx5.vout[0].nValue += int(utxos[0]["amount"]*COIN)
+        raw_tx5 = self.nodes[0].signrawtransaction(ToHex(tx5))["hex"]
+
+        assert_raises_jsonrpc(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, raw_tx5)
+
+        # Test mempool-BIP68 consistency after reorg
+        #
+        # State of the transactions in the last blocks:
+        # ... -> [ tx2 ] ->  [ tx3 ]
+        #         tip-1        tip
+        # And currently tx4 is in the mempool.
+        #
+        # If we invalidate the tip, tx3 should get added to the mempool, causing
+        # tx4 to be removed (fails sequence-lock).
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+        assert(tx4.hash not in self.nodes[0].getrawmempool())
+        assert(tx3.hash in self.nodes[0].getrawmempool())
+
+        # Now mine 2 empty blocks to reorg out the current tip (labeled tip-1 in
+        # diagram above).
+        # This would cause tx2 to be added back to the mempool, which in turn causes
+        # tx3 to be removed.
+        tip = int(self.nodes[0].getblockhash(self.nodes[0].getblockcount()-1), 16)
+        height = self.nodes[0].getblockcount()
+        for i in range(2):
+            block = create_block(tip, create_coinbase(height), cur_time)
+            block.nVersion = 3
+            block.rehash()
+            block.solve()
+            tip = block.sha256
+            height += 1
+            self.nodes[0].submitblock(ToHex(block))
+            cur_time += 1
+
+        mempool = self.nodes[0].getrawmempool()
+        assert(tx3.hash not in mempool)
+        assert(tx2.hash in mempool)
+
+        # Reset the chain and get rid of the mocktimed-blocks
+        self.nodes[0].setmocktime(0)
+        self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height+1))
+        self.nodes[0].generate(10)
+
+    # Make sure that BIP68 isn't being used to validate blocks, prior to
+    # versionbits activation.  If more blocks are mined prior to this test
+    # being run, then it's possible the test has activated the soft fork, and
+    # this test should be moved to run earlier, or deleted.
+    def test_bip68_not_consensus(self):
+        """Submit a block containing a BIP68-violating v2 transaction while the
+        'csv' deployment is not yet active; it must still be accepted as tip."""
+        assert(get_bip9_status(self.nodes[0], 'csv')['status'] != 'active')
+        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
+
+        tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
+        tx1.rehash()
+
+        # Make an anyone-can-spend transaction
+        tx2 = CTransaction()
+        tx2.nVersion = 1
+        tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
+        tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
+
+        # sign tx2
+        tx2_raw = self.nodes[0].signrawtransaction(ToHex(tx2))["hex"]
+        tx2 = FromHex(tx2, tx2_raw)
+        tx2.rehash()
+
+        self.nodes[0].sendrawtransaction(ToHex(tx2))
+
+        # Now make an invalid spend of tx2 according to BIP68
+        sequence_value = 100 # 100 block relative locktime
+
+        tx3 = CTransaction()
+        tx3.nVersion = 2
+        tx3.vin = [CTxIn(COutPoint(tx2.sha256, 0), nSequence=sequence_value)]
+        tx3.vout = [CTxOut(int(tx2.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
+        tx3.rehash()
+
+        # Mempool policy already enforces BIP68, so relay must fail...
+        assert_raises_jsonrpc(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx3))
+
+        # make a block that violates bip68; ensure that the tip updates
+        tip = int(self.nodes[0].getbestblockhash(), 16)
+        block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1))
+        block.nVersion = 3
+        block.vtx.extend([tx1, tx2, tx3])
+        block.hashMerkleRoot = block.calc_merkle_root()
+        block.rehash()
+        block.solve()
+
+        self.nodes[0].submitblock(ToHex(block))
+        assert_equal(self.nodes[0].getbestblockhash(), block.hash)
+
+    def activateCSV(self):
+        """Mine up to the CSV (BIP 68/112/113) activation height and verify the
+        'csv' deployment reports 'active'."""
+        # activation should happen at block height 432 (3 regtest periods).
+        # Use the named constant consistently instead of repeating the literal.
+        min_activation_height = 432
+        height = self.nodes[0].getblockcount()
+        assert(height < min_activation_height)
+        self.nodes[0].generate(min_activation_height - height)
+        assert(get_bip9_status(self.nodes[0], 'csv')['status'] == 'active')
+        sync_blocks(self.nodes)
+
+    # Use self.nodes[1] to test that version 2 transactions are standard.
+    def test_version2_relay(self):
+        """Fund, sign and relay a version-2 transaction via node 1 (started with
+        -acceptnonstdtxn=0), proving that nVersion=2 txs pass standardness."""
+        inputs = [ ]
+        outputs = { self.nodes[1].getnewaddress() : 1.0 }
+        rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
+        rawtxfund = self.nodes[1].fundrawtransaction(rawtx)['hex']
+        tx = FromHex(CTransaction(), rawtxfund)
+        tx.nVersion = 2
+        tx_signed = self.nodes[1].signrawtransaction(ToHex(tx))["hex"]
+        tx_id = self.nodes[1].sendrawtransaction(tx_signed)
+
+if __name__ == '__main__':
+    BIP68Test().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/bip9-softforks.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,246 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test BIP 9 soft forks.
+
+Connect to a single node.
+regtest lock-in with 108/144 block signalling
+activation after a further 144 blocks
+mine 2 block and save coinbases for later use
+mine 141 blocks to transition from DEFINED to STARTED
+mine 100 blocks signalling readiness and 44 not in order to fail to change state this period
+mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN)
+mine a further 143 blocks (LOCKED_IN)
+test that enforcement has not triggered (which triggers ACTIVE)
+test that enforcement has triggered
+"""
+
+from test_framework.blockstore import BlockStore
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.mininode import CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.comptool import TestInstance, TestManager
+from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP
+from io import BytesIO
+import time
+import itertools
+
+class BIP9SoftForksTest(ComparisonTestFramework):
+    """Drive the BIP9 state machine (DEFINED->STARTED->LOCKED_IN->ACTIVE) for
+    the 'csv' deployment using the comparison test framework."""
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 1
+
+    def setup_network(self):
+        # Whitelist localhost so the comptool-submitted blocks/txs are relayed
+        # without delay.
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
+                                 extra_args=[['-whitelist=127.0.0.1']],
+                                 binary=[self.options.testbinary])
+
+    def run_test(self):
+        """Hand control to the comptool TestManager, which drives get_tests()."""
+        self.test = TestManager(self, self.options.tmpdir)
+        self.test.add_all_connections(self.nodes)
+        NetworkThread().start() # Start up network handling in another thread
+        self.test.run()
+
+    def create_transaction(self, node, coinbase, to_address, amount):
+        """Build (unsigned) a version-2 transaction spending the coinbase tx of
+        block hash `coinbase` to `to_address` for `amount`."""
+        from_txid = node.getblock(coinbase)['tx'][0]
+        inputs = [{ "txid" : from_txid, "vout" : 0}]
+        outputs = { to_address : amount }
+        rawtx = node.createrawtransaction(inputs, outputs)
+        tx = CTransaction()
+        f = BytesIO(hex_str_to_bytes(rawtx))
+        tx.deserialize(f)
+        # nVersion=2 so BIP68 sequence-lock semantics apply once active.
+        tx.nVersion = 2
+        return tx
+
+    def sign_transaction(self, node, tx):
+        """Sign `tx` with the node's wallet and return a fresh, deserialized
+        CTransaction (the input object is not modified)."""
+        signresult = node.signrawtransaction(bytes_to_hex_str(tx.serialize()))
+        tx = CTransaction()
+        f = BytesIO(hex_str_to_bytes(signresult['hex']))
+        tx.deserialize(f)
+        return tx
+
+    def generate_blocks(self, number, version, test_blocks=None):
+        """Mine `number` blocks with nVersion `version` on top of self.tip.
+
+        Appends [block, True] entries to `test_blocks` and returns it.
+        The previous signature used a mutable default (test_blocks = []),
+        which is shared across calls in Python, so blocks from earlier
+        invocations leaked into later ones; use a None sentinel instead.
+        """
+        if test_blocks is None:
+            test_blocks = []
+        for i in range(number):
+            block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
+            block.nVersion = version
+            block.rehash()
+            block.solve()
+            test_blocks.append([block, True])
+            self.last_block_time += 1
+            self.tip = block.sha256
+            self.height += 1
+        return test_blocks
+
+    def get_bip9_status(self, key):
+        """Return the bip9_softforks entry for deployment `key` from getblockchaininfo."""
+        info = self.nodes[0].getblockchaininfo()
+        return info['bip9_softforks'][key]
+
+    def test_BIP(self, bipName, activated_version, invalidate, invalidatePostSignature, bitno):
+        """Generator of TestInstances walking `bipName` through every BIP9 state.
+
+        invalidate / invalidatePostSignature are hooks that mutate a spend
+        before/after signing so it violates the new rules; bitno is the
+        versionbits bit expected in getblocktemplate's vbavailable.
+        Restarts the node(s) at the end so the sequence can be re-run.
+        """
+        assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
+        assert_equal(self.get_bip9_status(bipName)['since'], 0)
+
+        # generate some coins for later
+        self.coinbase_blocks = self.nodes[0].generate(2)
+        self.height = 3  # height of the next block to build
+        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+        self.nodeaddress = self.nodes[0].getnewaddress()
+        self.last_block_time = int(time.time())
+
+        assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
+        assert_equal(self.get_bip9_status(bipName)['since'], 0)
+        tmpl = self.nodes[0].getblocktemplate({})
+        assert(bipName not in tmpl['rules'])
+        assert(bipName not in tmpl['vbavailable'])
+        assert_equal(tmpl['vbrequired'], 0)
+        assert_equal(tmpl['version'], 0x20000000)
+
+        # Test 1
+        # Advance from DEFINED to STARTED
+        test_blocks = self.generate_blocks(141, 4)
+        yield TestInstance(test_blocks, sync_every_block=False)
+
+        assert_equal(self.get_bip9_status(bipName)['status'], 'started')
+        assert_equal(self.get_bip9_status(bipName)['since'], 144)
+        tmpl = self.nodes[0].getblocktemplate({})
+        assert(bipName not in tmpl['rules'])
+        assert_equal(tmpl['vbavailable'][bipName], bitno)
+        assert_equal(tmpl['vbrequired'], 0)
+        assert(tmpl['version'] & activated_version)
+
+        # Test 2
+        # Fail to achieve LOCKED_IN 100 out of 144 signal bit 1
+        # using a variety of bits to simulate multiple parallel softforks
+        test_blocks = self.generate_blocks(50, activated_version) # signalling ready
+        test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
+        test_blocks = self.generate_blocks(50, activated_version, test_blocks) # signalling ready
+        test_blocks = self.generate_blocks(24, 4, test_blocks) # 0x00000004 (signalling not)
+        yield TestInstance(test_blocks, sync_every_block=False)
+
+        assert_equal(self.get_bip9_status(bipName)['status'], 'started')
+        assert_equal(self.get_bip9_status(bipName)['since'], 144)
+        tmpl = self.nodes[0].getblocktemplate({})
+        assert(bipName not in tmpl['rules'])
+        assert_equal(tmpl['vbavailable'][bipName], bitno)
+        assert_equal(tmpl['vbrequired'], 0)
+        assert(tmpl['version'] & activated_version)
+
+        # Test 3
+        # 108 out of 144 signal bit 1 to achieve LOCKED_IN
+        # using a variety of bits to simulate multiple parallel softforks
+        test_blocks = self.generate_blocks(58, activated_version) # signalling ready
+        test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
+        test_blocks = self.generate_blocks(50, activated_version, test_blocks) # signalling ready
+        test_blocks = self.generate_blocks(10, 4, test_blocks) # 0x00000004 (signalling not)
+        yield TestInstance(test_blocks, sync_every_block=False)
+
+        assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
+        assert_equal(self.get_bip9_status(bipName)['since'], 432)
+        tmpl = self.nodes[0].getblocktemplate({})
+        assert(bipName not in tmpl['rules'])
+
+        # Test 4
+        # 143 more blocks (waiting period-1) before activation
+        test_blocks = self.generate_blocks(143, 4)
+        yield TestInstance(test_blocks, sync_every_block=False)
+
+        assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
+        assert_equal(self.get_bip9_status(bipName)['since'], 432)
+        tmpl = self.nodes[0].getblocktemplate({})
+        assert(bipName not in tmpl['rules'])
+
+        # Test 5
+        # Check that the new rule is enforced
+        spendtx = self.create_transaction(self.nodes[0],
+                self.coinbase_blocks[0], self.nodeaddress, 1.0)
+        invalidate(spendtx)
+        spendtx = self.sign_transaction(self.nodes[0], spendtx)
+        spendtx.rehash()
+        invalidatePostSignature(spendtx)
+        spendtx.rehash()
+        block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
+        block.nVersion = activated_version
+        block.vtx.append(spendtx)
+        block.hashMerkleRoot = block.calc_merkle_root()
+        block.rehash()
+        block.solve()
+
+        self.last_block_time += 1
+        self.tip = block.sha256
+        self.height += 1
+        # This block is the one that triggers ACTIVE; it is still accepted
+        # because enforcement only starts with the NEXT block.
+        yield TestInstance([[block, True]])
+
+        assert_equal(self.get_bip9_status(bipName)['status'], 'active')
+        assert_equal(self.get_bip9_status(bipName)['since'], 576)
+        tmpl = self.nodes[0].getblocktemplate({})
+        assert(bipName in tmpl['rules'])
+        assert(bipName not in tmpl['vbavailable'])
+        assert_equal(tmpl['vbrequired'], 0)
+        assert(not (tmpl['version'] & (1 << bitno)))
+
+        # Test 6
+        # Check that the new sequence lock rules are enforced
+        spendtx = self.create_transaction(self.nodes[0],
+                self.coinbase_blocks[1], self.nodeaddress, 1.0)
+        invalidate(spendtx)
+        spendtx = self.sign_transaction(self.nodes[0], spendtx)
+        spendtx.rehash()
+        invalidatePostSignature(spendtx)
+        spendtx.rehash()
+
+        block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
+        block.nVersion = 5
+        block.vtx.append(spendtx)
+        block.hashMerkleRoot = block.calc_merkle_root()
+        block.rehash()
+        block.solve()
+        self.last_block_time += 1
+        # Rule is now enforced, so the invalid block must be rejected.
+        yield TestInstance([[block, False]])
+
+        # Restart all
+        self.test.block_store.close()
+        stop_nodes(self.nodes)
+        shutil.rmtree(self.options.tmpdir)
+        self.setup_chain()
+        self.setup_network()
+        self.test.block_store = BlockStore(self.options.tmpdir)
+        self.test.clear_all_connections()
+        self.test.add_all_connections(self.nodes)
+        NetworkThread().start() # Start up network handling in another thread
+
+    def get_tests(self):
+        """Run test_BIP three times for 'csv', each with a different way of
+        making the spend invalid once the fork is active."""
+        for test in itertools.chain(
+                self.test_BIP('csv', 0x20000001, self.sequence_lock_invalidate, self.donothing, 0),
+                self.test_BIP('csv', 0x20000001, self.mtp_invalidate, self.donothing, 0),
+                self.test_BIP('csv', 0x20000001, self.donothing, self.csv_invalidate, 0)
+        ):
+            yield test
+
+    def donothing(self, tx):
+        """No-op invalidator; used when only one of the two hooks is needed."""
+        return
+
+    def csv_invalidate(self, tx):
+        """Modify the signature in vin 0 of the tx to fail CSV.
+
+        Prepends '-1 OP_CHECKSEQUENCEVERIFY OP_DROP' to the scriptSig; the
+        negative operand makes OP_CSV fail once BIP112 is enforced.
+        """
+        tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP] +
+                                      list(CScript(tx.vin[0].scriptSig)))
+
+    def sequence_lock_invalidate(self, tx):
+        """Modify the nSequence to make the tx fail once the sequence-lock rule
+        (BIP68) is activated: a high relative timespan that cannot be met.
+        """
+        tx.vin[0].nSequence = 0x00FFFFFF
+        tx.nLockTime = 0
+
+    def mtp_invalidate(self, tx):
+        """Modify the nLockTime to make the tx fail once the MTP (BIP113) rule
+        is activated."""
+        # Disable Sequence lock, Activate nLockTime
+        tx.vin[0].nSequence = 0x90FFFFFF
+        tx.nLockTime = self.last_block_time
+
+if __name__ == '__main__':
+    BIP9SoftForksTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/bipdersig-p2p.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,188 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test BIP66 (DER SIG).
+
+Connect to a single node.
+Mine 2 (version 2) blocks (save the coinbases for later).
+Generate 298 more version 2 blocks, verify the node accepts.
+Mine 749 version 3 blocks, verify the node accepts.
+Check that the new DERSIG rules are not enforced on the 750th version 3 block.
+Mine 199 new version blocks.
+Mine 1 old-version block.
+Mine 1 new version block.
+Check that the new DERSIG rules are enforced on the 951st version 3 block.
+Mine 1 old version block, see that the node rejects.
+"""
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.mininode import CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.comptool import TestInstance, TestManager
+from test_framework.script import CScript
+from io import BytesIO
+import time
+
+# A canonical signature consists of: 
+# <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype>
+def unDERify(tx):
+    """
+    Make the signature in vin 0 of a tx non-DER-compliant,
+    by adding padding after the S-value.
+    """
+    scriptSig = CScript(tx.vin[0].scriptSig)
+    newscript = []
+    for i in scriptSig:
+        if (len(newscript) == 0):
+            # First push is the signature: insert a zero byte before the
+            # trailing hashtype byte, which breaks strict DER length encoding.
+            newscript.append(i[0:-1] + b'\0' + i[-1:])
+        else:
+            newscript.append(i)
+    tx.vin[0].scriptSig = CScript(newscript)
+            
+class BIP66Test(ComparisonTestFramework):
+    """Test BIP66 (strict DER signatures) activation over P2P.
+
+    Feeds blocks with increasing version numbers to a single node via the
+    comparison tool and checks at which points non-DER signatures and
+    old-version blocks start being rejected.
+    """
+
+    def __init__(self):
+        super().__init__()
+        # One node is enough; blocks are delivered over P2P by comptool.
+        self.num_nodes = 1
+
+    def setup_network(self):
+        # Must set the blockversion for this test
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
+                                 extra_args=[['-whitelist=127.0.0.1', '-blockversion=2']],
+                                 binary=[self.options.testbinary])
+
+    def run_test(self):
+        """Wire up the comptool TestManager and run all yielded tests."""
+        test = TestManager(self, self.options.tmpdir)
+        test.add_all_connections(self.nodes)
+        NetworkThread().start() # Start up network handling in another thread
+        test.run()
+
+    def create_transaction(self, node, coinbase, to_address, amount):
+        """Return a wallet-signed CTransaction spending output 0 of the
+        coinbase tx of block *coinbase* to *to_address*."""
+        from_txid = node.getblock(coinbase)['tx'][0]
+        inputs = [{ "txid" : from_txid, "vout" : 0}]
+        outputs = { to_address : amount }
+        rawtx = node.createrawtransaction(inputs, outputs)
+        signresult = node.signrawtransaction(rawtx)
+        tx = CTransaction()
+        f = BytesIO(hex_str_to_bytes(signresult['hex']))
+        tx.deserialize(f)
+        return tx
+
+    def get_tests(self):
+        """Yield the block sequences that drive BIP66 activation."""
+
+        self.coinbase_blocks = self.nodes[0].generate(2)
+        height = 3  # height of the next block to build
+        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+        self.nodeaddress = self.nodes[0].getnewaddress()
+        self.last_block_time = int(time.time())
+
+        ''' 298 more version 2 blocks '''
+        test_blocks = []
+        for i in range(298):
+            block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+            block.nVersion = 2
+            block.rehash()
+            block.solve()
+            test_blocks.append([block, True])
+            self.last_block_time += 1
+            self.tip = block.sha256
+            height += 1
+        yield TestInstance(test_blocks, sync_every_block=False)
+
+        ''' Mine 749 version 3 blocks '''
+        test_blocks = []
+        for i in range(749):
+            block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+            block.nVersion = 3
+            block.rehash()
+            block.solve()
+            test_blocks.append([block, True])
+            self.last_block_time += 1
+            self.tip = block.sha256
+            height += 1
+        yield TestInstance(test_blocks, sync_every_block=False)
+
+        ''' 
+        Check that the new DERSIG rules are not enforced in the 750th
+        version 3 block.
+        '''
+        # A non-DER signature must still be accepted at this point.
+        spendtx = self.create_transaction(self.nodes[0],
+                self.coinbase_blocks[0], self.nodeaddress, 1.0)
+        unDERify(spendtx)
+        spendtx.rehash()
+
+        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+        block.nVersion = 3
+        block.vtx.append(spendtx)
+        block.hashMerkleRoot = block.calc_merkle_root()
+        block.rehash()
+        block.solve()
+
+        self.last_block_time += 1
+        self.tip = block.sha256
+        height += 1
+        yield TestInstance([[block, True]])
+
+        ''' Mine 199 new version blocks on last valid tip '''
+        test_blocks = []
+        for i in range(199):
+            block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+            block.nVersion = 3
+            block.rehash()
+            block.solve()
+            test_blocks.append([block, True])
+            self.last_block_time += 1
+            self.tip = block.sha256
+            height += 1
+        yield TestInstance(test_blocks, sync_every_block=False)
+
+        ''' Mine 1 old version block '''
+        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+        block.nVersion = 2
+        block.rehash()
+        block.solve()
+        self.last_block_time += 1
+        self.tip = block.sha256
+        height += 1
+        yield TestInstance([[block, True]])
+
+        ''' Mine 1 new version block '''
+        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+        block.nVersion = 3
+        block.rehash()
+        block.solve()
+        self.last_block_time += 1
+        self.tip = block.sha256
+        height += 1
+        yield TestInstance([[block, True]])
+
+        ''' 
+        Check that the new DERSIG rules are enforced in the 951st version 3
+        block.
+        '''
+        spendtx = self.create_transaction(self.nodes[0],
+                self.coinbase_blocks[1], self.nodeaddress, 1.0)
+        unDERify(spendtx)
+        spendtx.rehash()
+
+        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+        block.nVersion = 3
+        block.vtx.append(spendtx)
+        block.hashMerkleRoot = block.calc_merkle_root()
+        block.rehash()
+        block.solve()
+        self.last_block_time += 1
+        # A block carrying a non-DER signature must now be rejected.
+        yield TestInstance([[block, False]])
+
+        ''' Mine 1 old version block, should be invalid '''
+        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+        block.nVersion = 2
+        block.rehash()
+        block.solve()
+        self.last_block_time += 1
+        yield TestInstance([[block, False]])
+
+# Script entry point.
+if __name__ == '__main__':
+    BIP66Test().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/bipdersig.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the BIP66 changeover logic."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class BIP66Test(BitcoinTestFramework):
+    """Exercise the BIP66 changeover using three nodes that mine fixed
+    block versions (default, version=2 and version=3)."""
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 3
+        self.setup_clean_chain = False
+
+    def setup_network(self):
+        """node0: default version; node1: -blockversion=2; node2: -blockversion=3."""
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir, []))
+        self.nodes.append(start_node(1, self.options.tmpdir, ["-blockversion=2"]))
+        self.nodes.append(start_node(2, self.options.tmpdir, ["-blockversion=3"]))
+        connect_nodes(self.nodes[1], 0)
+        connect_nodes(self.nodes[2], 0)
+        self.is_network_split = False
+        self.sync_all()
+
+    def run_test(self):
+        """Mine version=2/version=3 blocks around the super-majority
+        thresholds and verify node0 accepts or rejects them as expected."""
+        cnt = self.nodes[0].getblockcount()
+
+        # Mine some old-version blocks
+        self.nodes[1].generate(100)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 100):
+            raise AssertionError("Failed to mine 100 version=2 blocks")
+
+        # Mine 750 new-version blocks
+        for i in range(15):
+            self.nodes[2].generate(50)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 850):
+            raise AssertionError("Failed to mine 750 version=3 blocks")
+
+        # TODO: check that new DERSIG rules are not enforced
+
+        # Mine 1 new-version block
+        self.nodes[2].generate(1)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 851):
+            raise AssertionError("Failed to mine a version=3 blocks")
+
+        # TODO: check that new DERSIG rules are enforced
+
+        # Mine 198 new-version blocks
+        for i in range(2):
+            self.nodes[2].generate(99)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 1049):
+            raise AssertionError("Failed to mine 198 version=3 blocks")
+
+        # Mine 1 old-version block (still accepted: only 949 of the last
+        # 1000 blocks are version=3 at this point)
+        self.nodes[1].generate(1)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 1050):
+            raise AssertionError("Failed to mine a version=2 block after 949 version=3 blocks")
+
+        # Mine 1 new-version blocks
+        self.nodes[2].generate(1)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 1051):
+            raise AssertionError("Failed to mine a version=3 block")
+
+        # Mine 1 old-version blocks. This should fail
+        assert_raises_jsonrpc(-1, "CreateNewBlock: TestBlockValidity failed: bad-version(0x00000002)", self.nodes[1].generate, 1)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 1051):
+            raise AssertionError("Accepted a version=2 block after 950 version=3 blocks")
+
+        # Mine 1 new-version blocks
+        self.nodes[2].generate(1)
+        self.sync_all()
+        if (self.nodes[0].getblockcount() != cnt + 1052):
+            raise AssertionError("Failed to mine a version=3 block")
+
+# Script entry point.
+if __name__ == '__main__':
+    BIP66Test().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/blockchain.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,83 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test RPCs related to blockchainstate.
+
+Test the following RPCs:
+    - gettxoutsetinfo
+    - verifychain
+
+Tests correspond to code in rpc/blockchain.cpp.
+"""
+
+from decimal import Decimal
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+    assert_equal,
+    assert_raises_jsonrpc,
+    assert_is_hex_string,
+    assert_is_hash_string,
+    start_nodes,
+    connect_nodes_bi,
+)
+
+
+class BlockchainTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = False
+        self.num_nodes = 2
+
+    def setup_network(self, split=False):
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
+        connect_nodes_bi(self.nodes, 0, 1)
+        self.is_network_split = False
+        self.sync_all()
+
+    def run_test(self):
+        self._test_gettxoutsetinfo()
+        self._test_getblockheader()
+        self.nodes[0].verifychain(4, 0)
+
+    def _test_gettxoutsetinfo(self):
+        node = self.nodes[0]
+        res = node.gettxoutsetinfo()
+
+        assert_equal(res['total_amount'], Decimal('8725.00000000'))
+        assert_equal(res['transactions'], 200)
+        assert_equal(res['height'], 200)
+        assert_equal(res['txouts'], 200)
+        assert_equal(res['bytes_serialized'], 13924),
+        assert_equal(len(res['bestblock']), 64)
+        assert_equal(len(res['hash_serialized']), 64)
+
+    def _test_getblockheader(self):
+        node = self.nodes[0]
+
+        assert_raises_jsonrpc(-5, "Block not found", node.getblockheader, "nonsense")
+
+        besthash = node.getbestblockhash()
+        secondbesthash = node.getblockhash(199)
+        header = node.getblockheader(besthash)
+
+        assert_equal(header['hash'], besthash)
+        assert_equal(header['height'], 200)
+        assert_equal(header['confirmations'], 1)
+        assert_equal(header['previousblockhash'], secondbesthash)
+        assert_is_hex_string(header['chainwork'])
+        assert_is_hash_string(header['hash'])
+        assert_is_hash_string(header['previousblockhash'])
+        assert_is_hash_string(header['merkleroot'])
+        assert_is_hash_string(header['bits'], length=None)
+        assert isinstance(header['time'], int)
+        assert isinstance(header['mediantime'], int)
+        assert isinstance(header['nonce'], int)
+        assert isinstance(header['version'], int)
+        assert isinstance(int(header['versionHex'], 16), int)
+        assert isinstance(header['difficulty'], Decimal)
+
+# Script entry point.
+if __name__ == '__main__':
+    BlockchainTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/bumpfee.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,323 @@
+#!/usr/bin/env python3
+# Copyright (c) 2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the bumpfee RPC."""
+
+from segwit import send_to_witness
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework import blocktools
+from test_framework.mininode import CTransaction
+from test_framework.util import *
+
+import io
+
+# Sequence number that is BIP 125 opt-in and BIP 68-compliant
+BIP125_SEQUENCE_NUMBER = 0xfffffffd
+
+WALLET_PASSPHRASE = "test"
+WALLET_PASSPHRASE_TIMEOUT = 3600
+
+
+class BumpFeeTest(BitcoinTestFramework):
+    """End-to-end tests for the bumpfee RPC (opt-in RBF fee bumping)."""
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 2
+        self.setup_clean_chain = True
+
+    def setup_network(self, split=False):
+        """Start two nodes; node 1 (the "rbf node") runs with -walletrbf=1
+        and an encrypted wallet for the locked-wallet test."""
+        extra_args = [["-prematurewitness", "-walletprematurewitness", "-walletrbf={}".format(i)]
+                      for i in range(self.num_nodes)]
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, extra_args)
+
+        # Encrypt wallet for test_locked_wallet_fails test
+        self.nodes[1].encryptwallet(WALLET_PASSPHRASE)
+        # encryptwallet shuts the node down; wait for exit, then restart.
+        bitcoind_processes[1].wait()
+        self.nodes[1] = start_node(1, self.options.tmpdir, extra_args[1])
+        self.nodes[1].walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
+
+        connect_nodes_bi(self.nodes, 0, 1)
+        self.is_network_split = False
+        self.sync_all()
+
+    def run_test(self):
+        """Fund the rbf node, then run every bumpfee test case in sequence."""
+        peer_node, rbf_node = self.nodes
+        rbf_node_address = rbf_node.getnewaddress()
+
+        # fund rbf node with 25 coins of 0.001 btc (100,000 satoshis)
+        self.log.info("Mining blocks...")
+        peer_node.generate(110)
+        self.sync_all()
+        for i in range(25):
+            peer_node.sendtoaddress(rbf_node_address, 0.001)
+        self.sync_all()
+        peer_node.generate(1)
+        self.sync_all()
+        assert_equal(rbf_node.getbalance(), Decimal("0.025"))
+
+        self.log.info("Running tests")
+        dest_address = peer_node.getnewaddress()
+        test_small_output_fails(rbf_node, dest_address)
+        test_dust_to_fee(rbf_node, dest_address)
+        test_simple_bumpfee_succeeds(rbf_node, peer_node, dest_address)
+        test_segwit_bumpfee_succeeds(rbf_node, dest_address)
+        test_nonrbf_bumpfee_fails(peer_node, dest_address)
+        test_notmine_bumpfee_fails(rbf_node, peer_node, dest_address)
+        test_bumpfee_with_descendant_fails(rbf_node, rbf_node_address, dest_address)
+        test_settxfee(rbf_node, dest_address)
+        test_rebumping(rbf_node, dest_address)
+        test_rebumping_not_replaceable(rbf_node, dest_address)
+        test_unconfirmed_not_spendable(rbf_node, rbf_node_address)
+        test_bumpfee_metadata(rbf_node, dest_address)
+        test_locked_wallet_fails(rbf_node, dest_address)
+        self.log.info("Success")
+
+
+def test_simple_bumpfee_succeeds(rbf_node, peer_node, dest_address):
+    """Happy path: bumping raises the fee, evicts the original tx from both
+    mempools and links the wallet txs via replaces/replaced_by txids."""
+    rbfid = create_fund_sign_send(rbf_node, {dest_address: 0.00090000})
+    rbftx = rbf_node.gettransaction(rbfid)
+    sync_mempools((rbf_node, peer_node))
+    assert rbfid in rbf_node.getrawmempool() and rbfid in peer_node.getrawmempool()
+    bumped_tx = rbf_node.bumpfee(rbfid)
+    assert bumped_tx["fee"] - abs(rbftx["fee"]) > 0
+    # check that bumped_tx propagates, original tx was evicted and has a wallet conflict
+    sync_mempools((rbf_node, peer_node))
+    assert bumped_tx["txid"] in rbf_node.getrawmempool()
+    assert bumped_tx["txid"] in peer_node.getrawmempool()
+    assert rbfid not in rbf_node.getrawmempool()
+    assert rbfid not in peer_node.getrawmempool()
+    oldwtx = rbf_node.gettransaction(rbfid)
+    assert len(oldwtx["walletconflicts"]) > 0
+    # check wallet transaction replaces and replaced_by values
+    bumpedwtx = rbf_node.gettransaction(bumped_tx["txid"])
+    assert_equal(oldwtx["replaced_by_txid"], bumped_tx["txid"])
+    assert_equal(bumpedwtx["replaces_txid"], rbfid)
+
+
+def test_segwit_bumpfee_succeeds(rbf_node, dest_address):
+    """bumpfee works on a transaction spending a segwit output."""
+    # Create a transaction with segwit output, then create an RBF transaction
+    # which spends it, and make sure bumpfee can be called on it.
+
+    segwit_in = next(u for u in rbf_node.listunspent() if u["amount"] == Decimal("0.001"))
+    segwit_out = rbf_node.validateaddress(rbf_node.getnewaddress())
+    rbf_node.addwitnessaddress(segwit_out["address"])
+    segwitid = send_to_witness(
+        use_p2wsh=False,
+        node=rbf_node,
+        utxo=segwit_in,
+        pubkey=segwit_out["pubkey"],
+        encode_p2sh=False,
+        amount=Decimal("0.0009"),
+        sign=True)
+
+    # Build the replaceable spend manually with a BIP125 sequence number.
+    rbfraw = rbf_node.createrawtransaction([{
+        'txid': segwitid,
+        'vout': 0,
+        "sequence": BIP125_SEQUENCE_NUMBER
+    }], {dest_address: Decimal("0.0005"),
+         get_change_address(rbf_node): Decimal("0.0003")})
+    rbfsigned = rbf_node.signrawtransaction(rbfraw)
+    rbfid = rbf_node.sendrawtransaction(rbfsigned["hex"])
+    assert rbfid in rbf_node.getrawmempool()
+
+    bumped_tx = rbf_node.bumpfee(rbfid)
+    assert bumped_tx["txid"] in rbf_node.getrawmempool()
+    assert rbfid not in rbf_node.getrawmempool()
+
+
+def test_nonrbf_bumpfee_fails(peer_node, dest_address):
+    """bumpfee is rejected for a transaction that did not opt in to RBF."""
+    # cannot replace a non RBF transaction (from node which did not enable RBF)
+    not_rbfid = create_fund_sign_send(peer_node, {dest_address: 0.00090000})
+    assert_raises_jsonrpc(-4, "not BIP 125 replaceable", peer_node.bumpfee, not_rbfid)
+
+
+def test_notmine_bumpfee_fails(rbf_node, peer_node, dest_address):
+    """bumpfee is rejected when the tx spends inputs the wallet doesn't own."""
+    # cannot bump fee unless the tx has only inputs that we own.
+    # here, the rbftx has a peer_node coin and then adds a rbf_node input
+    # Note that this test depends upon the RPC code checking input ownership prior to change outputs
+    # (since it can't use fundrawtransaction, it lacks a proper change output)
+    utxos = [node.listunspent()[-1] for node in (rbf_node, peer_node)]
+    inputs = [{
+        "txid": utxo["txid"],
+        "vout": utxo["vout"],
+        "address": utxo["address"],
+        "sequence": BIP125_SEQUENCE_NUMBER
+    } for utxo in utxos]
+    output_val = sum(utxo["amount"] for utxo in utxos) - Decimal("0.001")
+    rawtx = rbf_node.createrawtransaction(inputs, {dest_address: output_val})
+    # Both wallets must sign: each owns one of the two inputs.
+    signedtx = rbf_node.signrawtransaction(rawtx)
+    signedtx = peer_node.signrawtransaction(signedtx["hex"])
+    rbfid = rbf_node.sendrawtransaction(signedtx["hex"])
+    assert_raises_jsonrpc(-4, "Transaction contains inputs that don't belong to this wallet",
+                          rbf_node.bumpfee, rbfid)
+
+
+def test_bumpfee_with_descendant_fails(rbf_node, rbf_node_address, dest_address):
+    # cannot bump fee if the transaction has a descendant
+    # parent is send-to-self, so we don't have to check which output is change when creating the child tx
+    parent_id = create_fund_sign_send(rbf_node, {rbf_node_address: 0.00050000})
+    tx = rbf_node.createrawtransaction([{"txid": parent_id, "vout": 0}], {dest_address: 0.00020000})
+    tx = rbf_node.signrawtransaction(tx)
+    txid = rbf_node.sendrawtransaction(tx["hex"])
+    assert_raises_jsonrpc(-8, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id)
+
+
+def test_small_output_fails(rbf_node, dest_address):
+    """bumpfee fails when the requested fee would overdraw the change output."""
+    # cannot bump fee with a too-small output
+    # totalFee=20000 consumes the 10000-satoshi change output exactly and succeeds
+    rbfid = spend_one_input(rbf_node,
+                            Decimal("0.00100000"),
+                            {dest_address: 0.00080000,
+                             get_change_address(rbf_node): Decimal("0.00010000")})
+    rbf_node.bumpfee(rbfid, {"totalFee": 20000})
+
+    # one satoshi more than the change can cover must be rejected
+    rbfid = spend_one_input(rbf_node,
+                            Decimal("0.00100000"),
+                            {dest_address: 0.00080000,
+                             get_change_address(rbf_node): Decimal("0.00010000")})
+    assert_raises_jsonrpc(-4, "Change output is too small", rbf_node.bumpfee, rbfid, {"totalFee": 20001})
+
+
+def test_dust_to_fee(rbf_node, dest_address):
+    """A change output reduced to dust by the bump is folded into the fee."""
+    # check that if output is reduced to dust, it will be converted to fee
+    # totalFee=19900 leaves only 100 satoshis of change, which is dust and is
+    # therefore added to the fee (final fee = 0.00020000)
+    rbfid = spend_one_input(rbf_node,
+                            Decimal("0.00100000"),
+                            {dest_address: 0.00080000,
+                             get_change_address(rbf_node): Decimal("0.00010000")})
+    fulltx = rbf_node.getrawtransaction(rbfid, 1)
+    bumped_tx = rbf_node.bumpfee(rbfid, {"totalFee": 19900})
+    full_bumped_tx = rbf_node.getrawtransaction(bumped_tx["txid"], 1)
+    assert_equal(bumped_tx["fee"], Decimal("0.00020000"))
+    assert_equal(len(fulltx["vout"]), 2)
+    assert_equal(len(full_bumped_tx["vout"]), 1)  #change output is eliminated
+
+
+def test_settxfee(rbf_node, dest_address):
+    """bumpfee picks up the wallet's paytxfee setting for the new fee rate."""
+    # check that bumpfee reacts correctly to the use of settxfee (paytxfee)
+    # increase feerate by 2.5x, test that fee increased at least 2x
+    rbf_node.settxfee(Decimal("0.00001000"))
+    rbfid = create_fund_sign_send(rbf_node, {dest_address: 0.00090000})
+    rbftx = rbf_node.gettransaction(rbfid)
+    rbf_node.settxfee(Decimal("0.00002500"))
+    bumped_tx = rbf_node.bumpfee(rbfid)
+    assert bumped_tx["fee"] > 2 * abs(rbftx["fee"])
+    rbf_node.settxfee(Decimal("0.00000000"))  # unset paytxfee
+
+
+def test_rebumping(rbf_node, dest_address):
+    """Only the latest replacement in a bump chain can be bumped again."""
+    # check that re-bumping the original tx fails, but bumping the bumper succeeds
+    rbf_node.settxfee(Decimal("0.00001000"))
+    rbfid = create_fund_sign_send(rbf_node, {dest_address: 0.00090000})
+    bumped = rbf_node.bumpfee(rbfid, {"totalFee": 1000})
+    assert_raises_jsonrpc(-4, "already bumped", rbf_node.bumpfee, rbfid, {"totalFee": 2000})
+    rbf_node.bumpfee(bumped["txid"], {"totalFee": 2000})
+
+
+def test_rebumping_not_replaceable(rbf_node, dest_address):
+    """A bump created with replaceable=False cannot itself be bumped."""
+    # check that re-bumping a non-replaceable bump tx fails
+    rbfid = create_fund_sign_send(rbf_node, {dest_address: 0.00090000})
+    bumped = rbf_node.bumpfee(rbfid, {"totalFee": 10000, "replaceable": False})
+    assert_raises_jsonrpc(-4, "Transaction is not BIP 125 replaceable", rbf_node.bumpfee, bumped["txid"],
+                          {"totalFee": 20000})
+
+
+def test_unconfirmed_not_spendable(rbf_node, rbf_node_address):
+    """Outputs of both sides of an unconfirmed replacement pair are unspendable
+    until one of the transactions confirms."""
+    # check that unconfirmed outputs from bumped transactions are not spendable
+    rbfid = create_fund_sign_send(rbf_node, {rbf_node_address: 0.00090000})
+    rbftx = rbf_node.gettransaction(rbfid)["hex"]
+    assert rbfid in rbf_node.getrawmempool()
+    bumpid = rbf_node.bumpfee(rbfid)["txid"]
+    assert bumpid in rbf_node.getrawmempool()
+    assert rbfid not in rbf_node.getrawmempool()
+
+    # check that outputs from the bump transaction are not spendable
+    # due to the replaces_txid check in CWallet::AvailableCoins
+    assert_equal([t for t in rbf_node.listunspent(minconf=0, include_unsafe=False) if t["txid"] == bumpid], [])
+
+    # submit a block with the rbf tx to clear the bump tx out of the mempool,
+    # then call abandon to make sure the wallet doesn't attempt to resubmit the
+    # bump tx, then invalidate the block so the rbf tx will be put back in the
+    # mempool. this makes it possible to check whether the rbf tx outputs are
+    # spendable before the rbf tx is confirmed.
+    block = submit_block_with_tx(rbf_node, rbftx)
+    rbf_node.abandontransaction(bumpid)
+    rbf_node.invalidateblock(block.hash)
+    assert bumpid not in rbf_node.getrawmempool()
+    assert rbfid in rbf_node.getrawmempool()
+
+    # check that outputs from the rbf tx are not spendable before the
+    # transaction is confirmed, due to the replaced_by_txid check in
+    # CWallet::AvailableCoins
+    assert_equal([t for t in rbf_node.listunspent(minconf=0, include_unsafe=False) if t["txid"] == rbfid], [])
+
+    # check that the main output from the rbf tx is spendable after confirmed
+    rbf_node.generate(1)
+    assert_equal(
+        sum(1 for t in rbf_node.listunspent(minconf=0, include_unsafe=False)
+            if t["txid"] == rbfid and t["address"] == rbf_node_address and t["spendable"]), 1)
+
+
+def test_bumpfee_metadata(rbf_node, dest_address):
+    """The wallet "comment" and "to" metadata are carried over to the bump."""
+    rbfid = rbf_node.sendtoaddress(dest_address, 0.00090000, "comment value", "to value")
+    bumped_tx = rbf_node.bumpfee(rbfid)
+    bumped_wtx = rbf_node.gettransaction(bumped_tx["txid"])
+    assert_equal(bumped_wtx["comment"], "comment value")
+    assert_equal(bumped_wtx["to"], "to value")
+
+
+def test_locked_wallet_fails(rbf_node, dest_address):
+    """bumpfee on a locked wallet reports the passphrase error."""
+    rbfid = create_fund_sign_send(rbf_node, {dest_address: 0.00090000})
+    rbf_node.walletlock()
+    assert_raises_jsonrpc(-13, "Please enter the wallet passphrase with walletpassphrase first.",
+                          rbf_node.bumpfee, rbfid)
+
+
+def create_fund_sign_send(node, outputs):
+    """Create, fund, sign and broadcast a transaction paying *outputs*;
+    return its txid. (RBF opt-in comes from the node's -walletrbf setting,
+    set in setup_network — confirm against BumpFeeTest.setup_network.)"""
+    rawtx = node.createrawtransaction([], outputs)
+    fundtx = node.fundrawtransaction(rawtx)
+    signedtx = node.signrawtransaction(fundtx["hex"])
+    txid = node.sendrawtransaction(signedtx["hex"])
+    return txid
+
+
+def spend_one_input(node, input_amount, outputs):
+    input = dict(sequence=BIP125_SEQUENCE_NUMBER, **next(u for u in node.listunspent() if u["amount"] == input_amount))
+    rawtx = node.createrawtransaction([input], outputs)
+    signedtx = node.signrawtransaction(rawtx)
+    txid = node.sendrawtransaction(signedtx["hex"])
+    return txid
+
+
+def get_change_address(node):
+    """Get a wallet change address.
+
+    There is no wallet RPC to access unused change addresses, so this creates a
+    dummy transaction, calls fundrawtransaction to add an input and change
+    output, then returns the change address."""
+    dest_address = node.getnewaddress()
+    dest_amount = Decimal("0.00012345")
+    rawtx = node.createrawtransaction([], {dest_address: dest_amount})
+    fundtx = node.fundrawtransaction(rawtx)
+    info = node.decoderawtransaction(fundtx["hex"])
+    # The change output is the one whose value differs from dest_amount.
+    return next(address for out in info["vout"]
+                if out["value"] != dest_amount for address in out["scriptPubKey"]["addresses"])
+
+
+def submit_block_with_tx(node, tx):
+    """Mine a block containing *tx* (a raw hex string) on top of the current
+    tip and submit it to *node*; return the block object."""
+    ctx = CTransaction()
+    ctx.deserialize(io.BytesIO(hex_str_to_bytes(tx)))
+
+    tip = node.getbestblockhash()
+    height = node.getblockcount() + 1
+    block_time = node.getblockheader(tip)["mediantime"] + 1
+    block = blocktools.create_block(int(tip, 16), blocktools.create_coinbase(height), block_time)
+    block.vtx.append(ctx)
+    # NOTE(review): rehash() runs before hashMerkleRoot is set; presumably
+    # solve() recomputes the header hash afterwards — confirm in mininode.
+    block.rehash()
+    block.hashMerkleRoot = block.calc_merkle_root()
+    block.solve()
+    node.submitblock(bytes_to_hex_str(block.serialize(True)))
+    return block
+
+
+# Script entry point.
+if __name__ == "__main__":
+    BumpFeeTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/create_cache.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+# Copyright (c) 2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Create a blockchain cache.
+
+Creating a cache of the blockchain speeds up test execution when running
+multiple functional tests. This helper script is executed by the test runner
+when multiple tests are being run in parallel.
+"""
+
+from test_framework.test_framework import BitcoinTestFramework
+
+class CreateCache(BitcoinTestFramework):
+    """Dummy test: its only purpose is to let the framework's main() build
+    the blockchain cache; it starts no nodes and asserts nothing."""
+
+    def __init__(self):
+        super().__init__()
+
+        # Test network and test nodes are not required:
+        self.num_nodes = 0
+        self.nodes = []
+
+    def setup_network(self):
+        # No network needed; cache creation happens during chain setup.
+        pass
+
+    def run_test(self):
+        # Nothing to test.
+        pass
+
+# Script entry point.
+if __name__ == '__main__':
+    CreateCache().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/decodescript.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test decoding scripts via decodescript RPC command."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.mininode import *
+from io import BytesIO
+
+class DecodeScriptTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+
+    def setup_network(self, split=False):
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
+        self.is_network_split = False
+
+    def decodescript_script_sig(self):
+        signature = '304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001'
+        push_signature = '48' + signature
+        public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2'
+        push_public_key = '21' + public_key
+
+        # below are test cases for all of the standard transaction types
+
+        # 1) P2PK scriptSig
+        # the scriptSig of a public key scriptPubKey simply pushes a signature onto the stack
+        rpc_result = self.nodes[0].decodescript(push_signature)
+        assert_equal(signature, rpc_result['asm'])
+
+        # 2) P2PKH scriptSig
+        rpc_result = self.nodes[0].decodescript(push_signature + push_public_key)
+        assert_equal(signature + ' ' + public_key, rpc_result['asm'])
+
+        # 3) multisig scriptSig
+        # this also tests the leading portion of a P2SH multisig scriptSig
+        # OP_0 <A sig> <B sig>
+        rpc_result = self.nodes[0].decodescript('00' + push_signature + push_signature)
+        assert_equal('0 ' + signature + ' ' + signature, rpc_result['asm'])
+
+        # 4) P2SH scriptSig
+        # an empty P2SH redeemScript is valid and makes for a very simple test case.
+        # thus, such a spending scriptSig would just need to pass the outer redeemScript
+        # hash test and leave true on the top of the stack.
+        rpc_result = self.nodes[0].decodescript('5100')
+        assert_equal('1 0', rpc_result['asm'])
+
+        # 5) null data scriptSig - no such thing because null data scripts can not be spent.
+        # thus, no test case for that standard transaction type is here.
+
+    def decodescript_script_pub_key(self):
+        public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2'
+        push_public_key = '21' + public_key
+        public_key_hash = '11695b6cd891484c2d49ec5aa738ec2b2f897777'
+        push_public_key_hash = '14' + public_key_hash
+
+        # below are test cases for all of the standard transaction types
+
+        # 1) P2PK scriptPubKey
+        # <pubkey> OP_CHECKSIG
+        rpc_result = self.nodes[0].decodescript(push_public_key + 'ac')
+        assert_equal(public_key + ' OP_CHECKSIG', rpc_result['asm'])
+
+        # 2) P2PKH scriptPubKey
+        # OP_DUP OP_HASH160 <PubKeyHash> OP_EQUALVERIFY OP_CHECKSIG
+        rpc_result = self.nodes[0].decodescript('76a9' + push_public_key_hash + '88ac')
+        assert_equal('OP_DUP OP_HASH160 ' + public_key_hash + ' OP_EQUALVERIFY OP_CHECKSIG', rpc_result['asm'])
+
+        # 3) multisig scriptPubKey
+        # <m> <A pubkey> <B pubkey> <C pubkey> <n> OP_CHECKMULTISIG
+        # just imagine that the pub keys used below are different.
+        # for our purposes here it does not matter that they are the same even though it is unrealistic.
+        rpc_result = self.nodes[0].decodescript('52' + push_public_key + push_public_key + push_public_key + '53ae')
+        assert_equal('2 ' + public_key + ' ' + public_key + ' ' + public_key +  ' 3 OP_CHECKMULTISIG', rpc_result['asm'])
+
+        # 4) P2SH scriptPubKey
+        # OP_HASH160 <Hash160(redeemScript)> OP_EQUAL.
+        # push_public_key_hash here should actually be the hash of a redeem script.
+        # but this works the same for purposes of this test.
+        rpc_result = self.nodes[0].decodescript('a9' + push_public_key_hash + '87')
+        assert_equal('OP_HASH160 ' + public_key_hash + ' OP_EQUAL', rpc_result['asm'])
+
+        # 5) null data scriptPubKey
+        # use a signature look-alike here to make sure that we do not decode random data as a signature.
+        # this matters if/when signature sighash decoding comes along.
+        # would want to make sure that no such decoding takes place in this case.
+        signature_imposter = '48304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001'
+        # OP_RETURN <data>
+        rpc_result = self.nodes[0].decodescript('6a' + signature_imposter)
+        assert_equal('OP_RETURN ' + signature_imposter[2:], rpc_result['asm'])
+
+        # 6) a CLTV redeem script. redeem scripts are in-effect scriptPubKey scripts, so adding a test here.
+        # OP_NOP2 is also known as OP_CHECKLOCKTIMEVERIFY.
+        # just imagine that the pub keys used below are different.
+        # for our purposes here it does not matter that they are the same even though it is unrealistic.
+        #
+        # OP_IF
+        #   <receiver-pubkey> OP_CHECKSIGVERIFY
+        # OP_ELSE
+        #   <lock-until> OP_CHECKLOCKTIMEVERIFY OP_DROP
+        # OP_ENDIF
+        # <sender-pubkey> OP_CHECKSIG
+        #
+        # lock until block 500,000
+        rpc_result = self.nodes[0].decodescript('63' + push_public_key + 'ad670320a107b17568' + push_public_key + 'ac')
+        assert_equal('OP_IF ' + public_key + ' OP_CHECKSIGVERIFY OP_ELSE 500000 OP_CHECKLOCKTIMEVERIFY OP_DROP OP_ENDIF ' + public_key + ' OP_CHECKSIG', rpc_result['asm'])
+
+    def decoderawtransaction_asm_sighashtype(self):
+        """Test decoding scripts via RPC command "decoderawtransaction".
+
+        This test is in with the "decodescript" tests because they are testing the same "asm" script decodes.
+        """
+
+        # this test case uses a random plain vanilla mainnet transaction with a single P2PKH input and output
+        tx = '0100000001696a20784a2c70143f634e95227dbdfdf0ecd51647052e70854512235f5986ca010000008a47304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb014104d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536ffffffff0100e1f505000000001976a914eb6c6e0cdb2d256a32d97b8df1fc75d1920d9bca88ac00000000'
+        rpc_result = self.nodes[0].decoderawtransaction(tx)
+        assert_equal('304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb[ALL] 04d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536', rpc_result['vin'][0]['scriptSig']['asm'])
+
+        # this test case uses a mainnet transaction that has a P2SH input and both P2PKH and P2SH outputs.
+        # it's from James D'Angelo's awesome introductory videos about multisig: https://www.youtube.com/watch?v=zIbUSaZBJgU and https://www.youtube.com/watch?v=OSA1pwlaypc
+        # verify that we have not altered scriptPubKey decoding.
+        tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914dc863734a218bfe83ef770ee9d41a27f824a6e5688acee2a02000000000017a9142a5edea39971049a540474c6a99edf0aa4074c588700000000'
+        rpc_result = self.nodes[0].decoderawtransaction(tx)
+        assert_equal('8e3730608c3b0bb5df54f09076e196bc292a8e39a78e73b44b6ba08c78f5cbb0', rpc_result['txid'])
+        assert_equal('0 3045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea[ALL] 3045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75[ALL] 5221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53ae', rpc_result['vin'][0]['scriptSig']['asm'])
+        assert_equal('OP_DUP OP_HASH160 dc863734a218bfe83ef770ee9d41a27f824a6e56 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
+        assert_equal('OP_HASH160 2a5edea39971049a540474c6a99edf0aa4074c58 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
+        txSave = CTransaction()
+        txSave.deserialize(BytesIO(hex_str_to_bytes(tx)))
+
+        # make sure that a specifically crafted op_return value will not pass all the IsDERSignature checks and then get decoded as a sighash type
+        tx = '01000000015ded05872fdbda629c7d3d02b194763ce3b9b1535ea884e3c8e765d42e316724020000006b48304502204c10d4064885c42638cbff3585915b322de33762598321145ba033fc796971e2022100bb153ad3baa8b757e30a2175bd32852d2e1cb9080f84d7e32fcdfd667934ef1b012103163c0ff73511ea1743fb5b98384a2ff09dd06949488028fd819f4d83f56264efffffffff0200000000000000000b6a0930060201000201000180380100000000001976a9141cabd296e753837c086da7a45a6c2fe0d49d7b7b88ac00000000'
+        rpc_result = self.nodes[0].decoderawtransaction(tx)
+        assert_equal('OP_RETURN 300602010002010001', rpc_result['vout'][0]['scriptPubKey']['asm'])
+
+        # verify that we have not altered scriptPubKey processing even of a specially crafted P2PKH pubkeyhash and P2SH redeem script hash that is made to pass the der signature checks
+        tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914301102070101010101010102060101010101010188acee2a02000000000017a91430110207010101010101010206010101010101018700000000'
+        rpc_result = self.nodes[0].decoderawtransaction(tx)
+        assert_equal('OP_DUP OP_HASH160 3011020701010101010101020601010101010101 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
+        assert_equal('OP_HASH160 3011020701010101010101020601010101010101 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
+
+        # some more full transaction tests of varying specific scriptSigs. used instead of
+        # tests in decodescript_script_sig because the decodescript RPC is specifically
+        # for working on scriptPubKeys (argh!).
+        push_signature = bytes_to_hex_str(txSave.vin[0].scriptSig)[2:(0x48*2+4)]
+        signature = push_signature[2:]
+        der_signature = signature[:-2]
+        signature_sighash_decoded = der_signature + '[ALL]'
+        signature_2 = der_signature + '82'
+        push_signature_2 = '48' + signature_2
+        signature_2_sighash_decoded = der_signature + '[NONE|ANYONECANPAY]'
+
+        # 1) P2PK scriptSig
+        txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature)
+        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
+        assert_equal(signature_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
+
+        # make sure that the sighash decodes come out correctly for a more complex / lesser used case.
+        txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature_2)
+        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
+        assert_equal(signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
+
+        # 2) multisig scriptSig
+        txSave.vin[0].scriptSig = hex_str_to_bytes('00' + push_signature + push_signature_2)
+        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
+        assert_equal('0 ' + signature_sighash_decoded + ' ' + signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
+
+        # 3) test a scriptSig that contains more than push operations.
+        # in fact, it contains an OP_RETURN with data specially crafted to cause improper decode if the code does not catch it.
+        txSave.vin[0].scriptSig = hex_str_to_bytes('6a143011020701010101010101020601010101010101')
+        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
+        assert_equal('OP_RETURN 3011020701010101010101020601010101010101', rpc_result['vin'][0]['scriptSig']['asm'])
+
+    def run_test(self):
+        self.decodescript_script_sig()
+        self.decodescript_script_pub_key()
+        self.decoderawtransaction_asm_sighashtype()
+
+if __name__ == '__main__':
+    DecodeScriptTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/disablewallet.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test a node with the -disablewallet option.
+
+- Test that validateaddress RPC works when running with -disablewallet
+- Test that it is not possible to mine to an invalid address.
+"""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+
+class DisableWalletTest (BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+
+    def setup_network(self, split=False):
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [['-disablewallet']])
+        self.is_network_split = False
+        self.sync_all()
+
+    def run_test (self):
+        x = self.nodes[0].validateaddress('3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
+        assert(x['isvalid'] == False)
+        x = self.nodes[0].validateaddress('mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ')
+        assert(x['isvalid'] == True)
+
+        # Checking mining to an address without a wallet. Generating to a valid address should succeed
+        # but generating to an invalid address will fail.
+        self.nodes[0].generatetoaddress(1, 'mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ')
+        assert_raises_jsonrpc(-5, "Invalid address", self.nodes[0].generatetoaddress, 1, '3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
+
+if __name__ == '__main__':
+    DisableWalletTest ().main ()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/forknotify.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the -alertnotify option."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class ForkNotifyTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 2
+        self.setup_clean_chain = False
+
+    alert_filename = None  # Set by setup_network
+
+    def setup_network(self):
+        self.nodes = []
+        self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
+        with open(self.alert_filename, 'w', encoding='utf8'):
+            pass  # Just open then close to create zero-length file
+        self.nodes.append(start_node(0, self.options.tmpdir,
+                            ["-blockversion=2", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]))
+        # Node1 mines block.version=211 blocks
+        self.nodes.append(start_node(1, self.options.tmpdir,
+                                ["-blockversion=211"]))
+        connect_nodes(self.nodes[1], 0)
+
+        self.is_network_split = False
+        self.sync_all()
+
+    def run_test(self):
+        # Mine 51 up-version blocks
+        self.nodes[1].generate(51)
+        self.sync_all()
+        # -alertnotify should trigger on the 51st block,
+        # but mine and sync another to give
+        # -alertnotify time to write
+        self.nodes[1].generate(1)
+        self.sync_all()
+
+        with open(self.alert_filename, 'r', encoding='utf8') as f:
+            alert_text = f.read()
+
+        if len(alert_text) == 0:
+            raise AssertionError("-alertnotify did not warn of up-version blocks")
+
+        # Mine more up-version blocks, should not get more alerts:
+        self.nodes[1].generate(1)
+        self.sync_all()
+        self.nodes[1].generate(1)
+        self.sync_all()
+
+        with open(self.alert_filename, 'r', encoding='utf8') as f:
+            alert_text2 = f.read()
+
+        if alert_text != alert_text2:
+            raise AssertionError("-alertnotify excessive warning of up-version blocks")
+
+if __name__ == '__main__':
+    ForkNotifyTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/fundrawtransaction.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,733 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the fundrawtransaction RPC."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+
+def get_unspent(listunspent, amount):
+    for utx in listunspent:
+        if utx['amount'] == amount:
+            return utx
+    raise AssertionError('Could not find unspent with amount={}'.format(amount))
+
+
+class RawTransactionsTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 4
+
+    def setup_network(self, split=False):
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
+
+        connect_nodes_bi(self.nodes,0,1)
+        connect_nodes_bi(self.nodes,1,2)
+        connect_nodes_bi(self.nodes,0,2)
+        connect_nodes_bi(self.nodes,0,3)
+
+        self.is_network_split=False
+        self.sync_all()
+
+    def run_test(self):
+        min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
+        # This test is not meant to test fee estimation and we'd like
+        # to be sure all txs are sent at a consistent desired feerate
+        for node in self.nodes:
+            node.settxfee(min_relay_tx_fee)
+
+        # if the fee's positive delta is higher than this value tests will fail,
+        # neg. delta always fails the tests.
+        # The size of the signature of every input may be at most 2 bytes larger
+        # than a minimum sized signature.
+
+        #            = 2 bytes * minRelayTxFeePerByte
+        feeTolerance = 2 * min_relay_tx_fee/1000
+
+        self.nodes[2].generate(1)
+        self.sync_all()
+        self.nodes[0].generate(121)
+        self.sync_all()
+
+        watchonly_address = self.nodes[0].getnewaddress()
+        watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
+        watchonly_amount = Decimal(200)
+        self.nodes[3].importpubkey(watchonly_pubkey, "", True)
+        watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
+        self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
+
+        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
+        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
+        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
+
+        self.nodes[0].generate(1)
+        self.sync_all()
+
+        ###############
+        # simple test #
+        ###############
+        inputs  = [ ]
+        outputs = { self.nodes[0].getnewaddress() : 1.0 }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+        fee = rawtxfund['fee']
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+        assert(len(dec_tx['vin']) > 0) #test that we have enough inputs
+
+        ##############################
+        # simple test with two coins #
+        ##############################
+        inputs  = [ ]
+        outputs = { self.nodes[0].getnewaddress() : 2.2 }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+        fee = rawtxfund['fee']
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+        assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
+
+        ##############################
+        # simple test with two coins #
+        ##############################
+        inputs  = [ ]
+        outputs = { self.nodes[0].getnewaddress() : 2.6 }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+        fee = rawtxfund['fee']
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+        assert(len(dec_tx['vin']) > 0)
+        assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
+
+
+        ################################
+        # simple test with two outputs #
+        ################################
+        inputs  = [ ]
+        outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+        fee = rawtxfund['fee']
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+        totalOut = 0
+        for out in dec_tx['vout']:
+            totalOut += out['value']
+
+        assert(len(dec_tx['vin']) > 0)
+        assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
+
+
+        #########################################################################
+        # test a fundrawtransaction with a VIN greater than the required amount #
+        #########################################################################
+        utx = get_unspent(self.nodes[2].listunspent(), 5)
+
+        inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
+        outputs = { self.nodes[0].getnewaddress() : 1.0 }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
+
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+        fee = rawtxfund['fee']
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+        totalOut = 0
+        for out in dec_tx['vout']:
+            totalOut += out['value']
+
+        assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
+
+
+        #####################################################################
+        # test a fundrawtransaction which will not get a change output      #
+        #####################################################################
+        utx = get_unspent(self.nodes[2].listunspent(), 5)
+
+        inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
+        outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
+
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+        fee = rawtxfund['fee']
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+        totalOut = 0
+        for out in dec_tx['vout']:
+            totalOut += out['value']
+
+        assert_equal(rawtxfund['changepos'], -1)
+        assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
+
+
+        ####################################################
+        # test a fundrawtransaction with an invalid option #
+        ####################################################
+        utx = get_unspent(self.nodes[2].listunspent(), 5)
+
+        inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
+        outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
+
+        assert_raises_jsonrpc(-3, "Unexpected key foo", self.nodes[2].fundrawtransaction, rawtx, {'foo':'bar'})
+
+        ############################################################
+        # test a fundrawtransaction with an invalid change address #
+        ############################################################
+        utx = get_unspent(self.nodes[2].listunspent(), 5)
+
+        inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
+        outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
+
+        assert_raises_jsonrpc(-5, "changeAddress must be a valid bitcoin address", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':'foobar'})
+
+        ############################################################
+        # test a fundrawtransaction with a provided change address #
+        ############################################################
+        utx = get_unspent(self.nodes[2].listunspent(), 5)
+
+        inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
+        outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
+
+        change = self.nodes[2].getnewaddress()
+        assert_raises_jsonrpc(-8, "changePosition out of bounds", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':change, 'changePosition':2})
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0})
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+        out = dec_tx['vout'][0]
+        assert_equal(change, out['scriptPubKey']['addresses'][0])
+
+
+        #########################################################################
+        # test a fundrawtransaction with a VIN smaller than the required amount #
+        #########################################################################
+        utx = get_unspent(self.nodes[2].listunspent(), 1)
+
+        inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
+        outputs = { self.nodes[0].getnewaddress() : 1.0 }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+
+        # 4-byte version + 1-byte vin count + 36-byte prevout then script_len
+        rawtx = rawtx[:82] + "0100" + rawtx[84:]
+
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
+        assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
+
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+        fee = rawtxfund['fee']
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+        totalOut = 0
+        matchingOuts = 0
+        for i, out in enumerate(dec_tx['vout']):
+            totalOut += out['value']
+            if out['scriptPubKey']['addresses'][0] in outputs:
+                matchingOuts+=1
+            else:
+                assert_equal(i, rawtxfund['changepos'])
+
+        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
+        assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
+
+        assert_equal(matchingOuts, 1)
+        assert_equal(len(dec_tx['vout']), 2)
+
+
+        ###########################################
+        # test a fundrawtransaction with two VINs #
+        ###########################################
+        utx = get_unspent(self.nodes[2].listunspent(), 1)
+        utx2 = get_unspent(self.nodes[2].listunspent(), 5)
+
+        inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
+        outputs = { self.nodes[0].getnewaddress() : 6.0 }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
+
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+        fee = rawtxfund['fee']
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+        totalOut = 0
+        matchingOuts = 0
+        for out in dec_tx['vout']:
+            totalOut += out['value']
+            if out['scriptPubKey']['addresses'][0] in outputs:
+                matchingOuts+=1
+
+        assert_equal(matchingOuts, 1)
+        assert_equal(len(dec_tx['vout']), 2)
+
+        matchingIns = 0
+        for vinOut in dec_tx['vin']:
+            for vinIn in inputs:
+                if vinIn['txid'] == vinOut['txid']:
+                    matchingIns+=1
+
+        assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
+
+        #########################################################
+        # test a fundrawtransaction with two VINs and two vOUTs #
+        #########################################################
+        utx = get_unspent(self.nodes[2].listunspent(), 1)
+        utx2 = get_unspent(self.nodes[2].listunspent(), 5)
+
+        inputs  = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
+        outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
+
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+        fee = rawtxfund['fee']
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+        totalOut = 0
+        matchingOuts = 0
+        for out in dec_tx['vout']:
+            totalOut += out['value']
+            if out['scriptPubKey']['addresses'][0] in outputs:
+                matchingOuts+=1
+
+        assert_equal(matchingOuts, 2)
+        assert_equal(len(dec_tx['vout']), 3)
+
+        ##############################################
+        # test a fundrawtransaction with invalid vin #
+        ##############################################
+        listunspent = self.nodes[2].listunspent()
+        inputs  = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
+        outputs = { self.nodes[0].getnewaddress() : 1.0}
+        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+
+        assert_raises_jsonrpc(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx)
+
+        ############################################################
+        #compare fee of a standard pubkeyhash transaction
+        inputs = []
+        outputs = {self.nodes[1].getnewaddress():1.1}
+        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
+        fundedTx = self.nodes[0].fundrawtransaction(rawTx)
+
+        #create same transaction over sendtoaddress
+        txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
+        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
+
+        #compare fee
+        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
+        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
+        ############################################################
+
+        ############################################################
+        #compare fee of a standard pubkeyhash transaction with multiple outputs
+        inputs = []
+        outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3}
+        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
+        fundedTx = self.nodes[0].fundrawtransaction(rawTx)
+        #create same transaction over sendtoaddress
+        txId = self.nodes[0].sendmany("", outputs)
+        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
+
+        #compare fee
+        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
+        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
+        ############################################################
+
+
+        ############################################################
+        #compare fee of a 2of2 multisig p2sh transaction
+
+        # create 2of2 addr
+        addr1 = self.nodes[1].getnewaddress()
+        addr2 = self.nodes[1].getnewaddress()
+
+        addr1Obj = self.nodes[1].validateaddress(addr1)
+        addr2Obj = self.nodes[1].validateaddress(addr2)
+
+        mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
+
+        inputs = []
+        outputs = {mSigObj:1.1}
+        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
+        fundedTx = self.nodes[0].fundrawtransaction(rawTx)
+
+        #create same transaction over sendtoaddress
+        txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
+        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
+
+        #compare fee
+        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
+        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
+        ############################################################
+
+
+        ############################################################
+        #compare fee of a 4of5 multisig p2sh transaction
+
+        # create 4of5 addr
+        addr1 = self.nodes[1].getnewaddress()
+        addr2 = self.nodes[1].getnewaddress()
+        addr3 = self.nodes[1].getnewaddress()
+        addr4 = self.nodes[1].getnewaddress()
+        addr5 = self.nodes[1].getnewaddress()
+
+        addr1Obj = self.nodes[1].validateaddress(addr1)
+        addr2Obj = self.nodes[1].validateaddress(addr2)
+        addr3Obj = self.nodes[1].validateaddress(addr3)
+        addr4Obj = self.nodes[1].validateaddress(addr4)
+        addr5Obj = self.nodes[1].validateaddress(addr5)
+
+        mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
+
+        inputs = []
+        outputs = {mSigObj:1.1}
+        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
+        fundedTx = self.nodes[0].fundrawtransaction(rawTx)
+
+        #create same transaction over sendtoaddress
+        txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
+        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
+
+        #compare fee
+        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
+        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
+        ############################################################
+
+
+        ############################################################
+        # spend a 2of2 multisig transaction over fundraw
+
+        # create 2of2 addr
+        addr1 = self.nodes[2].getnewaddress()
+        addr2 = self.nodes[2].getnewaddress()
+
+        addr1Obj = self.nodes[2].validateaddress(addr1)
+        addr2Obj = self.nodes[2].validateaddress(addr2)
+
+        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
+
+
+        # send 1.2 BTC to msig addr
+        txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
+        self.sync_all()
+        self.nodes[1].generate(1)
+        self.sync_all()
+
+        oldBalance = self.nodes[1].getbalance()
+        inputs = []
+        outputs = {self.nodes[1].getnewaddress():1.1}
+        rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
+        fundedTx = self.nodes[2].fundrawtransaction(rawTx)
+
+        signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
+        txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
+        self.sync_all()
+        self.nodes[1].generate(1)
+        self.sync_all()
+
+        # make sure funds are received at node1
+        assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance())
+
+        ############################################################
+        # locked wallet test
+        self.nodes[1].encryptwallet("test")
+        self.nodes.pop(1)
+        stop_node(self.nodes[0], 0)
+        stop_node(self.nodes[1], 2)
+        stop_node(self.nodes[2], 3)
+
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
+        # This test is not meant to test fee estimation and we'd like
+        # to be sure all txs are sent at a consistent desired feerate
+        for node in self.nodes:
+            node.settxfee(min_relay_tx_fee)
+
+        connect_nodes_bi(self.nodes,0,1)
+        connect_nodes_bi(self.nodes,1,2)
+        connect_nodes_bi(self.nodes,0,2)
+        connect_nodes_bi(self.nodes,0,3)
+        self.is_network_split=False
+        self.sync_all()
+
+        # drain the keypool
+        self.nodes[1].getnewaddress()
+        inputs = []
+        outputs = {self.nodes[0].getnewaddress():1.1}
+        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
+        # fund a transaction that requires a new key for the change output
+        # creating the key must be impossible because the wallet is locked
+        assert_raises_jsonrpc(-4, "Keypool ran out, please call keypoolrefill first", self.nodes[1].fundrawtransaction, rawTx)
+
+        #refill the keypool
+        self.nodes[1].walletpassphrase("test", 100)
+        self.nodes[1].walletlock()
+
+        assert_raises_jsonrpc(-13, "walletpassphrase", self.nodes[1].sendtoaddress, self.nodes[0].getnewaddress(), 1.2)
+
+        oldBalance = self.nodes[0].getbalance()
+
+        inputs = []
+        outputs = {self.nodes[0].getnewaddress():1.1}
+        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
+        fundedTx = self.nodes[1].fundrawtransaction(rawTx)
+
+        #now we need to unlock
+        self.nodes[1].walletpassphrase("test", 600)
+        signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
+        txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
+        self.nodes[1].generate(1)
+        self.sync_all()
+
+        # make sure funds are received at node0
+        assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())
+
+
+        ###############################################
+        # multiple (~19) inputs tx test | Compare fee #
+        ###############################################
+
+        #empty node1, send some small coins from node0 to node1
+        self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
+        self.sync_all()
+        self.nodes[0].generate(1)
+        self.sync_all()
+
+        for i in range(0,20):
+            self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
+        self.nodes[0].generate(1)
+        self.sync_all()
+
+        #fund a tx with ~20 small inputs
+        inputs = []
+        outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
+        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
+        fundedTx = self.nodes[1].fundrawtransaction(rawTx)
+
+        #create same transaction over sendtoaddress
+        txId = self.nodes[1].sendmany("", outputs)
+        signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
+
+        #compare fee
+        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
+        assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
+
+
+        #############################################
+        # multiple (~19) inputs tx test | sign/send #
+        #############################################
+
+        #again, empty node1, send some small coins from node0 to node1
+        self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
+        self.sync_all()
+        self.nodes[0].generate(1)
+        self.sync_all()
+
+        for i in range(0,20):
+            self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
+        self.nodes[0].generate(1)
+        self.sync_all()
+
+        #fund a tx with ~20 small inputs
+        oldBalance = self.nodes[0].getbalance()
+
+        inputs = []
+        outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
+        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
+        fundedTx = self.nodes[1].fundrawtransaction(rawTx)
+        fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
+        txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
+        self.sync_all()
+        self.nodes[0].generate(1)
+        self.sync_all()
+        assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward
+
+        #####################################################
+        # test fundrawtransaction with OP_RETURN and no vin #
+        #####################################################
+
+        rawtx   = "0100000000010000000000000000066a047465737400000000"
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtx)
+
+        assert_equal(len(dec_tx['vin']), 0)
+        assert_equal(len(dec_tx['vout']), 1)
+
+        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+        dec_tx  = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
+
+        assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
+        assert_equal(len(dec_tx['vout']), 2) # one change output added
+
+
+        ##################################################
+        # test a fundrawtransaction using only watchonly #
+        ##################################################
+
+        inputs = []
+        outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
+        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
+
+        result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True })
+        res_dec = self.nodes[0].decoderawtransaction(result["hex"])
+        assert_equal(len(res_dec["vin"]), 1)
+        assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
+
+        assert("fee" in result.keys())
+        assert_greater_than(result["changepos"], -1)
+
+        ###############################################################
+        # test fundrawtransaction using the entirety of watched funds #
+        ###############################################################
+
+        inputs = []
+        outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
+        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
+
+        # Backward compatibility test (2nd param is includeWatching)
+        result = self.nodes[3].fundrawtransaction(rawtx, True)
+        res_dec = self.nodes[0].decoderawtransaction(result["hex"])
+        assert_equal(len(res_dec["vin"]), 2)
+        assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
+
+        assert_greater_than(result["fee"], 0)
+        assert_greater_than(result["changepos"], -1)
+        assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
+
+        signedtx = self.nodes[3].signrawtransaction(result["hex"])
+        assert(not signedtx["complete"])
+        signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
+        assert(signedtx["complete"])
+        self.nodes[0].sendrawtransaction(signedtx["hex"])
+        self.nodes[0].generate(1)
+        self.sync_all()
+
+        #######################
+        # Test feeRate option #
+        #######################
+
+        # Make sure there is exactly one input so coin selection can't skew the result
+        assert_equal(len(self.nodes[3].listunspent(1)), 1)
+
+        inputs = []
+        outputs = {self.nodes[3].getnewaddress() : 1}
+        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
+        result = self.nodes[3].fundrawtransaction(rawtx) # uses min_relay_tx_fee (set by settxfee)
+        result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee})
+        result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10*min_relay_tx_fee})
+        result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex'])
+        assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
+        assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)
+
+        #############################
+        # Test address reuse option #
+        #############################
+
+        result3 = self.nodes[3].fundrawtransaction(rawtx, {"reserveChangeKey": False})
+        res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
+        changeaddress = ""
+        for out in res_dec['vout']:
+            if out['value'] > 1.0:
+                changeaddress += out['scriptPubKey']['addresses'][0]
+        assert(changeaddress != "")
+        nextaddr = self.nodes[3].getnewaddress()
+        # frt should not have removed the key from the keypool
+        assert(changeaddress == nextaddr)
+
+        result3 = self.nodes[3].fundrawtransaction(rawtx)
+        res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
+        changeaddress = ""
+        for out in res_dec['vout']:
+            if out['value'] > 1.0:
+                changeaddress += out['scriptPubKey']['addresses'][0]
+        assert(changeaddress != "")
+        nextaddr = self.nodes[3].getnewaddress()
+        # Now the change address key should be removed from the keypool
+        assert(changeaddress != nextaddr)
+
+        ######################################
+        # Test subtractFeeFromOutputs option #
+        ######################################
+
+        # Make sure there is exactly one input so coin selection can't skew the result
+        assert_equal(len(self.nodes[3].listunspent(1)), 1)
+
+        inputs = []
+        outputs = {self.nodes[2].getnewaddress(): 1}
+        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
+
+        result = [self.nodes[3].fundrawtransaction(rawtx), # uses min_relay_tx_fee (set by settxfee)
+                  self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": []}), # empty subtraction list
+                  self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0]}), # uses min_relay_tx_fee (set by settxfee)
+                  self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee}),
+                  self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee, "subtractFeeFromOutputs": [0]})]
+
+        dec_tx = [self.nodes[3].decoderawtransaction(tx['hex']) for tx in result]
+        output = [d['vout'][1 - r['changepos']]['value'] for d, r in zip(dec_tx, result)]
+        change = [d['vout'][r['changepos']]['value'] for d, r in zip(dec_tx, result)]
+
+        assert_equal(result[0]['fee'], result[1]['fee'], result[2]['fee'])
+        assert_equal(result[3]['fee'], result[4]['fee'])
+        assert_equal(change[0], change[1])
+        assert_equal(output[0], output[1])
+        assert_equal(output[0], output[2] + result[2]['fee'])
+        assert_equal(change[0] + result[0]['fee'], change[2])
+        assert_equal(output[3], output[4] + result[4]['fee'])
+        assert_equal(change[3] + result[3]['fee'], change[4])
+
+        inputs = []
+        outputs = {self.nodes[2].getnewaddress(): value for value in (1.0, 1.1, 1.2, 1.3)}
+        keys = list(outputs.keys())
+        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
+
+        result = [self.nodes[3].fundrawtransaction(rawtx),
+                  # split the fee between outputs 0, 2, and 3, but not output 1
+                  self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0, 2, 3]})]
+
+        dec_tx = [self.nodes[3].decoderawtransaction(result[0]['hex']),
+                  self.nodes[3].decoderawtransaction(result[1]['hex'])]
+
+        # Nested list of non-change output amounts for each transaction
+        output = [[out['value'] for i, out in enumerate(d['vout']) if i != r['changepos']]
+                  for d, r in zip(dec_tx, result)]
+
+        # List of differences in output amounts between normal and subtractFee transactions
+        share = [o0 - o1 for o0, o1 in zip(output[0], output[1])]
+
+        # output 1 is the same in both transactions
+        assert_equal(share[1], 0)
+
+        # the other 3 outputs are smaller as a result of subtractFeeFromOutputs
+        assert_greater_than(share[0], 0)
+        assert_greater_than(share[2], 0)
+        assert_greater_than(share[3], 0)
+
+        # outputs 2 and 3 take the same share of the fee
+        assert_equal(share[2], share[3])
+
+        # output 0 takes at least as much share of the fee, and no more than 2 satoshis more, than outputs 2 and 3
+        assert_greater_than_or_equal(share[0], share[2])
+        assert_greater_than_or_equal(share[2] + Decimal(2e-8), share[0])
+
+        # the fee is the same in both transactions
+        assert_equal(result[0]['fee'], result[1]['fee'])
+
+        # the total subtracted from the outputs is equal to the fee
+        assert_equal(share[0] + share[2] + share[3], result[0]['fee'])
+
+if __name__ == '__main__':
+    RawTransactionsTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/getblocktemplate_longpoll.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test longpolling with getblocktemplate."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+import threading
+
+class LongpollThread(threading.Thread):
+    def __init__(self, node):
+        threading.Thread.__init__(self)
+        # query current longpollid
+        templat = node.getblocktemplate()
+        self.longpollid = templat['longpollid']
+        # create a new connection to the node, we can't use the same
+        # connection from two threads
+        self.node = get_rpc_proxy(node.url, 1, timeout=600)
+
+    def run(self):
+        self.node.getblocktemplate({'longpollid':self.longpollid})
+
+class GetBlockTemplateLPTest(BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 4
+        self.setup_clean_chain = False
+
+    def run_test(self):
+        self.log.info("Warning: this test will take about 70 seconds in the best case. Be patient.")
+        self.nodes[0].generate(10)
+        templat = self.nodes[0].getblocktemplate()
+        longpollid = templat['longpollid']
+        # longpollid should not change between successive invocations if nothing else happens
+        templat2 = self.nodes[0].getblocktemplate()
+        assert(templat2['longpollid'] == longpollid)
+
+        # Test 1: test that longpoll waits if we do nothing
+        thr = LongpollThread(self.nodes[0])
+        thr.start()
+        # check that thread still lives
+        thr.join(5)  # wait 5 seconds or until thread exits
+        assert(thr.is_alive())
+
+        # Test 2: test that longpoll will terminate if another node generates a block
+        self.nodes[1].generate(1)  # generate a block on another node
+        # check that thread will exit now that a new block was found
+        thr.join(5)  # wait 5 seconds or until thread exits
+        assert(not thr.is_alive())
+
+        # Test 3: test that longpoll will terminate if we generate a block ourselves
+        thr = LongpollThread(self.nodes[0])
+        thr.start()
+        self.nodes[0].generate(1)  # generate a block on our own node
+        thr.join(5)  # wait 5 seconds or until thread exits
+        assert(not thr.is_alive())
+
+        # Test 4: test that introducing a new transaction into the mempool will terminate the longpoll
+        thr = LongpollThread(self.nodes[0])
+        thr.start()
+        # generate a random transaction and submit it
+        min_relay_fee = self.nodes[0].getnetworkinfo()["relayfee"]
+        # min_relay_fee is fee per 1000 bytes, which should be more than enough.
+        (txid, txhex, fee) = random_transaction(self.nodes, Decimal("1.1"), min_relay_fee, Decimal("0.001"), 20)
+        # after one minute, every 10 seconds the mempool is probed, so in 80 seconds it should have returned
+        thr.join(60 + 20)
+        assert(not thr.is_alive())
+
+if __name__ == '__main__':
+    GetBlockTemplateLPTest().main()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/getblocktemplate_proposals.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,161 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test block proposals with getblocktemplate."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+from binascii import a2b_hex, b2a_hex
+from hashlib import sha256
+from struct import pack
+
+def b2x(b):
+    return b2a_hex(b).decode('ascii')
+
+# NOTE: This does not work for signed numbers (set the high bit) or zero (use b'\0')
+def encodeUNum(n):
+    s = bytearray(b'\1')
+    while n > 127:
+        s[0] += 1
+        s.append(n % 256)
+        n //= 256
+    s.append(n)
+    return bytes(s)
+
+def varlenEncode(n):
+    if n < 0xfd:
+        return pack('<B', n)
+    if n <= 0xffff:
+        return b'\xfd' + pack('<H', n)
+    if n <= 0xffffffff:
+        return b'\xfe' + pack('<L', n)
+    return b'\xff' + pack('<Q', n)
+
+def dblsha(b):
+    return sha256(sha256(b).digest()).digest()
+
+def genmrklroot(leaflist):
+    cur = leaflist
+    while len(cur) > 1:
+        n = []
+        if len(cur) & 1:
+            cur.append(cur[-1])
+        for i in range(0, len(cur), 2):
+            n.append(dblsha(cur[i] + cur[i+1]))
+        cur = n
+    return cur[0]
+
+def template_to_bytearray(tmpl, txlist):
+    blkver = pack('<L', tmpl['version'])
+    mrklroot = genmrklroot(list(dblsha(a) for a in txlist))
+    timestamp = pack('<L', tmpl['curtime'])
+    nonce = b'\0\0\0\0'
+    blk = blkver + a2b_hex(tmpl['previousblockhash'])[::-1] + mrklroot + timestamp + a2b_hex(tmpl['bits'])[::-1] + nonce
+    blk += varlenEncode(len(txlist))
+    for tx in txlist:
+        blk += tx
+    return bytearray(blk)
+
+def template_to_hex(tmpl, txlist):
+    return b2x(template_to_bytearray(tmpl, txlist))
+
+def assert_template(node, tmpl, txlist, expect):
+    rsp = node.getblocktemplate({'data':template_to_hex(tmpl, txlist),'mode':'proposal'})
+    if rsp != expect:
+        raise AssertionError('unexpected: %s' % (rsp,))
+
+class GetBlockTemplateProposalTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 2
+        self.setup_clean_chain = False
+
+    def setup_network(self):
+        self.nodes = self.setup_nodes()
+        connect_nodes_bi(self.nodes, 0, 1)
+
+    def run_test(self):
+        node = self.nodes[0]
+        node.generate(1) # Mine a block to leave initial block download
+        tmpl = node.getblocktemplate()
+        if 'coinbasetxn' not in tmpl:
+            rawcoinbase = encodeUNum(tmpl['height'])
+            rawcoinbase += b'\x01-'
+            hexcoinbase = b2x(rawcoinbase)
+            hexoutval = b2x(pack('<Q', tmpl['coinbasevalue']))
+            tmpl['coinbasetxn'] = {'data': '01000000' + '01' + '0000000000000000000000000000000000000000000000000000000000000000ffffffff' + ('%02x' % (len(rawcoinbase),)) + hexcoinbase + 'fffffffe' + '01' + hexoutval + '00' + '00000000'}
+        txlist = list(bytearray(a2b_hex(a['data'])) for a in (tmpl['coinbasetxn'],) + tuple(tmpl['transactions']))
+
+        # Test 0: Capability advertised
+        assert('proposal' in tmpl['capabilities'])
+
+        # NOTE: This test currently FAILS (regtest mode doesn't enforce block height in coinbase)
+        ## Test 1: Bad height in coinbase
+        #txlist[0][4+1+36+1+1] += 1
+        #assert_template(node, tmpl, txlist, 'FIXME')
+        #txlist[0][4+1+36+1+1] -= 1
+
+        # Test 2: Bad input hash for gen tx
+        txlist[0][4+1] += 1
+        assert_template(node, tmpl, txlist, 'bad-cb-missing')
+        txlist[0][4+1] -= 1
+
+        # Test 3: Truncated final tx
+        lastbyte = txlist[-1].pop()
+        assert_raises_jsonrpc(-22, "Block decode failed", assert_template, node, tmpl, txlist, 'n/a')
+        txlist[-1].append(lastbyte)
+
+        # Test 4: Add an invalid tx to the end (duplicate of gen tx)
+        txlist.append(txlist[0])
+        assert_template(node, tmpl, txlist, 'bad-txns-duplicate')
+        txlist.pop()
+
+        # Test 5: Add an invalid tx to the end (non-duplicate)
+        txlist.append(bytearray(txlist[0]))
+        txlist[-1][4+1] = 0xff
+        assert_template(node, tmpl, txlist, 'bad-txns-inputs-missingorspent')
+        txlist.pop()
+
+        # Test 6: Future tx lock time
+        txlist[0][-4:] = b'\xff\xff\xff\xff'
+        assert_template(node, tmpl, txlist, 'bad-txns-nonfinal')
+        txlist[0][-4:] = b'\0\0\0\0'
+
+        # Test 7: Bad tx count
+        txlist.append(b'')
+        assert_raises_jsonrpc(-22, 'Block decode failed', assert_template, node, tmpl, txlist, 'n/a')
+        txlist.pop()
+
+        # Test 8: Bad bits
+        realbits = tmpl['bits']
+        tmpl['bits'] = '1c0000ff'  # impossible in the real world
+        assert_template(node, tmpl, txlist, 'bad-diffbits')
+        tmpl['bits'] = realbits
+
+        # Test 9: Bad merkle root
+        rawtmpl = template_to_bytearray(tmpl, txlist)
+        rawtmpl[4+32] = (rawtmpl[4+32] + 1) % 0x100
+        rsp = node.getblocktemplate({'data':b2x(rawtmpl),'mode':'proposal'})
+        if rsp != 'bad-txnmrklroot':
+            raise AssertionError('unexpected: %s' % (rsp,))
+
+        # Test 10: Bad timestamps
+        realtime = tmpl['curtime']
+        tmpl['curtime'] = 0x7fffffff
+        assert_template(node, tmpl, txlist, 'time-too-new')
+        tmpl['curtime'] = 0
+        assert_template(node, tmpl, txlist, 'time-too-old')
+        tmpl['curtime'] = realtime
+
+        # Test 11: Valid block
+        assert_template(node, tmpl, txlist, None)
+
+        # Test 12: Orphan block
+        tmpl['previousblockhash'] = 'ff00' * 16
+        assert_template(node, tmpl, txlist, 'inconclusive-not-best-prevblk')
+
+if __name__ == '__main__':
+    GetBlockTemplateProposalTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/getchaintips.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,65 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the getchaintips RPC.
+
+- introduce a network split
+- work on chains of different lengths
+- join the network together again
+- verify that getchaintips now returns two chain tips.
+"""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+class GetChainTipsTest (BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 4
+        self.setup_clean_chain = False
+
+    def run_test (self):
+
+        tips = self.nodes[0].getchaintips ()
+        assert_equal (len (tips), 1)
+        assert_equal (tips[0]['branchlen'], 0)
+        assert_equal (tips[0]['height'], 200)
+        assert_equal (tips[0]['status'], 'active')
+
+        # Split the network and build two chains of different lengths.
+        self.split_network ()
+        self.nodes[0].generate(10)
+        self.nodes[2].generate(20)
+        self.sync_all ()
+
+        tips = self.nodes[1].getchaintips ()
+        assert_equal (len (tips), 1)
+        shortTip = tips[0]
+        assert_equal (shortTip['branchlen'], 0)
+        assert_equal (shortTip['height'], 210)
+        assert_equal (tips[0]['status'], 'active')
+
+        tips = self.nodes[3].getchaintips ()
+        assert_equal (len (tips), 1)
+        longTip = tips[0]
+        assert_equal (longTip['branchlen'], 0)
+        assert_equal (longTip['height'], 220)
+        assert_equal (tips[0]['status'], 'active')
+
+        # Join the network halves and check that we now have two tips
+        # (at least at the nodes that previously had the short chain).
+        self.join_network ()
+
+        tips = self.nodes[0].getchaintips ()
+        assert_equal (len (tips), 2)
+        assert_equal (tips[0], longTip)
+
+        assert_equal (tips[1]['branchlen'], 10)
+        assert_equal (tips[1]['status'], 'valid-fork')
+        tips[1]['branchlen'] = 0
+        tips[1]['status'] = 'active'
+        assert_equal (tips[1], shortTip)
+
+if __name__ == '__main__':
+    GetChainTipsTest ().main ()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/httpbasics.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the RPC HTTP basics."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+import http.client
+import urllib.parse
+
+class HTTPBasicsTest (BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 3
+        self.setup_clean_chain = False
+
+    def setup_network(self):
+        self.nodes = self.setup_nodes()  # no P2P connections needed; each node is exercised over RPC only
+
+    def run_test(self):
+
+        #################################################
+        # lowlevel check for http persistent connection #
+        #################################################
+        url = urllib.parse.urlparse(self.nodes[0].url)
+        authpair = url.username + ':' + url.password
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)
+        assert(conn.sock!=None) #according to http/1.1 connection must still be open!
+
+        #send 2nd request without closing connection
+        conn.request('POST', '/', '{"method": "getchaintips"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1) #must also respond with a correct json-rpc message
+        assert(conn.sock!=None) #according to http/1.1 connection must still be open!
+        conn.close()
+
+        #the same should hold if we explicitly add keep-alive, as it is the standard behaviour
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)
+        assert(conn.sock!=None) #according to http/1.1 connection must still be open!
+
+        #send 2nd request without closing connection
+        conn.request('POST', '/', '{"method": "getchaintips"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1) #must also respond with a correct json-rpc message
+        assert(conn.sock!=None) #according to http/1.1 connection must still be open!
+        conn.close()
+
+        #now do the same with "Connection: close"
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)
+        assert(conn.sock==None) #now the connection must be closed after the response
+
+        #node1 (2nd node) is running with disabled keep-alive option
+        urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
+        authpair = urlNode1.username + ':' + urlNode1.password
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
+
+        conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)
+
+        #node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
+        urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
+        authpair = urlNode2.username + ':' + urlNode2.password
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
+
+        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)
+        assert(conn.sock!=None) #connection must be kept open because bitcoind should use keep-alive by default
+
+        # Check excessive request size
+        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
+        conn.connect()
+        conn.request('GET', '/' + ('x'*1000), '', headers)
+        out1 = conn.getresponse()
+        assert_equal(out1.status, http.client.NOT_FOUND)  # still parsed; unknown endpoint -> 404
+
+        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
+        conn.connect()
+        conn.request('GET', '/' + ('x'*10000), '', headers)
+        out1 = conn.getresponse()
+        assert_equal(out1.status, http.client.BAD_REQUEST)  # request too large -> rejected with 400
+
+
+if __name__ == '__main__':
+    HTTPBasicsTest ().main ()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/import-rescan.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,193 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test wallet import RPCs.
+
+Test rescan behavior of importaddress, importpubkey, importprivkey, and
+importmulti RPCs with different types of keys and rescan options.
+
+In the first part of the test, node 0 creates an address for each type of
+import RPC call and sends BTC to it. Then other nodes import the addresses,
+and the test makes listtransactions and getbalance calls to confirm that the
+importing node either did or did not execute rescans picking up the send
+transactions.
+
+In the second part of the test, node 0 sends more BTC to each address, and the
+test makes more listtransactions and getbalance calls to confirm that the
+importing nodes pick up the new transactions regardless of whether rescans
+happened previously.
+"""
+
+from test_framework.authproxy import JSONRPCException
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (start_nodes, connect_nodes, sync_blocks, assert_equal, set_node_times)
+from decimal import Decimal
+
+import collections
+import enum
+import itertools
+
+Call = enum.Enum("Call", "single multi")
+Data = enum.Enum("Data", "address pub priv")
+Rescan = enum.Enum("Rescan", "no yes late_timestamp")
+
+
+class Variant(collections.namedtuple("Variant", "call data rescan prune")):
+    """Helper for importing one key and verifying scanned transactions."""
+    # Extra attributes (label, address, key, node, expect_disabled, expected_balance, expected_txs) are assigned by the test driver.
+    def do_import(self, timestamp):
+        """Call one key import RPC."""
+        # For the single-key RPCs, a rescan is requested only when self.rescan == Rescan.yes.
+        if self.call == Call.single:
+            if self.data == Data.address:
+                response, error = try_rpc(self.node.importaddress, self.address["address"], self.label,
+                                          self.rescan == Rescan.yes)
+            elif self.data == Data.pub:
+                response, error = try_rpc(self.node.importpubkey, self.address["pubkey"], self.label,
+                                          self.rescan == Rescan.yes)
+            elif self.data == Data.priv:
+                response, error = try_rpc(self.node.importprivkey, self.key, self.label, self.rescan == Rescan.yes)
+            assert_equal(response, None)
+            assert_equal(error, {'message': 'Rescan is disabled in pruned mode',
+                                 'code': -4} if self.expect_disabled else None)
+        elif self.call == Call.multi:
+            response = self.node.importmulti([{
+                "scriptPubKey": {
+                    "address": self.address["address"]
+                },
+                "timestamp": timestamp + TIMESTAMP_WINDOW + (1 if self.rescan == Rescan.late_timestamp else 0),  # +1 pushes the key just past the rescan window
+                "pubkeys": [self.address["pubkey"]] if self.data == Data.pub else [],
+                "keys": [self.key] if self.data == Data.priv else [],
+                "label": self.label,
+                "watchonly": self.data != Data.priv
+            }], {"rescan": self.rescan in (Rescan.yes, Rescan.late_timestamp)})
+            assert_equal(response, [{"success": True}])
+
+    def check(self, txid=None, amount=None, confirmations=None):
+        """Verify that getbalance/listtransactions return expected values."""
+
+        balance = self.node.getbalance(self.label, 0, True)  # minconf=0, include_watchonly=True
+        assert_equal(balance, self.expected_balance)
+
+        txs = self.node.listtransactions(self.label, 10000, 0, True)  # count=10000, skip=0, include_watchonly=True
+        assert_equal(len(txs), self.expected_txs)
+
+        if txid is not None:
+            tx, = [tx for tx in txs if tx["txid"] == txid]  # exactly one matching transaction expected
+            assert_equal(tx["account"], self.label)
+            assert_equal(tx["address"], self.address["address"])
+            assert_equal(tx["amount"], amount)
+            assert_equal(tx["category"], "receive")
+            assert_equal(tx["label"], self.label)
+            assert_equal(tx["txid"], txid)
+            assert_equal(tx["confirmations"], confirmations)
+            assert_equal("trusted" not in tx, True)
+            # Verify the transaction is correctly marked watchonly depending on
+            # whether the transaction pays to an imported public key or
+            # imported private key. The test setup ensures that transaction
+            # inputs will not be from watchonly keys (important because
+            # involvesWatchonly will be true if either the transaction output
+            # or inputs are watchonly).
+            if self.data != Data.priv:
+                assert_equal(tx["involvesWatchonly"], True)
+            else:
+                assert_equal("involvesWatchonly" not in tx, True)
+
+
+# List of Variants for each way a key or address could be imported.
+IMPORT_VARIANTS = [Variant(*variants) for variants in itertools.product(Call, Data, Rescan, (False, True))]
+
+# List of nodes to import keys to. Half the nodes will have pruning disabled,
+# half will have it enabled. Different nodes will be used for imports that are
+# expected to cause rescans, and imports that are not expected to cause
+# rescans, in order to prevent rescans during later imports picking up
+# transactions associated with earlier imports. This makes it easier to keep
+# track of expected balances and transactions.
+ImportNode = collections.namedtuple("ImportNode", "prune rescan")
+IMPORT_NODES = [ImportNode(*fields) for fields in itertools.product((False, True), repeat=2)]
+
+# Rescans start at the earliest block up to 2 hours before the key timestamp.
+TIMESTAMP_WINDOW = 2 * 60 * 60
+
+
+class ImportRescanTest(BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 2 + len(IMPORT_NODES)  # node 0 mines/sends, node 1 creates keys, the rest import
+
+    def setup_network(self):
+        extra_args = [[] for _ in range(self.num_nodes)]
+        for i, import_node in enumerate(IMPORT_NODES, 2):  # importing nodes occupy indices 2 and up
+            if import_node.prune:
+                extra_args[i] += ["-prune=1"]
+
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, extra_args)
+        for i in range(1, self.num_nodes):
+            connect_nodes(self.nodes[i], 0)
+
+    def run_test(self):
+        # Create one transaction on node 0 with a unique amount and label for
+        # each possible type of wallet import RPC.
+        for i, variant in enumerate(IMPORT_VARIANTS):
+            variant.label = "label {} {}".format(i, variant)
+            variant.address = self.nodes[1].validateaddress(self.nodes[1].getnewaddress(variant.label))
+            variant.key = self.nodes[1].dumpprivkey(variant.address["address"])
+            variant.initial_amount = 10 - (i + 1) / 4.0  # unique amount per variant for bookkeeping
+            variant.initial_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.initial_amount)
+
+        # Generate a block containing the initial transactions, then another
+        # block further in the future (past the rescan window).
+        self.nodes[0].generate(1)
+        assert_equal(self.nodes[0].getrawmempool(), [])
+        timestamp = self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"]
+        set_node_times(self.nodes, timestamp + TIMESTAMP_WINDOW + 1)
+        self.nodes[0].generate(1)
+        sync_blocks(self.nodes)
+
+        # For each variation of wallet key import, invoke the import RPC and
+        # check the results from getbalance and listtransactions.
+        for variant in IMPORT_VARIANTS:
+            variant.expect_disabled = variant.rescan == Rescan.yes and variant.prune and variant.call == Call.single
+            expect_rescan = variant.rescan == Rescan.yes and not variant.expect_disabled
+            variant.node = self.nodes[2 + IMPORT_NODES.index(ImportNode(variant.prune, expect_rescan))]
+            variant.do_import(timestamp)
+            if expect_rescan:
+                variant.expected_balance = variant.initial_amount
+                variant.expected_txs = 1
+                variant.check(variant.initial_txid, variant.initial_amount, 2)  # two blocks mined since the send
+            else:
+                variant.expected_balance = 0
+                variant.expected_txs = 0
+                variant.check()
+
+        # Create new transactions sending to each address.
+        fee = self.nodes[0].getnetworkinfo()["relayfee"]
+        for i, variant in enumerate(IMPORT_VARIANTS):
+            variant.sent_amount = 10 - (2 * i + 1) / 8.0
+            variant.sent_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.sent_amount)
+
+        # Generate a block containing the new transactions.
+        self.nodes[0].generate(1)
+        assert_equal(self.nodes[0].getrawmempool(), [])
+        sync_blocks(self.nodes)
+
+        # Check the latest results from getbalance and listtransactions.
+        for variant in IMPORT_VARIANTS:
+            if not variant.expect_disabled:
+                variant.expected_balance += variant.sent_amount
+                variant.expected_txs += 1
+                variant.check(variant.sent_txid, variant.sent_amount, 1)
+            else:
+                variant.check()
+
+
+def try_rpc(func, *args, **kwargs):  # returns (result, None) on success, (None, error dict) on RPC failure
+    try:
+        return func(*args, **kwargs), None
+    except JSONRPCException as e:
+        return None, e.error
+
+
+if __name__ == "__main__":
+    ImportRescanTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/importmulti.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,453 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the importmulti RPC."""
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class ImportMultiTest (BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 2  # node 0 creates keys/addresses, node 1 imports them
+        self.setup_clean_chain = True  # start from an empty chain; blocks are mined in run_test
+
+    def setup_network(self, split=False):  # NOTE(review): split parameter is accepted but ignored
+        self.nodes = start_nodes(2, self.options.tmpdir)
+        self.is_network_split=False
+
+    def run_test (self):
+        self.log.info("Mining blocks...")
+        self.nodes[0].generate(1)
+        self.nodes[1].generate(1)
+        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+
+        # keyword definition
+        PRIV_KEY = 'privkey'
+        PUB_KEY = 'pubkey'
+        ADDRESS_KEY = 'address'
+        SCRIPT_KEY = 'script'
+
+
+        node0_address1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        node0_address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        node0_address3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+
+        #Check only one address
+        assert_equal(node0_address1['ismine'], True)
+
+        #Node 1 sync test
+        assert_equal(self.nodes[1].getblockcount(),1)
+
+        #Address Test - before import
+        address_info = self.nodes[1].validateaddress(node0_address1['address'])
+        assert_equal(address_info['iswatchonly'], False)
+        assert_equal(address_info['ismine'], False)
+
+
+        # RPC importmulti -----------------------------------------------
+
+        # Bitcoin Address
+        self.log.info("Should import an address")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": address['address']
+            },
+            "timestamp": "now",
+        }])
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], True)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal(address_assert['timestamp'], timestamp)
+        watchonly_address = address['address']
+        watchonly_timestamp = timestamp
+
+        self.log.info("Should not import an invalid address")
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": "not valid address",
+            },
+            "timestamp": "now",
+        }])
+        assert_equal(result[0]['success'], False)
+        assert_equal(result[0]['error']['code'], -5)
+        assert_equal(result[0]['error']['message'], 'Invalid address')
+
+        # ScriptPubKey + internal
+        self.log.info("Should import a scriptPubKey with internal flag")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": address['scriptPubKey'],
+            "timestamp": "now",
+            "internal": True
+        }])
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], True)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal(address_assert['timestamp'], timestamp)
+
+        # ScriptPubKey + !internal
+        self.log.info("Should not import a scriptPubKey without internal flag")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": address['scriptPubKey'],
+            "timestamp": "now",
+        }])
+        assert_equal(result[0]['success'], False)
+        assert_equal(result[0]['error']['code'], -8)
+        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], False)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal('timestamp' in address_assert, False)
+
+
+        # Address + Public key + !Internal
+        self.log.info("Should import an address with public key")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": address['address']
+            },
+            "timestamp": "now",
+            "pubkeys": [ address['pubkey'] ]
+        }])
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], True)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal(address_assert['timestamp'], timestamp)
+
+
+        # ScriptPubKey + Public key + internal
+        self.log.info("Should import a scriptPubKey with internal and with public key")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        request = [{
+            "scriptPubKey": address['scriptPubKey'],
+            "timestamp": "now",
+            "pubkeys": [ address['pubkey'] ],
+            "internal": True
+        }]
+        result = self.nodes[1].importmulti(request)
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], True)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal(address_assert['timestamp'], timestamp)
+
+        # ScriptPubKey + Public key + !internal
+        self.log.info("Should not import a scriptPubKey without internal and with public key")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        request = [{
+            "scriptPubKey": address['scriptPubKey'],
+            "timestamp": "now",
+            "pubkeys": [ address['pubkey'] ]
+        }]
+        result = self.nodes[1].importmulti(request)
+        assert_equal(result[0]['success'], False)
+        assert_equal(result[0]['error']['code'], -8)
+        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], False)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal('timestamp' in address_assert, False)
+
+        # Address + Private key + !watchonly
+        self.log.info("Should import an address with private key")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": address['address']
+            },
+            "timestamp": "now",
+            "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
+        }])
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], False)
+        assert_equal(address_assert['ismine'], True)
+        assert_equal(address_assert['timestamp'], timestamp)
+
+        # Address + Private key + watchonly
+        self.log.info("Should not import an address with private key and with watchonly")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": address['address']
+            },
+            "timestamp": "now",
+            "keys": [ self.nodes[0].dumpprivkey(address['address']) ],
+            "watchonly": True
+        }])
+        assert_equal(result[0]['success'], False)
+        assert_equal(result[0]['error']['code'], -8)
+        assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], False)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal('timestamp' in address_assert, False)
+
+        # ScriptPubKey + Private key + internal
+        self.log.info("Should import a scriptPubKey with internal and with private key")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": address['scriptPubKey'],
+            "timestamp": "now",
+            "keys": [ self.nodes[0].dumpprivkey(address['address']) ],
+            "internal": True
+        }])
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], False)
+        assert_equal(address_assert['ismine'], True)
+        assert_equal(address_assert['timestamp'], timestamp)
+
+        # ScriptPubKey + Private key + !internal
+        self.log.info("Should not import a scriptPubKey without internal and with private key")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": address['scriptPubKey'],
+            "timestamp": "now",
+            "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
+        }])
+        assert_equal(result[0]['success'], False)
+        assert_equal(result[0]['error']['code'], -8)
+        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], False)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal('timestamp' in address_assert, False)
+
+
+        # P2SH address
+        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
+        self.nodes[1].generate(100)
+        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+        self.nodes[1].generate(1)
+        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+        transaction = self.nodes[1].gettransaction(transactionid)
+
+        self.log.info("Should import a p2sh")
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": multi_sig_script['address']
+            },
+            "timestamp": "now",
+        }])
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
+        assert_equal(address_assert['isscript'], True)
+        assert_equal(address_assert['iswatchonly'], True)
+        assert_equal(address_assert['timestamp'], timestamp)
+        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
+        assert_equal(p2shunspent['spendable'], False)
+        assert_equal(p2shunspent['solvable'], False)
+
+
+        # P2SH + Redeem script
+        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
+        self.nodes[1].generate(100)
+        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+        self.nodes[1].generate(1)
+        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+        transaction = self.nodes[1].gettransaction(transactionid)
+
+        self.log.info("Should import a p2sh with respective redeem script")
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": multi_sig_script['address']
+            },
+            "timestamp": "now",
+            "redeemscript": multi_sig_script['redeemScript']
+        }])
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
+        assert_equal(address_assert['timestamp'], timestamp)
+
+        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
+        assert_equal(p2shunspent['spendable'], False)
+        assert_equal(p2shunspent['solvable'], True)
+
+
+        # P2SH + Redeem script + Private Keys + !Watchonly
+        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
+        self.nodes[1].generate(100)
+        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+        self.nodes[1].generate(1)
+        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+        transaction = self.nodes[1].gettransaction(transactionid)
+
+        self.log.info("Should import a p2sh with respective redeem script and private keys")
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": multi_sig_script['address']
+            },
+            "timestamp": "now",
+            "redeemscript": multi_sig_script['redeemScript'],
+            "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])]
+        }])
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
+        assert_equal(address_assert['timestamp'], timestamp)
+
+        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
+        assert_equal(p2shunspent['spendable'], False)
+        assert_equal(p2shunspent['solvable'], True)
+
+        # P2SH + Redeem script + Private Keys + Watchonly
+        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
+        self.nodes[1].generate(100)
+        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+        self.nodes[1].generate(1)
+        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+        transaction = self.nodes[1].gettransaction(transactionid)
+
+        self.log.info("Should import a p2sh with respective redeem script and private keys")
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": multi_sig_script['address']
+            },
+            "timestamp": "now",
+            "redeemscript": multi_sig_script['redeemScript'],
+            "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])],
+            "watchonly": True
+        }])
+        assert_equal(result[0]['success'], False)
+        assert_equal(result[0]['error']['code'], -8)
+        assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
+
+
+        # Address + Public key + !Internal + Wrong pubkey
+        self.log.info("Should not import an address with a wrong public key")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": address['address']
+            },
+            "timestamp": "now",
+            "pubkeys": [ address2['pubkey'] ]
+        }])
+        assert_equal(result[0]['success'], False)
+        assert_equal(result[0]['error']['code'], -5)
+        assert_equal(result[0]['error']['message'], 'Consistency check failed')
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], False)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal('timestamp' in address_assert, False)
+
+
+        # ScriptPubKey + Public key + internal + Wrong pubkey
+        self.log.info("Should not import a scriptPubKey with internal and with a wrong public key")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        request = [{
+            "scriptPubKey": address['scriptPubKey'],
+            "timestamp": "now",
+            "pubkeys": [ address2['pubkey'] ],
+            "internal": True
+        }]
+        result = self.nodes[1].importmulti(request)
+        assert_equal(result[0]['success'], False)
+        assert_equal(result[0]['error']['code'], -5)
+        assert_equal(result[0]['error']['message'], 'Consistency check failed')
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], False)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal('timestamp' in address_assert, False)
+
+
+        # Address + Private key + !watchonly + Wrong private key
+        self.log.info("Should not import an address with a wrong private key")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": address['address']
+            },
+            "timestamp": "now",
+            "keys": [ self.nodes[0].dumpprivkey(address2['address']) ]
+        }])
+        assert_equal(result[0]['success'], False)
+        assert_equal(result[0]['error']['code'], -5)
+        assert_equal(result[0]['error']['message'], 'Consistency check failed')
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], False)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal('timestamp' in address_assert, False)
+
+
+        # ScriptPubKey + Private key + internal + Wrong private key
+        self.log.info("Should not import a scriptPubKey with internal and with a wrong private key")
+        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": address['scriptPubKey'],
+            "timestamp": "now",
+            "keys": [ self.nodes[0].dumpprivkey(address2['address']) ],
+            "internal": True
+        }])
+        assert_equal(result[0]['success'], False)
+        assert_equal(result[0]['error']['code'], -5)
+        assert_equal(result[0]['error']['message'], 'Consistency check failed')
+        address_assert = self.nodes[1].validateaddress(address['address'])
+        assert_equal(address_assert['iswatchonly'], False)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal('timestamp' in address_assert, False)
+
+
+        # Importing existing watch only address with new timestamp should replace saved timestamp.
+        assert_greater_than(timestamp, watchonly_timestamp)
+        self.log.info("Should replace previously saved watch only timestamp.")
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": watchonly_address,
+            },
+            "timestamp": "now",
+        }])
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].validateaddress(watchonly_address)
+        assert_equal(address_assert['iswatchonly'], True)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal(address_assert['timestamp'], timestamp)
+        watchonly_timestamp = timestamp
+
+
+        # restart nodes to check for proper serialization/deserialization of watch only address
+        stop_nodes(self.nodes)
+        self.nodes = start_nodes(2, self.options.tmpdir)
+        address_assert = self.nodes[1].validateaddress(watchonly_address)
+        assert_equal(address_assert['iswatchonly'], True)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal(address_assert['timestamp'], watchonly_timestamp);
+
+        # Bad or missing timestamps
+        self.log.info("Should throw on invalid or missing timestamp values")
+        assert_raises_message(JSONRPCException, 'Missing required timestamp field for key',
+            self.nodes[1].importmulti, [{
+                "scriptPubKey": address['scriptPubKey'],
+            }])
+        assert_raises_message(JSONRPCException, 'Expected number or "now" timestamp value for key. got type string',
+            self.nodes[1].importmulti, [{
+                "scriptPubKey": address['scriptPubKey'],
+                "timestamp": "",
+            }])
+
+
+if __name__ == '__main__':
+    ImportMultiTest ().main ()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/importprunedfunds.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,124 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the importprunedfunds and removeprunedfunds RPCs."""
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+
+class ImportPrunedFundsTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 2
+
+    def setup_network(self, split=False):
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
+        connect_nodes_bi(self.nodes,0,1)
+        self.is_network_split=False
+        self.sync_all()
+
+    def run_test(self):
+        self.log.info("Mining blocks...")
+        self.nodes[0].generate(101)
+
+        self.sync_all()
+        
+        # address
+        address1 = self.nodes[0].getnewaddress()
+        # pubkey
+        address2 = self.nodes[0].getnewaddress()
+        address2_pubkey = self.nodes[0].validateaddress(address2)['pubkey']                 # Using pubkey
+        # privkey
+        address3 = self.nodes[0].getnewaddress()
+        address3_privkey = self.nodes[0].dumpprivkey(address3)                              # Using privkey
+
+        #Check only one address
+        address_info = self.nodes[0].validateaddress(address1)
+        assert_equal(address_info['ismine'], True)
+
+        self.sync_all()
+
+        #Node 1 sync test
+        assert_equal(self.nodes[1].getblockcount(),101)
+
+        #Address Test - before import
+        address_info = self.nodes[1].validateaddress(address1)
+        assert_equal(address_info['iswatchonly'], False)
+        assert_equal(address_info['ismine'], False)
+
+        address_info = self.nodes[1].validateaddress(address2)
+        assert_equal(address_info['iswatchonly'], False)
+        assert_equal(address_info['ismine'], False)
+
+        address_info = self.nodes[1].validateaddress(address3)
+        assert_equal(address_info['iswatchonly'], False)
+        assert_equal(address_info['ismine'], False)
+
+        #Send funds to self
+        txnid1 = self.nodes[0].sendtoaddress(address1, 0.1)
+        self.nodes[0].generate(1)
+        rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex']
+        proof1 = self.nodes[0].gettxoutproof([txnid1])
+
+        txnid2 = self.nodes[0].sendtoaddress(address2, 0.05)
+        self.nodes[0].generate(1)
+        rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex']
+        proof2 = self.nodes[0].gettxoutproof([txnid2])
+
+        txnid3 = self.nodes[0].sendtoaddress(address3, 0.025)
+        self.nodes[0].generate(1)
+        rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex']
+        proof3 = self.nodes[0].gettxoutproof([txnid3])
+
+        self.sync_all()
+
+        #Import with no affiliated address
+        assert_raises_jsonrpc(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1)
+
+        balance1 = self.nodes[1].getbalance("", 0, True)
+        assert_equal(balance1, Decimal(0))
+
+        #Import with affiliated address with no rescan
+        self.nodes[1].importaddress(address2, "add2", False)
+        result2 = self.nodes[1].importprunedfunds(rawtxn2, proof2)
+        balance2 = self.nodes[1].getbalance("add2", 0, True)
+        assert_equal(balance2, Decimal('0.05'))
+
+        #Import with private key with no rescan
+        self.nodes[1].importprivkey(address3_privkey, "add3", False)
+        result3 = self.nodes[1].importprunedfunds(rawtxn3, proof3)
+        balance3 = self.nodes[1].getbalance("add3", 0, False)
+        assert_equal(balance3, Decimal('0.025'))
+        balance3 = self.nodes[1].getbalance("*", 0, True)
+        assert_equal(balance3, Decimal('0.075'))
+
+        #Addresses Test - after import
+        address_info = self.nodes[1].validateaddress(address1)
+        assert_equal(address_info['iswatchonly'], False)
+        assert_equal(address_info['ismine'], False)
+        address_info = self.nodes[1].validateaddress(address2)
+        assert_equal(address_info['iswatchonly'], True)
+        assert_equal(address_info['ismine'], False)
+        address_info = self.nodes[1].validateaddress(address3)
+        assert_equal(address_info['iswatchonly'], False)
+        assert_equal(address_info['ismine'], True)
+
+        #Remove transactions
+        assert_raises_jsonrpc(-8, "Transaction does not exist in wallet.", self.nodes[1].removeprunedfunds, txnid1)
+
+        balance1 = self.nodes[1].getbalance("*", 0, True)
+        assert_equal(balance1, Decimal('0.075'))
+
+        self.nodes[1].removeprunedfunds(txnid2)
+        balance2 = self.nodes[1].getbalance("*", 0, True)
+        assert_equal(balance2, Decimal('0.025'))
+
+        self.nodes[1].removeprunedfunds(txnid3)
+        balance3 = self.nodes[1].getbalance("*", 0, True)
+        assert_equal(balance3, Decimal('0.0'))
+
+if __name__ == '__main__':
+    ImportPrunedFundsTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/invalidateblock.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,71 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the invalidateblock RPC."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class InvalidateTest(BitcoinTestFramework):
+    
+        
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 3
+
+    def setup_network(self):
+        self.nodes = []
+        self.is_network_split = False 
+        self.nodes.append(start_node(0, self.options.tmpdir))
+        self.nodes.append(start_node(1, self.options.tmpdir))
+        self.nodes.append(start_node(2, self.options.tmpdir))
+        
+    def run_test(self):
+        self.log.info("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:")
+        self.log.info("Mine 4 blocks on Node 0")
+        self.nodes[0].generate(4)
+        assert(self.nodes[0].getblockcount() == 4)
+        besthash = self.nodes[0].getbestblockhash()
+
+        self.log.info("Mine competing 6 blocks on Node 1")
+        self.nodes[1].generate(6)
+        assert(self.nodes[1].getblockcount() == 6)
+
+        self.log.info("Connect nodes to force a reorg")
+        connect_nodes_bi(self.nodes,0,1)
+        sync_blocks(self.nodes[0:2])
+        assert(self.nodes[0].getblockcount() == 6)
+        badhash = self.nodes[1].getblockhash(2)
+
+        self.log.info("Invalidate block 2 on node 0 and verify we reorg to node 0's original chain")
+        self.nodes[0].invalidateblock(badhash)
+        newheight = self.nodes[0].getblockcount()
+        newhash = self.nodes[0].getbestblockhash()
+        if (newheight != 4 or newhash != besthash):
+            raise AssertionError("Wrong tip for node0, hash %s, height %d"%(newhash,newheight))
+
+        self.log.info("Make sure we won't reorg to a lower work chain:")
+        connect_nodes_bi(self.nodes,1,2)
+        self.log.info("Sync node 2 to node 1 so both have 6 blocks")
+        sync_blocks(self.nodes[1:3])
+        assert(self.nodes[2].getblockcount() == 6)
+        self.log.info("Invalidate block 5 on node 1 so its tip is now at 4")
+        self.nodes[1].invalidateblock(self.nodes[1].getblockhash(5))
+        assert(self.nodes[1].getblockcount() == 4)
+        self.log.info("Invalidate block 3 on node 2, so its tip is now 2")
+        self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3))
+        assert(self.nodes[2].getblockcount() == 2)
+        self.log.info("..and then mine a block")
+        self.nodes[2].generate(1)
+        self.log.info("Verify all nodes are at the right height")
+        time.sleep(5)
+        assert_equal(self.nodes[2].getblockcount(), 3)
+        assert_equal(self.nodes[0].getblockcount(), 4)
+        node1height = self.nodes[1].getblockcount()
+        if node1height < 4:
+            raise AssertionError("Node 1 reorged to a lower height: %d"%node1height)
+
+if __name__ == '__main__':
+    InvalidateTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/invalidblockrequest.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test node responses to invalid blocks.
+
+In this test we connect to one node over p2p, and test block requests:
+1) Valid blocks should be requested and become chain tip.
+2) Invalid block with duplicated transaction should be re-requested.
+3) Invalid block with bad coinbase value should be rejected and not
+re-requested.
+"""
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.comptool import TestManager, TestInstance, RejectResult
+from test_framework.blocktools import *
+import copy
+import time
+
+# Use the ComparisonTestFramework with 1 node: only use --testbinary.
+class InvalidBlockRequestTest(ComparisonTestFramework):
+
+    '''Can either run this test as 1 node with expected answers, or two and compare them.
+       Change the "outcome" variable from each TestInstance object to only do the comparison.'''
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 1
+
+    def run_test(self):
+        # Drive the comparison framework: comptool feeds each item yielded
+        # by get_tests() to the node and checks acceptance/rejection.
+        test = TestManager(self, self.options.tmpdir)
+        test.add_all_connections(self.nodes)
+        self.tip = None
+        self.block_time = None
+        NetworkThread().start() # Start up network handling in another thread
+        test.run()
+
+    def get_tests(self):
+        # Generator of TestInstances; yields are consumed in order by
+        # TestManager.run(), so statement order here is significant.
+        if self.tip is None:
+            self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+        self.block_time = int(time.time())+1
+
+        '''
+        Create a new block with an anyone-can-spend coinbase
+        '''
+        height = 1
+        block = create_block(self.tip, create_coinbase(height), self.block_time)
+        self.block_time += 1
+        block.solve()
+        # Save the coinbase for later
+        self.block1 = block
+        self.tip = block.sha256
+        height += 1
+        yield TestInstance([[block, True]])
+
+        '''
+        Now we need that block to mature so we can spend the coinbase.
+        '''
+        test = TestInstance(sync_every_block=False)
+        for i in range(100):
+            block = create_block(self.tip, create_coinbase(height), self.block_time)
+            block.solve()
+            self.tip = block.sha256
+            self.block_time += 1
+            test.blocks_and_transactions.append([block, True])
+            height += 1
+        yield test
+
+        '''
+        Now we use merkle-root malleability to generate an invalid block with
+        same blockheader.
+        Manufacture a block with 3 transactions (coinbase, spend of prior
+        coinbase, spend of that spend).  Duplicate the 3rd transaction to 
+        leave merkle root and blockheader unchanged but invalidate the block.
+        '''
+        block2 = create_block(self.tip, create_coinbase(height), self.block_time)
+        self.block_time += 1
+
+        # b'\x51' is OP_TRUE (anyone-can-spend output script)
+        tx1 = create_transaction(self.block1.vtx[0], 0, b'\x51', 50 * COIN)
+        tx2 = create_transaction(tx1, 0, b'\x51', 50 * COIN)
+
+        block2.vtx.extend([tx1, tx2])
+        block2.hashMerkleRoot = block2.calc_merkle_root()
+        block2.rehash()
+        block2.solve()
+        orig_hash = block2.sha256
+        block2_orig = copy.deepcopy(block2)
+
+        # Mutate block 2: duplicating the last tx leaves the merkle root
+        # (and therefore the header and hash) unchanged but makes it invalid.
+        block2.vtx.append(tx2)
+        assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
+        assert_equal(orig_hash, block2.rehash())
+        assert(block2_orig.vtx != block2.vtx)
+
+        self.tip = block2.sha256
+        yield TestInstance([[block2, RejectResult(16, b'bad-txns-duplicate')], [block2_orig, True]])
+        height += 1
+
+        '''
+        Make sure that a totally screwed up block is not valid.
+        '''
+        block3 = create_block(self.tip, create_coinbase(height), self.block_time)
+        self.block_time += 1
+        block3.vtx[0].vout[0].nValue = 100 * COIN # Too high!
+        block3.vtx[0].sha256=None
+        block3.vtx[0].calc_sha256()
+        block3.hashMerkleRoot = block3.calc_merkle_root()
+        block3.rehash()
+        block3.solve()
+
+        yield TestInstance([[block3, RejectResult(16, b'bad-cb-amount')]])
+
+if __name__ == '__main__':
+    InvalidBlockRequestTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/invalidtxrequest.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test node responses to invalid transactions.
+
+In this test we connect to one node over p2p, and test tx requests.
+"""
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.comptool import TestManager, TestInstance, RejectResult
+from test_framework.blocktools import *
+import time
+
+
+
+# Use the ComparisonTestFramework with 1 node: only use --testbinary.
+class InvalidTxRequestTest(ComparisonTestFramework):
+
+    '''Can either run this test as 1 node with expected answers, or two and compare them.
+       Change the "outcome" variable from each TestInstance object to only do the comparison.'''
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 1
+
+    def run_test(self):
+        # Drive the comparison framework: comptool feeds each item yielded
+        # by get_tests() to the node and checks acceptance/rejection.
+        test = TestManager(self, self.options.tmpdir)
+        test.add_all_connections(self.nodes)
+        self.tip = None
+        self.block_time = None
+        NetworkThread().start() # Start up network handling in another thread
+        test.run()
+
+    def get_tests(self):
+        # Generator of TestInstances; consumed in order by TestManager.run().
+        if self.tip is None:
+            self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+        self.block_time = int(time.time())+1
+
+        '''
+        Create a new block with an anyone-can-spend coinbase
+        '''
+        height = 1
+        block = create_block(self.tip, create_coinbase(height), self.block_time)
+        self.block_time += 1
+        block.solve()
+        # Save the coinbase for later
+        self.block1 = block
+        self.tip = block.sha256
+        height += 1
+        yield TestInstance([[block, True]])
+
+        '''
+        Now we need that block to mature so we can spend the coinbase.
+        '''
+        test = TestInstance(sync_every_block=False)
+        for i in range(100):
+            block = create_block(self.tip, create_coinbase(height), self.block_time)
+            block.solve()
+            self.tip = block.sha256
+            self.block_time += 1
+            test.blocks_and_transactions.append([block, True])
+            height += 1
+        yield test
+
+        # b'\x64' is OP_NOTIF: the scriptPubKey fails evaluation, so the
+        # spending transaction is rejected with code 16 (REJECT_INVALID).
+        tx1 = create_transaction(self.block1.vtx[0], 0, b'\x64', 50 * COIN - 12000)
+        yield TestInstance([[tx1, RejectResult(16, b'mandatory-script-verify-flag-failed')]])
+
+        # TODO: test further transactions...
+
+if __name__ == '__main__':
+    InvalidTxRequestTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/keypool.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,72 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the wallet keypool and interaction with wallet encryption/locking."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class KeyPoolTest(BitcoinTestFramework):
+
+    def run_test(self):
+        nodes = self.nodes
+        addr_before_encrypting = nodes[0].getnewaddress()
+        addr_before_encrypting_data = nodes[0].validateaddress(addr_before_encrypting)
+        wallet_info_old = nodes[0].getwalletinfo()
+        assert(addr_before_encrypting_data['hdmasterkeyid'] == wallet_info_old['hdmasterkeyid'])
+        
+        # Encrypt wallet and wait to terminate
+        nodes[0].encryptwallet('test')
+        bitcoind_processes[0].wait()
+        # Restart node 0
+        nodes[0] = start_node(0, self.options.tmpdir)
+        # Keep creating keys
+        addr = nodes[0].getnewaddress()
+        addr_data = nodes[0].validateaddress(addr)
+        wallet_info = nodes[0].getwalletinfo()
+        assert(addr_before_encrypting_data['hdmasterkeyid'] != wallet_info['hdmasterkeyid'])
+        assert(addr_data['hdmasterkeyid'] == wallet_info['hdmasterkeyid'])
+        
+        assert_raises_jsonrpc(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
+
+        # put three new keys in the keypool
+        nodes[0].walletpassphrase('test', 12000)
+        nodes[0].keypoolrefill(3)
+        nodes[0].walletlock()
+
+        # drain the keys
+        addr = set()
+        addr.add(nodes[0].getrawchangeaddress())
+        addr.add(nodes[0].getrawchangeaddress())
+        addr.add(nodes[0].getrawchangeaddress())
+        addr.add(nodes[0].getrawchangeaddress())
+        # assert that four unique addresses were returned
+        assert(len(addr) == 4)
+        # the next one should fail
+        assert_raises_jsonrpc(-12, "Keypool ran out", nodes[0].getrawchangeaddress)
+
+        # refill keypool with three new addresses
+        nodes[0].walletpassphrase('test', 1)
+        nodes[0].keypoolrefill(3)
+        # test walletpassphrase timeout
+        time.sleep(1.1)
+        assert_equal(nodes[0].getwalletinfo()["unlocked_until"], 0)
+
+        # drain them by mining
+        nodes[0].generate(1)
+        nodes[0].generate(1)
+        nodes[0].generate(1)
+        nodes[0].generate(1)
+        assert_raises_jsonrpc(-12, "Keypool ran out", nodes[0].generate, 1)
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = False
+        self.num_nodes = 1
+
+    def setup_network(self):
+        self.nodes = self.setup_nodes()
+
+if __name__ == '__main__':
+    KeyPoolTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/listsinceblock.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,81 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the listsinceblock RPC."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+class ListSinceBlockTest (BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 4
+
+    def run_test (self):
+        '''
+        `listsinceblock` did not behave correctly when handed a block that was
+        no longer in the main chain:
+
+             ab0
+          /       \
+        aa1 [tx0]   bb1
+         |           |
+        aa2         bb2
+         |           |
+        aa3         bb3
+                     |
+                    bb4
+
+        Consider a client that has only seen block `aa3` above. It asks the node
+        to `listsinceblock aa3`. But at some point prior the main chain switched
+        to the bb chain.
+
+        Previously: listsinceblock would find height=4 for block aa3 and compare
+        this to height=5 for the tip of the chain (bb4). It would then return
+        results restricted to bb3-bb4.
+
+        Now: listsinceblock finds the fork at ab0 and returns results in the
+        range bb1-bb4.
+
+        This test only checks that [tx0] is present.
+        '''
+
+        assert_equal(self.is_network_split, False)
+        # Mature a coinbase on node 2 so it has spendable funds.
+        self.nodes[2].generate(101)
+        self.sync_all()
+
+        assert_equal(self.nodes[0].getbalance(), 0)
+        assert_equal(self.nodes[1].getbalance(), 0)
+        assert_equal(self.nodes[2].getbalance(), 50)
+        assert_equal(self.nodes[3].getbalance(), 0)
+
+        # Split network into two
+        self.split_network()
+        assert_equal(self.is_network_split, True)
+
+        # send to nodes[0] from nodes[2]
+        senttx = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1)
+
+        # generate on both sides; the 7-block side will win on rejoin,
+        # orphaning the block referenced by lastblockhash.
+        lastblockhash = self.nodes[1].generate(6)[5]
+        self.nodes[2].generate(7)
+        self.log.info('lastblockhash=%s' % (lastblockhash))
+
+        self.sync_all()
+
+        self.join_network()
+
+        # listsinceblock(lastblockhash) should now include tx, as seen from nodes[0]
+        lsbres = self.nodes[0].listsinceblock(lastblockhash)
+        found = False
+        for tx in lsbres['transactions']:
+            if tx['txid'] == senttx:
+                found = True
+                break
+        assert_equal(found, True)
+
+if __name__ == '__main__':
+    ListSinceBlockTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/listtransactions.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,207 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the listtransactions API."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.mininode import CTransaction, COIN
+from io import BytesIO
+
+def txFromHex(hexstring):
+    tx = CTransaction()
+    f = BytesIO(hex_str_to_bytes(hexstring))
+    tx.deserialize(f)
+    return tx
+
+class ListTransactionsTest(BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 4
+        # Reuse the cached pre-mined chain; this test needs mature funds.
+        self.setup_clean_chain = False
+
+    def setup_nodes(self):
+        # This test requires mocktime: enable it before starting the nodes
+        # so transaction timestamps are deterministic across runs.
+        enable_mocktime()
+        return start_nodes(self.num_nodes, self.options.tmpdir)
+
+    def run_test(self):
+        # Simple send, 0 to 1:
+        txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
+        self.sync_all()
+        assert_array_result(self.nodes[0].listtransactions(),
+                           {"txid":txid},
+                           {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
+        assert_array_result(self.nodes[1].listtransactions(),
+                           {"txid":txid},
+                           {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})
+        # mine a block, confirmations should change:
+        self.nodes[0].generate(1)
+        self.sync_all()
+        assert_array_result(self.nodes[0].listtransactions(),
+                           {"txid":txid},
+                           {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
+        assert_array_result(self.nodes[1].listtransactions(),
+                           {"txid":txid},
+                           {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})
+
+        # send-to-self shows up as both a "send" and a "receive" entry:
+        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
+        assert_array_result(self.nodes[0].listtransactions(),
+                           {"txid":txid, "category":"send"},
+                           {"amount":Decimal("-0.2")})
+        assert_array_result(self.nodes[0].listtransactions(),
+                           {"txid":txid, "category":"receive"},
+                           {"amount":Decimal("0.2")})
+
+        # sendmany from node1: twice to self, twice to node2:
+        send_to = { self.nodes[0].getnewaddress() : 0.11,
+                    self.nodes[1].getnewaddress() : 0.22,
+                    self.nodes[0].getaccountaddress("from1") : 0.33,
+                    self.nodes[1].getaccountaddress("toself") : 0.44 }
+        txid = self.nodes[1].sendmany("", send_to)
+        self.sync_all()
+        assert_array_result(self.nodes[1].listtransactions(),
+                           {"category":"send","amount":Decimal("-0.11")},
+                           {"txid":txid} )
+        assert_array_result(self.nodes[0].listtransactions(),
+                           {"category":"receive","amount":Decimal("0.11")},
+                           {"txid":txid} )
+        assert_array_result(self.nodes[1].listtransactions(),
+                           {"category":"send","amount":Decimal("-0.22")},
+                           {"txid":txid} )
+        assert_array_result(self.nodes[1].listtransactions(),
+                           {"category":"receive","amount":Decimal("0.22")},
+                           {"txid":txid} )
+        assert_array_result(self.nodes[1].listtransactions(),
+                           {"category":"send","amount":Decimal("-0.33")},
+                           {"txid":txid} )
+        assert_array_result(self.nodes[0].listtransactions(),
+                           {"category":"receive","amount":Decimal("0.33")},
+                           {"txid":txid, "account" : "from1"} )
+        assert_array_result(self.nodes[1].listtransactions(),
+                           {"category":"send","amount":Decimal("-0.44")},
+                           {"txid":txid, "account" : ""} )
+        assert_array_result(self.nodes[1].listtransactions(),
+                           {"category":"receive","amount":Decimal("0.44")},
+                           {"txid":txid, "account" : "toself"} )
+
+        # A watch-only multisig (imported via redeemScript) is listed only
+        # when include_watchonly is requested.
+        multisig = self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
+        self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
+        txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
+        self.nodes[1].generate(1)
+        self.sync_all()
+        assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
+        assert_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
+                           {"category":"receive","amount":Decimal("0.1")},
+                           {"txid":txid, "account" : "watchonly"} )
+
+        self.run_rbf_opt_in_test()
+
+    # Check that the opt-in-rbf flag works properly, for sent and received
+    # transactions.
+    def run_rbf_opt_in_test(self):
+        # Check whether a transaction signals opt-in RBF itself
+        def is_opt_in(node, txid):
+            rawtx = node.getrawtransaction(txid, 1)
+            for x in rawtx["vin"]:
+                if x["sequence"] < 0xfffffffe:
+                    return True
+            return False
+
+        # Find an unconfirmed output matching a certain txid
+        def get_unconfirmed_utxo_entry(node, txid_to_match):
+            utxo = node.listunspent(0, 0)
+            for i in utxo:
+                if i["txid"] == txid_to_match:
+                    return i
+            return None
+
+        # 1. Chain a few transactions that don't opt-in.
+        txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
+        assert(not is_opt_in(self.nodes[0], txid_1))
+        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
+        sync_mempools(self.nodes)
+        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
+
+        # Tx2 will build off txid_1, still not opting in to RBF.
+        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_1)
+        assert_equal(utxo_to_use["safe"], True)
+        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
+        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
+        assert_equal(utxo_to_use["safe"], False)
+
+        # Create tx2 using createrawtransaction
+        inputs = [{"txid":utxo_to_use["txid"], "vout":utxo_to_use["vout"]}]
+        outputs = {self.nodes[0].getnewaddress(): 0.999}
+        tx2 = self.nodes[1].createrawtransaction(inputs, outputs)
+        tx2_signed = self.nodes[1].signrawtransaction(tx2)["hex"]
+        txid_2 = self.nodes[1].sendrawtransaction(tx2_signed)
+
+        # ...and check the result
+        assert(not is_opt_in(self.nodes[1], txid_2))
+        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
+        sync_mempools(self.nodes)
+        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
+
+        # Tx3 will opt-in to RBF
+        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2)
+        inputs = [{"txid": txid_2, "vout":utxo_to_use["vout"]}]
+        outputs = {self.nodes[1].getnewaddress(): 0.998}
+        tx3 = self.nodes[0].createrawtransaction(inputs, outputs)
+        tx3_modified = txFromHex(tx3)
+        tx3_modified.vin[0].nSequence = 0
+        tx3 = bytes_to_hex_str(tx3_modified.serialize())
+        tx3_signed = self.nodes[0].signrawtransaction(tx3)['hex']
+        txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)
+
+        assert(is_opt_in(self.nodes[0], txid_3))
+        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
+        sync_mempools(self.nodes)
+        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
+
+        # Tx4 will chain off tx3.  Doesn't signal itself, but depends on one
+        # that does.
+        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_3)
+        inputs = [{"txid": txid_3, "vout":utxo_to_use["vout"]}]
+        outputs = {self.nodes[0].getnewaddress(): 0.997}
+        tx4 = self.nodes[1].createrawtransaction(inputs, outputs)
+        tx4_signed = self.nodes[1].signrawtransaction(tx4)["hex"]
+        txid_4 = self.nodes[1].sendrawtransaction(tx4_signed)
+
+        assert(not is_opt_in(self.nodes[1], txid_4))
+        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})
+        sync_mempools(self.nodes)
+        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})
+
+        # Replace tx3, and check that tx4 becomes unknown
+        tx3_b = tx3_modified
+        tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN) # bump the fee
+        tx3_b = bytes_to_hex_str(tx3_b.serialize())
+        tx3_b_signed = self.nodes[0].signrawtransaction(tx3_b)['hex']
+        txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, True)
+        assert(is_opt_in(self.nodes[0], txid_3b))
+
+        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})
+        sync_mempools(self.nodes)
+        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})
+
+        # Check gettransaction as well:
+        for n in self.nodes[0:2]:
+            assert_equal(n.gettransaction(txid_1)["bip125-replaceable"], "no")
+            assert_equal(n.gettransaction(txid_2)["bip125-replaceable"], "no")
+            assert_equal(n.gettransaction(txid_3)["bip125-replaceable"], "yes")
+            assert_equal(n.gettransaction(txid_3b)["bip125-replaceable"], "yes")
+            assert_equal(n.gettransaction(txid_4)["bip125-replaceable"], "unknown")
+
+        # After mining a transaction, it's no longer BIP125-replaceable
+        self.nodes[0].generate(1)
+        assert(txid_3b not in self.nodes[0].getrawmempool())
+        assert_equal(self.nodes[0].gettransaction(txid_3b)["bip125-replaceable"], "no")
+        assert_equal(self.nodes[0].gettransaction(txid_4)["bip125-replaceable"], "unknown")
+
+
+if __name__ == '__main__':
+    ListTransactionsTest().main()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/maxblocksinflight.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,97 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test nodes responses to having many blocks in flight.
+
+In this test we connect to one node over p2p, send it numerous inv's, and
+compare the resulting number of getdata requests to a max allowed value.  We
+test for exceeding 128 blocks in flight, which was the limit a 0.9 client would
+reach. [0.10 clients shouldn't request more than 16 from a single peer.]
+"""
+
+from test_framework.mininode import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+MAX_REQUESTS = 128
+
+class TestManager(NodeConnCB):
+    # set up NodeConnCB callbacks, overriding base class
+    def on_getdata(self, conn, message):
+        self.log.debug("got getdata %s" % repr(message))
+        # Log the requests
+        for inv in message.inv:
+            if inv.hash not in self.blockReqCounts:
+                self.blockReqCounts[inv.hash] = 0
+            self.blockReqCounts[inv.hash] += 1
+
+    def on_close(self, conn):
+        if not self.disconnectOkay:
+            raise EarlyDisconnectError(0)
+
+    def __init__(self):
+        NodeConnCB.__init__(self)
+
+    def add_new_connection(self, connection):
+        self.connection = connection
+        self.blockReqCounts = {}
+        self.disconnectOkay = False
+
+    def run(self):
+        self.connection.rpc.generate(1)  # Leave IBD
+
+        numBlocksToGenerate = [8, 16, 128, 1024]
+        for count in range(len(numBlocksToGenerate)):
+            current_invs = []
+            for i in range(numBlocksToGenerate[count]):
+                current_invs.append(CInv(2, random.randrange(0, 1 << 256)))
+                if len(current_invs) >= 50000:
+                    self.connection.send_message(msg_inv(current_invs))
+                    current_invs = []
+            if len(current_invs) > 0:
+                self.connection.send_message(msg_inv(current_invs))
+
+            # Wait and see how many blocks were requested
+            time.sleep(2)
+
+            total_requests = 0
+            with mininode_lock:
+                for key in self.blockReqCounts:
+                    total_requests += self.blockReqCounts[key]
+                    if self.blockReqCounts[key] > 1:
+                        raise AssertionError("Error, test failed: block %064x requested more than once" % key)
+            if total_requests > MAX_REQUESTS:
+                raise AssertionError("Error, too many blocks (%d) requested" % total_requests)
+            self.log.info("Round %d: success (total requests: %d)" % (count, total_requests))
+
+        self.disconnectOkay = True
+        self.connection.disconnect_node()
+
+
+class MaxBlocksInFlightTest(BitcoinTestFramework):
+    def add_options(self, parser):
+        parser.add_option("--testbinary", dest="testbinary",
+                          default=os.getenv("BITCOIND", "bitcoind"),
+                          help="Binary to test max block requests behavior")
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+
+    def setup_network(self):
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
+                                 extra_args=[['-whitelist=127.0.0.1']],
+                                 binary=[self.options.testbinary])
+
+    def run_test(self):
+        test = TestManager()
+        # pass log handler through to the test manager object
+        test.log = self.log
+        test.add_new_connection(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test))
+        NetworkThread().start()  # Start up network handling in another thread
+        test.run()
+
+if __name__ == '__main__':
+    MaxBlocksInFlightTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/maxuploadtarget.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,233 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test behavior of -maxuploadtarget.
+
+* Verify that getdata requests for old blocks (>1week) are dropped
+if uploadtarget has been reached.
+* Verify that getdata requests for recent blocks are respected even
+if uploadtarget has been reached.
+* Verify that the upload counters are reset after 24 hours.
+"""
+
+from test_framework.mininode import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import time
+
+# TestNode: bare-bones "peer".  Used mostly as a conduit for a test to send
+# p2p messages to a node, generating the messages in the main testing logic.
+class TestNode(NodeConnCB):
+    def __init__(self):
+        NodeConnCB.__init__(self)
+        self.connection = None
+        self.ping_counter = 1
+        self.last_pong = msg_pong()
+        self.block_receive_map = {}
+
+    def add_connection(self, conn):
+        self.connection = conn
+        self.peer_disconnected = False
+
+    def on_inv(self, conn, message):
+        pass
+
+    # Track the last getdata message we receive (used in the test)
+    def on_getdata(self, conn, message):
+        self.last_getdata = message
+
+    def on_block(self, conn, message):
+        message.block.calc_sha256()
+        try:
+            self.block_receive_map[message.block.sha256] += 1
+        except KeyError as e:
+            self.block_receive_map[message.block.sha256] = 1
+
+    # Spin until verack message is received from the node.
+    # We use this to signal that our test can begin. This
+    # is called from the testing thread, so it needs to acquire
+    # the global lock.
+    def wait_for_verack(self):
+        def veracked():
+            return self.verack_received
+        return wait_until(veracked, timeout=10)
+
+    def wait_for_disconnect(self):
+        def disconnected():
+            return self.peer_disconnected
+        return wait_until(disconnected, timeout=10)
+
+    # Wrapper for the NodeConn's send_message function
+    def send_message(self, message):
+        self.connection.send_message(message)
+
+    def on_pong(self, conn, message):
+        self.last_pong = message
+
+    def on_close(self, conn):
+        self.peer_disconnected = True
+
+    # Sync up with the node after delivery of a block
+    def sync_with_ping(self, timeout=30):
+        def received_pong():
+            return (self.last_pong.nonce == self.ping_counter)
+        self.connection.send_message(msg_ping(nonce=self.ping_counter))
+        success = wait_until(received_pong, timeout=timeout)
+        self.ping_counter += 1
+        return success
+
+class MaxUploadTest(BitcoinTestFramework):
+ 
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+
+        # Cache for utxos, as the listunspent may take a long time later in the test
+        self.utxo_cache = []
+
+    def setup_network(self):
+        # Start a node with maxuploadtarget of 200 MB (/24h)
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir, ["-maxuploadtarget=800", "-blockmaxsize=999000"]))
+
+    def run_test(self):
+        # Before we connect anything, we first set the time on the node
+        # to be in the past, otherwise things break because the CNode
+        # time counters can't be reset backward after initialization
+        old_time = int(time.time() - 2*60*60*24*7)
+        self.nodes[0].setmocktime(old_time)
+
+        # Generate some old blocks
+        self.nodes[0].generate(130)
+
+        # test_nodes[0] will only request old blocks
+        # test_nodes[1] will only request new blocks
+        # test_nodes[2] will test resetting the counters
+        test_nodes = []
+        connections = []
+
+        for i in range(3):
+            test_nodes.append(TestNode())
+            connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
+            test_nodes[i].add_connection(connections[i])
+
+        NetworkThread().start() # Start up network handling in another thread
+        [x.wait_for_verack() for x in test_nodes]
+
+        # Test logic begins here
+
+        # Now mine a big block
+        mine_large_block(self.nodes[0], self.utxo_cache)
+
+        # Store the hash; we'll request this later
+        big_old_block = self.nodes[0].getbestblockhash()
+        old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
+        big_old_block = int(big_old_block, 16)
+
+        # Advance to two days ago
+        self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)
+
+        # Mine one more block, so that the prior block looks old
+        mine_large_block(self.nodes[0], self.utxo_cache)
+
+        # We'll be requesting this new block too
+        big_new_block = self.nodes[0].getbestblockhash()
+        big_new_block = int(big_new_block, 16)
+
+        # test_nodes[0] will test what happens if we just keep requesting the
+        # same big old block too many times (expect: disconnect)
+
+        getdata_request = msg_getdata()
+        getdata_request.inv.append(CInv(2, big_old_block))
+
+        max_bytes_per_day = 800*1024*1024
+        daily_buffer = 144 * 4000000
+        max_bytes_available = max_bytes_per_day - daily_buffer
+        success_count = max_bytes_available // old_block_size
+
+        # 576MB will be reserved for relaying new blocks, so expect this to
+        # succeed for ~235 tries.
+        for i in range(success_count):
+            test_nodes[0].send_message(getdata_request)
+            test_nodes[0].sync_with_ping()
+            assert_equal(test_nodes[0].block_receive_map[big_old_block], i+1)
+
+        assert_equal(len(self.nodes[0].getpeerinfo()), 3)
+        # At most a couple more tries should succeed (depending on how long 
+        # the test has been running so far).
+        for i in range(3):
+            test_nodes[0].send_message(getdata_request)
+        test_nodes[0].wait_for_disconnect()
+        assert_equal(len(self.nodes[0].getpeerinfo()), 2)
+        self.log.info("Peer 0 disconnected after downloading old block too many times")
+
+        # Requesting the current block on test_nodes[1] should succeed indefinitely,
+        # even when over the max upload target.
+        # We'll try 800 times
+        getdata_request.inv = [CInv(2, big_new_block)]
+        for i in range(800):
+            test_nodes[1].send_message(getdata_request)
+            test_nodes[1].sync_with_ping()
+            assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)
+
+        self.log.info("Peer 1 able to repeatedly download new block")
+
+        # But if test_nodes[1] tries for an old block, it gets disconnected too.
+        getdata_request.inv = [CInv(2, big_old_block)]
+        test_nodes[1].send_message(getdata_request)
+        test_nodes[1].wait_for_disconnect()
+        assert_equal(len(self.nodes[0].getpeerinfo()), 1)
+
+        self.log.info("Peer 1 disconnected after trying to download old block")
+
+        self.log.info("Advancing system time on node to clear counters...")
+
+        # If we advance the time by 24 hours, then the counters should reset,
+        # and test_nodes[2] should be able to retrieve the old block.
+        self.nodes[0].setmocktime(int(time.time()))
+        test_nodes[2].sync_with_ping()
+        test_nodes[2].send_message(getdata_request)
+        test_nodes[2].sync_with_ping()
+        assert_equal(test_nodes[2].block_receive_map[big_old_block], 1)
+
+        self.log.info("Peer 2 able to download old block")
+
+        [c.disconnect_node() for c in connections]
+
+        #stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1
+        self.log.info("Restarting nodes with -whitelist=127.0.0.1")
+        stop_node(self.nodes[0], 0)
+        self.nodes[0] = start_node(0, self.options.tmpdir, ["-whitelist=127.0.0.1", "-maxuploadtarget=1", "-blockmaxsize=999000"])
+
+        #recreate/reconnect 3 test nodes
+        test_nodes = []
+        connections = []
+
+        for i in range(3):
+            test_nodes.append(TestNode())
+            connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
+            test_nodes[i].add_connection(connections[i])
+
+        NetworkThread().start() # Start up network handling in another thread
+        [x.wait_for_verack() for x in test_nodes]
+
+        #retrieve 20 blocks which should be enough to break the 1MB limit
+        getdata_request.inv = [CInv(2, big_new_block)]
+        for i in range(20):
+            test_nodes[1].send_message(getdata_request)
+            test_nodes[1].sync_with_ping()
+            assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)
+
+        getdata_request.inv = [CInv(2, big_old_block)]
+        test_nodes[1].send_message(getdata_request)
+        test_nodes[1].wait_for_disconnect()
+        assert_equal(len(self.nodes[0].getpeerinfo()), 3) #node is still connected because of the whitelist
+
+        self.log.info("Peer 1 still connected after trying to download old block (whitelisted)")
+
+        [c.disconnect_node() for c in connections]
+
+if __name__ == '__main__':
+    MaxUploadTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/mempool_limit.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,53 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test mempool limiting and eviction together with the wallet."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class MempoolLimitTest(BitcoinTestFramework):
+
+    def setup_network(self):
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir, ["-maxmempool=5", "-spendzeroconfchange=0"]))
+        self.is_network_split = False
+        self.sync_all()
+        self.relayfee = self.nodes[0].getnetworkinfo()['relayfee']
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+
+        self.txouts = gen_return_txouts()
+
+    def run_test(self):
+        txids = []
+        utxos = create_confirmed_utxos(self.relayfee, self.nodes[0], 91)
+
+        #create a mempool tx that will be evicted
+        us0 = utxos.pop()
+        inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
+        outputs = {self.nodes[0].getnewaddress() : 0.0001}
+        tx = self.nodes[0].createrawtransaction(inputs, outputs)
+        self.nodes[0].settxfee(self.relayfee) # specifically fund this tx with low fee
+        txF = self.nodes[0].fundrawtransaction(tx)
+        self.nodes[0].settxfee(0) # return to automatic fee selection
+        txFS = self.nodes[0].signrawtransaction(txF['hex'])
+        txid = self.nodes[0].sendrawtransaction(txFS['hex'])
+
+        relayfee = self.nodes[0].getnetworkinfo()['relayfee']
+        base_fee = relayfee*100
+        for i in range (3):
+            txids.append([])
+            txids[i] = create_lots_of_big_transactions(self.nodes[0], self.txouts, utxos[30*i:30*i+30], 30, (i+1)*base_fee)
+
+        # by now, the tx should be evicted, check confirmation state
+        assert(txid not in self.nodes[0].getrawmempool())
+        txdata = self.nodes[0].gettransaction(txid)
+        assert(txdata['confirmations'] ==  0) #confirmation should still be 0
+
+if __name__ == '__main__':
+    MempoolLimitTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/mempool_packages.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,239 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test descendant package tracking code."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.mininode import COIN
+
+MAX_ANCESTORS = 25
+MAX_DESCENDANTS = 25
+
+class MempoolPackagesTest(BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 2
+        self.setup_clean_chain = False
+
+    def setup_network(self):
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir, ["-maxorphantx=1000"]))
+        self.nodes.append(start_node(1, self.options.tmpdir, ["-maxorphantx=1000", "-limitancestorcount=5"]))
+        connect_nodes(self.nodes[0], 1)
+        self.is_network_split = False
+        self.sync_all()
+
+    # Build a transaction that spends parent_txid:vout
+    # Return amount sent
+    def chain_transaction(self, node, parent_txid, vout, value, fee, num_outputs):
+        send_value = satoshi_round((value - fee)/num_outputs)
+        inputs = [ {'txid' : parent_txid, 'vout' : vout} ]
+        outputs = {}
+        for i in range(num_outputs):
+            outputs[node.getnewaddress()] = send_value
+        rawtx = node.createrawtransaction(inputs, outputs)
+        signedtx = node.signrawtransaction(rawtx)
+        txid = node.sendrawtransaction(signedtx['hex'])
+        fulltx = node.getrawtransaction(txid, 1)
+        assert(len(fulltx['vout']) == num_outputs) # make sure we didn't generate a change output
+        return (txid, send_value)
+
+    def run_test(self):
+        ''' Mine some blocks and have them mature. '''
+        self.nodes[0].generate(101)
+        utxo = self.nodes[0].listunspent(10)
+        txid = utxo[0]['txid']
+        vout = utxo[0]['vout']
+        value = utxo[0]['amount']
+
+        fee = Decimal("0.0001")
+        # MAX_ANCESTORS transactions off a confirmed tx should be fine
+        chain = []
+        for i in range(MAX_ANCESTORS):
+            (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, 0, value, fee, 1)
+            value = sent_value
+            chain.append(txid)
+
+        # Check mempool has MAX_ANCESTORS transactions in it, and descendant
+        # count and fees should look correct
+        mempool = self.nodes[0].getrawmempool(True)
+        assert_equal(len(mempool), MAX_ANCESTORS)
+        descendant_count = 1
+        descendant_fees = 0
+        descendant_size = 0
+
+        descendants = []
+        ancestors = list(chain)
+        for x in reversed(chain):
+            # Check that getmempoolentry is consistent with getrawmempool
+            entry = self.nodes[0].getmempoolentry(x)
+            assert_equal(entry, mempool[x])
+
+            # Check that the descendant calculations are correct
+            assert_equal(mempool[x]['descendantcount'], descendant_count)
+            descendant_fees += mempool[x]['fee']
+            assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee'])
+            assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN)
+            descendant_size += mempool[x]['size']
+            assert_equal(mempool[x]['descendantsize'], descendant_size)
+            descendant_count += 1
+
+            # Check that getmempooldescendants is correct
+            assert_equal(sorted(descendants), sorted(self.nodes[0].getmempooldescendants(x)))
+            descendants.append(x)
+
+            # Check that getmempoolancestors is correct
+            ancestors.remove(x)
+            assert_equal(sorted(ancestors), sorted(self.nodes[0].getmempoolancestors(x)))
+
+        # Check that getmempoolancestors/getmempooldescendants correctly handle verbose=true
+        v_ancestors = self.nodes[0].getmempoolancestors(chain[-1], True)
+        assert_equal(len(v_ancestors), len(chain)-1)
+        for x in v_ancestors.keys():
+            assert_equal(mempool[x], v_ancestors[x])
+        assert(chain[-1] not in v_ancestors.keys())
+
+        v_descendants = self.nodes[0].getmempooldescendants(chain[0], True)
+        assert_equal(len(v_descendants), len(chain)-1)
+        for x in v_descendants.keys():
+            assert_equal(mempool[x], v_descendants[x])
+        assert(chain[0] not in v_descendants.keys())
+
+        # Check that descendant modified fees includes fee deltas from
+        # prioritisetransaction
+        self.nodes[0].prioritisetransaction(chain[-1], 1000)
+        mempool = self.nodes[0].getrawmempool(True)
+
+        descendant_fees = 0
+        for x in reversed(chain):
+            descendant_fees += mempool[x]['fee']
+            assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 1000)
+
+        # Adding one more transaction on to the chain should fail.
+        assert_raises_jsonrpc(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], txid, vout, value, fee, 1)
+
+        # Check that prioritising a tx before it's added to the mempool works
+        # First clear the mempool by mining a block.
+        self.nodes[0].generate(1)
+        sync_blocks(self.nodes)
+        assert_equal(len(self.nodes[0].getrawmempool()), 0)
+        # Prioritise a transaction that has been mined, then add it back to the
+        # mempool by using invalidateblock.
+        self.nodes[0].prioritisetransaction(chain[-1], 2000)
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+        # Keep node1's tip synced with node0
+        self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
+
+        # Now check that the transaction is in the mempool, with the right modified fee
+        mempool = self.nodes[0].getrawmempool(True)
+
+        descendant_fees = 0
+        for x in reversed(chain):
+            descendant_fees += mempool[x]['fee']
+            if (x == chain[-1]):
+                assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee']+satoshi_round(0.00002))
+            assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 2000)
+
+        # TODO: check that node1's mempool is as expected
+
+        # TODO: test ancestor size limits
+
+        # Now test descendant chain limits
+        txid = utxo[1]['txid']
+        value = utxo[1]['amount']
+        vout = utxo[1]['vout']
+
+        transaction_package = []
+        # First create one parent tx with 10 children
+        (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 10)
+        parent_transaction = txid
+        for i in range(10):
+            transaction_package.append({'txid': txid, 'vout': i, 'amount': sent_value})
+
+        # Sign and send up to MAX_DESCENDANT transactions chained off the parent tx
+        for i in range(MAX_DESCENDANTS - 1):
+            utxo = transaction_package.pop(0)
+            (txid, sent_value) = self.chain_transaction(self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
+            for j in range(10):
+                transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value})
+
+        mempool = self.nodes[0].getrawmempool(True)
+        assert_equal(mempool[parent_transaction]['descendantcount'], MAX_DESCENDANTS)
+
+        # Sending one more chained transaction will fail
+        utxo = transaction_package.pop(0)
+        assert_raises_jsonrpc(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
+
+        # TODO: check that node1's mempool is as expected
+
+        # TODO: test descendant size limits
+
+        # Test reorg handling
+        # First, the basics:
+        self.nodes[0].generate(1)
+        sync_blocks(self.nodes)
+        self.nodes[1].invalidateblock(self.nodes[0].getbestblockhash())
+        self.nodes[1].reconsiderblock(self.nodes[0].getbestblockhash())
+
+        # Now test the case where node1 has a transaction T in its mempool that
+        # depends on transactions A and B which are in a mined block, and the
+        # block containing A and B is disconnected, AND B is not accepted back
+        # into node1's mempool because its ancestor count is too high.
+
+        # Create 8 transactions, like so:
+        # Tx0 -> Tx1 (vout0)
+        #   \--> Tx2 (vout1) -> Tx3 -> Tx4 -> Tx5 -> Tx6 -> Tx7
+        #
+        # Mine them in the next block, then generate a new tx8 that spends
+        # Tx1 and Tx7, and add to node1's mempool, then disconnect the
+        # last block.
+
+        # Create tx0 with 2 outputs
+        utxo = self.nodes[0].listunspent()
+        txid = utxo[0]['txid']
+        value = utxo[0]['amount']
+        vout = utxo[0]['vout']
+
+        send_value = satoshi_round((value - fee)/2)
+        inputs = [ {'txid' : txid, 'vout' : vout} ]
+        outputs = {}
+        for i in range(2):
+            outputs[self.nodes[0].getnewaddress()] = send_value
+        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
+        signedtx = self.nodes[0].signrawtransaction(rawtx)
+        txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
+        tx0_id = txid
+        value = send_value
+
+        # Create tx1
+        (tx1_id, tx1_value) = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1)
+
+        # Create tx2-7
+        vout = 1
+        txid = tx0_id
+        for i in range(6):
+            (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 1)
+            vout = 0
+            value = sent_value
+
+        # Mine these in a block
+        self.nodes[0].generate(1)
+        self.sync_all()
+
+        # Now generate tx8, with a big fee
+        inputs = [ {'txid' : tx1_id, 'vout': 0}, {'txid' : txid, 'vout': 0} ]
+        outputs = { self.nodes[0].getnewaddress() : send_value + value - 4*fee }
+        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
+        signedtx = self.nodes[0].signrawtransaction(rawtx)
+        txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
+        sync_mempools(self.nodes)
+        
+        # Now try to disconnect the tip on each node...
+        self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
+        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+        sync_blocks(self.nodes)
+
+if __name__ == '__main__':
+    MempoolPackagesTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/mempool_reorg.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,107 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test mempool re-org scenarios.
+
+Test re-org scenarios with a mempool that contains transactions
+that spend (directly or indirectly) coinbase transactions.
+"""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+# Create one-input, one-output, no-fee transaction:
+class MempoolCoinbaseTest(BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 2
+        self.setup_clean_chain = False
+
+    alert_filename = None  # Set by setup_network
+
+    def setup_network(self):
+        args = ["-checkmempool"]
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir, args))
+        self.nodes.append(start_node(1, self.options.tmpdir, args))
+        connect_nodes(self.nodes[1], 0)
+        self.is_network_split = False
+        self.sync_all()
+
+    def run_test(self):
+        # Start with a 200 block chain
+        assert_equal(self.nodes[0].getblockcount(), 200)
+
+        # Mine four blocks. After this, nodes[0] blocks
+        # 101, 102, 103, and 104 are spend-able.
+        new_blocks = self.nodes[1].generate(4)
+        self.sync_all()
+
+        node0_address = self.nodes[0].getnewaddress()
+        node1_address = self.nodes[1].getnewaddress()
+
+        # Three scenarios for re-orging coinbase spends in the memory pool:
+        # 1. Direct coinbase spend  :  spend_101
+        # 2. Indirect (coinbase spend in chain, child in mempool) : spend_102 and spend_102_1
+        # 3. Indirect (coinbase and child both in chain) : spend_103 and spend_103_1
+        # Use invalidateblock to make all of the above coinbase spends invalid (immature coinbase),
+        # and make sure the mempool code behaves correctly.
+        b = [ self.nodes[0].getblockhash(n) for n in range(101, 105) ]
+        coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
+        spend_101_raw = create_tx(self.nodes[0], coinbase_txids[1], node1_address, 49.99)
+        spend_102_raw = create_tx(self.nodes[0], coinbase_txids[2], node0_address, 49.99)
+        spend_103_raw = create_tx(self.nodes[0], coinbase_txids[3], node0_address, 49.99)
+
+        # Create a transaction which is time-locked to two blocks in the future
+        timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 49.99})
+        # Set the time lock
+        timelock_tx = timelock_tx.replace("ffffffff", "11111191", 1)
+        timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
+        timelock_tx = self.nodes[0].signrawtransaction(timelock_tx)["hex"]
+        # This will raise an exception because the timelock transaction is too immature to spend
+        assert_raises_jsonrpc(-26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx)
+
+        # Broadcast and mine spend_102 and 103:
+        spend_102_id = self.nodes[0].sendrawtransaction(spend_102_raw)
+        spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw)
+        self.nodes[0].generate(1)
+        # Time-locked transaction is still too immature to spend
+        assert_raises_jsonrpc(-26,'non-final', self.nodes[0].sendrawtransaction, timelock_tx)
+
+        # Create 102_1 and 103_1:
+        spend_102_1_raw = create_tx(self.nodes[0], spend_102_id, node1_address, 49.98)
+        spend_103_1_raw = create_tx(self.nodes[0], spend_103_id, node1_address, 49.98)
+
+        # Broadcast and mine 103_1:
+        spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw)
+        last_block = self.nodes[0].generate(1)
+        # Time-locked transaction can now be spent
+        timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx)
+
+        # ... now put spend_101 and spend_102_1 in memory pools:
+        spend_101_id = self.nodes[0].sendrawtransaction(spend_101_raw)
+        spend_102_1_id = self.nodes[0].sendrawtransaction(spend_102_1_raw)
+
+        self.sync_all()
+
+        assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, timelock_tx_id})
+
+        for node in self.nodes:
+            node.invalidateblock(last_block[0])
+        # Time-locked transaction is now too immature and has been removed from the mempool
+        # spend_103_1 has been re-orged out of the chain and is back in the mempool
+        assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, spend_103_1_id})
+
+        # Use invalidateblock to re-org back and make all those coinbase spends
+        # immature/invalid:
+        for node in self.nodes:
+            node.invalidateblock(new_blocks[0])
+
+        self.sync_all()
+
+        # mempool should be empty.
+        assert_equal(set(self.nodes[0].getrawmempool()), set())
+
+if __name__ == '__main__':
+    MempoolCoinbaseTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/mempool_resurrect_test.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test resurrection of mined transactions when the blockchain is re-organized."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+# Create one-input, one-output, no-fee transaction:
+class MempoolCoinbaseTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 1
+        self.setup_clean_chain = False
+
+    def setup_network(self):
+        # Just need one node for this test
+        args = ["-checkmempool"]
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir, args))
+        self.is_network_split = False
+
+    def run_test(self):
+        node0_address = self.nodes[0].getnewaddress()
+        # Spend block 1/2/3's coinbase transactions
+        # Mine a block.
+        # Create three more transactions, spending the spends
+        # Mine another block.
+        # ... make sure all the transactions are confirmed
+        # Invalidate both blocks
+        # ... make sure all the transactions are put back in the mempool
+        # Mine a new block
+        # ... make sure all the transactions are confirmed again.
+
+        b = [ self.nodes[0].getblockhash(n) for n in range(1, 4) ]
+        coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
+        spends1_raw = [ create_tx(self.nodes[0], txid, node0_address, 49.99) for txid in coinbase_txids ]
+        spends1_id = [ self.nodes[0].sendrawtransaction(tx) for tx in spends1_raw ]
+
+        blocks = []
+        blocks.extend(self.nodes[0].generate(1))
+
+        spends2_raw = [ create_tx(self.nodes[0], txid, node0_address, 49.98) for txid in spends1_id ]
+        spends2_id = [ self.nodes[0].sendrawtransaction(tx) for tx in spends2_raw ]
+
+        blocks.extend(self.nodes[0].generate(1))
+
+        # mempool should be empty, all txns confirmed
+        assert_equal(set(self.nodes[0].getrawmempool()), set())
+        for txid in spends1_id+spends2_id:
+            tx = self.nodes[0].gettransaction(txid)
+            assert(tx["confirmations"] > 0)
+
+        # Use invalidateblock to re-org back; all transactions should
+        # end up unconfirmed and back in the mempool
+        for node in self.nodes:
+            node.invalidateblock(blocks[0])
+
+        # mempool should be empty, all txns confirmed
+        assert_equal(set(self.nodes[0].getrawmempool()), set(spends1_id+spends2_id))
+        for txid in spends1_id+spends2_id:
+            tx = self.nodes[0].gettransaction(txid)
+            assert(tx["confirmations"] == 0)
+
+        # Generate another block, they should all get mined
+        self.nodes[0].generate(1)
+        # mempool should be empty, all txns confirmed
+        assert_equal(set(self.nodes[0].getrawmempool()), set())
+        for txid in spends1_id+spends2_id:
+            tx = self.nodes[0].gettransaction(txid)
+            assert(tx["confirmations"] > 0)
+
+
+if __name__ == '__main__':
+    MempoolCoinbaseTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/mempool_spendcoinbase.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test spending coinbase transactions.
+
+The coinbase transaction in block N can appear in block
+N+100... so is valid in the mempool when the best block
+height is N+99.
+This test makes sure coinbase spends that will be mature
+in the next block are accepted into the memory pool,
+but less mature coinbase spends are NOT.
+"""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+# Create one-input, one-output, no-fee transaction:
+class MempoolSpendCoinbaseTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 1
+        self.setup_clean_chain = False
+
+    def setup_network(self):
+        # Just need one node for this test
+        args = ["-checkmempool"]
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir, args))
+        self.is_network_split = False
+
+    def run_test(self):
+        chain_height = self.nodes[0].getblockcount()
+        assert_equal(chain_height, 200)
+        node0_address = self.nodes[0].getnewaddress()
+
+        # Coinbase at height chain_height-100+1 ok in mempool, should
+        # get mined. Coinbase at height chain_height-100+2
+        # is too immature to spend.
+        b = [ self.nodes[0].getblockhash(n) for n in range(101, 103) ]
+        coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
+        spends_raw = [ create_tx(self.nodes[0], txid, node0_address, 49.99) for txid in coinbase_txids ]
+
+        spend_101_id = self.nodes[0].sendrawtransaction(spends_raw[0])
+
+        # coinbase at height 102 should be too immature to spend
+        assert_raises_jsonrpc(-26,"bad-txns-premature-spend-of-coinbase", self.nodes[0].sendrawtransaction, spends_raw[1])
+
+        # mempool should have just spend_101:
+        assert_equal(self.nodes[0].getrawmempool(), [ spend_101_id ])
+
+        # mine a block, spend_101 should get confirmed
+        self.nodes[0].generate(1)
+        assert_equal(set(self.nodes[0].getrawmempool()), set())
+
+        # ... and now height 102 can be spent:
+        spend_102_id = self.nodes[0].sendrawtransaction(spends_raw[1])
+        assert_equal(self.nodes[0].getrawmempool(), [ spend_102_id ])
+
+if __name__ == '__main__':
+    MempoolSpendCoinbaseTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/merkle_blocks.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,85 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test gettxoutproof and verifytxoutproof RPCs."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class MerkleBlockTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 4
+
+    def setup_network(self):
+        self.nodes = []
+        # Nodes 0/1 are "wallet" nodes
+        self.nodes.append(start_node(0, self.options.tmpdir))
+        self.nodes.append(start_node(1, self.options.tmpdir))
+        # Nodes 2/3 are used for testing
+        self.nodes.append(start_node(2, self.options.tmpdir))
+        self.nodes.append(start_node(3, self.options.tmpdir, ["-txindex"]))
+        connect_nodes(self.nodes[0], 1)
+        connect_nodes(self.nodes[0], 2)
+        connect_nodes(self.nodes[0], 3)
+
+        self.is_network_split = False
+        self.sync_all()
+
+    def run_test(self):
+        self.log.info("Mining blocks...")
+        self.nodes[0].generate(105)
+        self.sync_all()
+
+        chain_height = self.nodes[1].getblockcount()
+        assert_equal(chain_height, 105)
+        assert_equal(self.nodes[1].getbalance(), 0)
+        assert_equal(self.nodes[2].getbalance(), 0)
+
+        node0utxos = self.nodes[0].listunspent(1)
+        tx1 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99})
+        txid1 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransaction(tx1)["hex"])
+        tx2 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99})
+        txid2 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransaction(tx2)["hex"])
+        assert_raises(JSONRPCException, self.nodes[0].gettxoutproof, [txid1])
+
+        self.nodes[0].generate(1)
+        blockhash = self.nodes[0].getblockhash(chain_height + 1)
+        self.sync_all()
+
+        txlist = []
+        blocktxn = self.nodes[0].getblock(blockhash, True)["tx"]
+        txlist.append(blocktxn[1])
+        txlist.append(blocktxn[2])
+
+        assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1])), [txid1])
+        assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2])), txlist)
+        assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2], blockhash)), txlist)
+
+        txin_spent = self.nodes[1].listunspent(1).pop()
+        tx3 = self.nodes[1].createrawtransaction([txin_spent], {self.nodes[0].getnewaddress(): 49.98})
+        self.nodes[0].sendrawtransaction(self.nodes[1].signrawtransaction(tx3)["hex"])
+        self.nodes[0].generate(1)
+        self.sync_all()
+
+        txid_spent = txin_spent["txid"]
+        txid_unspent = txid1 if txin_spent["txid"] != txid1 else txid2
+
+        # We can't find the block from a fully-spent tx
+        assert_raises(JSONRPCException, self.nodes[2].gettxoutproof, [txid_spent])
+        # ...but we can if we specify the block
+        assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_spent], blockhash)), [txid_spent])
+        # ...or if the first tx is not fully-spent
+        assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_unspent])), [txid_unspent])
+        try:
+            assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2])), txlist)
+        except JSONRPCException:
+            assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid2, txid1])), txlist)
+        # ...or if we have a -txindex
+        assert_equal(self.nodes[2].verifytxoutproof(self.nodes[3].gettxoutproof([txid_spent])), [txid_spent])
+
+if __name__ == '__main__':
+    MerkleBlockTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/multi_rpc.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,117 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test multiple RPC users."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import str_to_b64str, assert_equal
+
+import os
+import http.client
+import urllib.parse
+
+class HTTPBasicsTest (BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = False
+        self.num_nodes = 1
+
+    def setup_chain(self):
+        super().setup_chain()
+        #Append rpcauth to bitcoin.conf before initialization
+        rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
+        rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
+        with open(os.path.join(self.options.tmpdir+"/node0", "bitcoin.conf"), 'a', encoding='utf8') as f:
+            f.write(rpcauth+"\n")
+            f.write(rpcauth2+"\n")
+
+    def setup_network(self):
+        self.nodes = self.setup_nodes()
+
+    def run_test(self):
+
+        ##################################################
+        # Check correctness of the rpcauth config option #
+        ##################################################
+        url = urllib.parse.urlparse(self.nodes[0].url)
+
+        #Old authpair
+        authpair = url.username + ':' + url.password
+
+        #New authpair generated via share/rpcuser tool
+        rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
+        password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
+
+        #Second authpair with different username
+        rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
+        password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
+        authpairnew = "rt:"+password
+
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        resp = conn.getresponse()
+        assert_equal(resp.status==401, False)
+        conn.close()
+        
+        #Use new authpair to confirm both work
+        headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        resp = conn.getresponse()
+        assert_equal(resp.status==401, False)
+        conn.close()
+
+        #Wrong login name with rt's password
+        authpairnew = "rtwrong:"+password
+        headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        resp = conn.getresponse()
+        assert_equal(resp.status==401, True)
+        conn.close()
+
+        #Wrong password for rt
+        authpairnew = "rt:"+password+"wrong"
+        headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        resp = conn.getresponse()
+        assert_equal(resp.status==401, True)
+        conn.close()
+
+        #Correct for rt2
+        authpairnew = "rt2:"+password2
+        headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        resp = conn.getresponse()
+        assert_equal(resp.status==401, False)
+        conn.close()
+
+        #Wrong password for rt2
+        authpairnew = "rt2:"+password2+"wrong"
+        headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        resp = conn.getresponse()
+        assert_equal(resp.status==401, True)
+        conn.close()
+
+
+if __name__ == '__main__':
+    HTTPBasicsTest ().main ()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/nodehandling.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,80 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test node handling."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+import urllib.parse
+
+class NodeHandlingTest (BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 4
+        self.setup_clean_chain = False
+
+    def run_test(self):
+        ###########################
+        # setban/listbanned tests #
+        ###########################
+        assert_equal(len(self.nodes[2].getpeerinfo()), 4) #we should have 4 nodes at this point
+        self.nodes[2].setban("127.0.0.1", "add")
+        time.sleep(3) #wait till the nodes are disconnected
+        assert_equal(len(self.nodes[2].getpeerinfo()), 0) #all nodes must be disconnected at this point
+        assert_equal(len(self.nodes[2].listbanned()), 1)
+        self.nodes[2].clearbanned()
+        assert_equal(len(self.nodes[2].listbanned()), 0)
+        self.nodes[2].setban("127.0.0.0/24", "add")
+        assert_equal(len(self.nodes[2].listbanned()), 1)
+        # This will throw an exception because 127.0.0.1 is within range 127.0.0.0/24
+        assert_raises_jsonrpc(-23, "IP/Subnet already banned", self.nodes[2].setban, "127.0.0.1", "add")
+        # This will throw an exception because 127.0.0.1/42 is not a real subnet
+        assert_raises_jsonrpc(-30, "Error: Invalid IP/Subnet", self.nodes[2].setban, "127.0.0.1/42", "add")
+        assert_equal(len(self.nodes[2].listbanned()), 1) #still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24
+        # This will throw an exception because 127.0.0.1 was not added above
+        assert_raises_jsonrpc(-30, "Error: Unban failed", self.nodes[2].setban, "127.0.0.1", "remove")
+        assert_equal(len(self.nodes[2].listbanned()), 1)
+        self.nodes[2].setban("127.0.0.0/24", "remove")
+        assert_equal(len(self.nodes[2].listbanned()), 0)
+        self.nodes[2].clearbanned()
+        assert_equal(len(self.nodes[2].listbanned()), 0)
+
+        ##test persisted banlist
+        self.nodes[2].setban("127.0.0.0/32", "add")
+        self.nodes[2].setban("127.0.0.0/24", "add")
+        self.nodes[2].setban("192.168.0.1", "add", 1) #ban for 1 second
+        self.nodes[2].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000) #ban for 1000 seconds
+        listBeforeShutdown = self.nodes[2].listbanned()
+        assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address']) #must be here
+        time.sleep(2) #make 100% sure we expired 192.168.0.1 node time
+
+        #stop node
+        stop_node(self.nodes[2], 2)
+
+        self.nodes[2] = start_node(2, self.options.tmpdir)
+        listAfterShutdown = self.nodes[2].listbanned()
+        assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
+        assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
+        assert_equal("/19" in listAfterShutdown[2]['address'], True)
+
+        ###########################
+        # RPC disconnectnode test #
+        ###########################
+        url = urllib.parse.urlparse(self.nodes[1].url)
+        self.nodes[0].disconnectnode(url.hostname+":"+str(p2p_port(1)))
+        time.sleep(2) #disconnecting a node needs a little bit of time
+        for node in self.nodes[0].getpeerinfo():
+            assert(node['addr'] != url.hostname+":"+str(p2p_port(1)))
+
+        connect_nodes_bi(self.nodes,0,1) #reconnect the node
+        found = False
+        for node in self.nodes[0].getpeerinfo():
+            if node['addr'] == url.hostname+":"+str(p2p_port(1)):
+                found = True
+        assert(found)
+
+if __name__ == '__main__':
+    NodeHandlingTest ().main ()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/nulldummy.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,136 @@
+#!/usr/bin/env python3
+# Copyright (c) 2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test NULLDUMMY softfork.
+
+Connect to a single node.
+Generate 2 blocks (save the coinbases for later).
+Generate 427 more blocks.
+[Policy/Consensus] Check that NULLDUMMY compliant transactions are accepted in the 430th block.
+[Policy] Check that non-NULLDUMMY transactions are rejected before activation.
+[Consensus] Check that the new NULLDUMMY rules are not enforced on the 431st block.
+[Policy/Consensus] Check that the new NULLDUMMY rules are enforced on the 432nd block.
+"""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.mininode import CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block, add_witness_commitment
+from test_framework.script import CScript
+from io import BytesIO
+import time
+
+NULLDUMMY_ERROR = "64: non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero)"
+
+def trueDummy(tx):
+    scriptSig = CScript(tx.vin[0].scriptSig)
+    newscript = []
+    for i in scriptSig:
+        if (len(newscript) == 0):
+            assert(len(i) == 0)
+            newscript.append(b'\x51')
+        else:
+            newscript.append(i)
+    tx.vin[0].scriptSig = CScript(newscript)
+    tx.rehash()
+
+class NULLDUMMYTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 1
+        self.setup_clean_chain = True
+
+    def setup_network(self):
+        # Must set the blockversion for this test
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
+                                 extra_args=[['-whitelist=127.0.0.1', '-walletprematurewitness']])
+
+    def run_test(self):
+        self.address = self.nodes[0].getnewaddress()
+        self.ms_address = self.nodes[0].addmultisigaddress(1,[self.address])
+        self.wit_address = self.nodes[0].addwitnessaddress(self.address)
+        self.wit_ms_address = self.nodes[0].addwitnessaddress(self.ms_address)
+
+        NetworkThread().start() # Start up network handling in another thread
+        self.coinbase_blocks = self.nodes[0].generate(2) # Block 2
+        coinbase_txid = []
+        for i in self.coinbase_blocks:
+            coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
+        self.nodes[0].generate(427) # Block 429
+        self.lastblockhash = self.nodes[0].getbestblockhash()
+        self.tip = int("0x" + self.lastblockhash, 0)
+        self.lastblockheight = 429
+        self.lastblocktime = int(time.time()) + 429
+
+        self.log.info("Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]")
+        test1txs = [self.create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, 49)]
+        txid1 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[0].serialize_with_witness()), True)
+        test1txs.append(self.create_transaction(self.nodes[0], txid1, self.ms_address, 48))
+        txid2 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[1].serialize_with_witness()), True)
+        test1txs.append(self.create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, 49))
+        txid3 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[2].serialize_with_witness()), True)
+        self.block_submit(self.nodes[0], test1txs, False, True)
+
+        self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
+        test2tx = self.create_transaction(self.nodes[0], txid2, self.ms_address, 47)
+        trueDummy(test2tx)
+        assert_raises_jsonrpc(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True)
+
+        self.log.info("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]")
+        self.block_submit(self.nodes[0], [test2tx], False, True)
+
+        self.log.info("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation")
+        test4tx = self.create_transaction(self.nodes[0], test2tx.hash, self.address, 46)
+        test6txs=[CTransaction(test4tx)]
+        trueDummy(test4tx)
+        assert_raises_jsonrpc(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True)
+        self.block_submit(self.nodes[0], [test4tx])
+
+        self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
+        test5tx = self.create_transaction(self.nodes[0], txid3, self.wit_address, 48)
+        test6txs.append(CTransaction(test5tx))
+        test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
+        assert_raises_jsonrpc(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True)
+        self.block_submit(self.nodes[0], [test5tx], True)
+
+        self.log.info("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]")
+        for i in test6txs:
+            self.nodes[0].sendrawtransaction(bytes_to_hex_str(i.serialize_with_witness()), True)
+        self.block_submit(self.nodes[0], test6txs, True, True)
+
+
+    def create_transaction(self, node, txid, to_address, amount):
+        inputs = [{ "txid" : txid, "vout" : 0}]
+        outputs = { to_address : amount }
+        rawtx = node.createrawtransaction(inputs, outputs)
+        signresult = node.signrawtransaction(rawtx)
+        tx = CTransaction()
+        f = BytesIO(hex_str_to_bytes(signresult['hex']))
+        tx.deserialize(f)
+        return tx
+
+
+    def block_submit(self, node, txs, witness = False, accept = False):
+        block = create_block(self.tip, create_coinbase(self.lastblockheight + 1), self.lastblocktime + 1)
+        block.nVersion = 4
+        for tx in txs:
+            tx.rehash()
+            block.vtx.append(tx)
+        block.hashMerkleRoot = block.calc_merkle_root()
+        witness and add_witness_commitment(block)
+        block.rehash()
+        block.solve()
+        node.submitblock(bytes_to_hex_str(block.serialize(True)))
+        if (accept):
+            assert_equal(node.getbestblockhash(), block.hash)
+            self.tip = block.sha256
+            self.lastblockhash = block.hash
+            self.lastblocktime += 1
+            self.lastblockheight += 1
+        else:
+            assert_equal(node.getbestblockhash(), self.lastblockhash)
+
+if __name__ == '__main__':
+    NULLDUMMYTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/p2p-acceptblock.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,277 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test processing of unrequested blocks.
+
+Since behavior differs when receiving unrequested blocks from whitelisted peers
+versus non-whitelisted peers, this tests the behavior of both (effectively two
+separate tests running in parallel).
+
+Setup: two nodes, node0 and node1, not connected to each other.  Node0 does not
+whitelist localhost, but node1 does. They will each be on their own chain for
+this test.
+
+We have one NodeConn connection to each, test_node and white_node respectively.
+
+The test:
+1. Generate one block on each node, to leave IBD.
+
+2. Mine a new block on each tip, and deliver to each node from node's peer.
+   The tip should advance.
+
+3. Mine a block that forks the previous block, and deliver to each node from
+   corresponding peer.
+   Node0 should not process this block (just accept the header), because it is
+   unrequested and doesn't have more work than the tip.
+   Node1 should process because this is coming from a whitelisted peer.
+
+4. Send another block that builds on the forking block.
+   Node0 should process this block but be stuck on the shorter chain, because
+   it's missing an intermediate block.
+   Node1 should reorg to this longer chain.
+
+4b.Send 288 more blocks on the longer chain.
+   Node0 should process all but the last block (too far ahead in height).
+   Send all headers to Node1, and then send the last block in that chain.
+   Node1 should accept the block because it's coming from a whitelisted peer.
+
+5. Send a duplicate of the block in #3 to Node0.
+   Node0 should not process the block because it is unrequested, and stay on
+   the shorter chain.
+
+6. Send Node0 an inv for the height 3 block produced in #4 above.
   Node0 should figure out that it is missing the height 2 block and send a
   getdata for it.
+
+7. Send Node0 the missing block again.
+   Node0 should process and the tip should advance.
+"""
+
+from test_framework.mininode import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import time
+from test_framework.blocktools import create_block, create_coinbase
+
# TestNode: bare-bones "peer".  Used mostly as a conduit for a test to send
# p2p messages to a node; the messages are generated in the main testing logic.
class TestNode(NodeConnCB):
    """Bare-bones peer used to deliver p2p messages to a node under test."""

    def __init__(self):
        NodeConnCB.__init__(self)
        self.connection = None
        self.ping_counter = 1
        self.last_pong = msg_pong()
        # Initialize here so readers never hit an AttributeError before the
        # first getdata arrives (the test also resets this under mininode_lock).
        self.last_getdata = None

    def add_connection(self, conn):
        self.connection = conn

    def on_getdata(self, conn, message):
        # Track the last getdata message we receive (used in the test)
        self.last_getdata = message

    def wait_for_verack(self):
        """Spin until a verack is received from the node.

        We use this to signal that our test can begin. This is called from
        the testing thread, so it needs to acquire the global lock.
        `verack_received` is maintained by the NodeConnCB base class.
        """
        while True:
            with mininode_lock:
                if self.verack_received:
                    return
            time.sleep(0.05)

    def send_message(self, message):
        # Wrapper for the NodeConn's send_message function
        self.connection.send_message(message)

    def on_pong(self, conn, message):
        self.last_pong = message

    def sync_with_ping(self, timeout=30):
        """Send a ping and poll (up to `timeout` seconds) for the matching pong.

        Returns True if the pong was received, False on timeout. Used to
        ensure a previously delivered block has been processed by the node.
        """
        self.connection.send_message(msg_ping(nonce=self.ping_counter))
        received_pong = False
        sleep_time = 0.05
        while not received_pong and timeout > 0:
            time.sleep(sleep_time)
            timeout -= sleep_time
            with mininode_lock:
                if self.last_pong.nonce == self.ping_counter:
                    received_pong = True
        self.ping_counter += 1
        return received_pong
+
+
class AcceptBlockTest(BitcoinTestFramework):
    """Exercise processing of unrequested blocks (steps described in the
    module docstring): node0 is not whitelisted, node1 whitelists localhost."""

    def add_options(self, parser):
        """Add --testbinary: path to the bitcoind binary under test."""
        parser.add_option("--testbinary", dest="testbinary",
                          default=os.getenv("BITCOIND", "bitcoind"),
                          help="bitcoind binary to test")

    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 2

    def setup_network(self):
        """Start two nodes that are deliberately NOT connected to each other."""
        # Node0 will be used to test behavior of processing unrequested blocks
        # from peers which are not whitelisted, while Node1 will be used for
        # the whitelisted case.
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir,
                                     binary=self.options.testbinary))
        self.nodes.append(start_node(1, self.options.tmpdir,
                                     ["-whitelist=127.0.0.1"],
                                     binary=self.options.testbinary))

    def run_test(self):
        """Run test steps 1-7 from the module docstring in order."""
        # Setup the p2p connections and start up the network thread.
        test_node = TestNode()   # connects to node0 (not whitelisted)
        white_node = TestNode()  # connects to node1 (whitelisted)

        connections = []
        connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
        connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], white_node))
        test_node.add_connection(connections[0])
        white_node.add_connection(connections[1])

        NetworkThread().start() # Start up network handling in another thread

        # Test logic begins here
        test_node.wait_for_verack()
        white_node.wait_for_verack()

        # 1. Have both nodes mine a block (leave IBD)
        [ n.generate(1) for n in self.nodes ]
        tips = [ int("0x" + n.getbestblockhash(), 0) for n in self.nodes ]

        # 2. Send one block that builds on each tip.
        # This should be accepted.
        blocks_h2 = []  # the height 2 blocks on each node's chain
        block_time = int(time.time()) + 1
        for i in range(2):
            blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
            blocks_h2[i].solve()
            block_time += 1
        test_node.send_message(msg_block(blocks_h2[0]))
        white_node.send_message(msg_block(blocks_h2[1]))

        [ x.sync_with_ping() for x in [test_node, white_node] ]
        assert_equal(self.nodes[0].getblockcount(), 2)
        assert_equal(self.nodes[1].getblockcount(), 2)
        self.log.info("First height 2 block accepted by both nodes")

        # 3. Send another block that builds on the original tip.
        blocks_h2f = []  # Blocks at height 2 that fork off the main chain
        for i in range(2):
            blocks_h2f.append(create_block(tips[i], create_coinbase(2), blocks_h2[i].nTime+1))
            blocks_h2f[i].solve()
        test_node.send_message(msg_block(blocks_h2f[0]))
        white_node.send_message(msg_block(blocks_h2f[1]))

        [ x.sync_with_ping() for x in [test_node, white_node] ]
        # node0 should only have accepted the header of the unrequested,
        # not-more-work fork block...
        for x in self.nodes[0].getchaintips():
            if x['hash'] == blocks_h2f[0].hash:
                assert_equal(x['status'], "headers-only")

        # ...while the whitelisted node1 processed the full block.
        for x in self.nodes[1].getchaintips():
            if x['hash'] == blocks_h2f[1].hash:
                assert_equal(x['status'], "valid-headers")

        self.log.info("Second height 2 block accepted only from whitelisted peer")

        # 4. Now send another block that builds on the forking chain.
        blocks_h3 = []
        for i in range(2):
            blocks_h3.append(create_block(blocks_h2f[i].sha256, create_coinbase(3), blocks_h2f[i].nTime+1))
            blocks_h3[i].solve()
        test_node.send_message(msg_block(blocks_h3[0]))
        white_node.send_message(msg_block(blocks_h3[1]))

        [ x.sync_with_ping() for x in [test_node, white_node] ]
        # Since the earlier block was not processed by node0, the new block
        # can't be fully validated.
        for x in self.nodes[0].getchaintips():
            if x['hash'] == blocks_h3[0].hash:
                assert_equal(x['status'], "headers-only")

        # But this block should be accepted by node0 since it has more work.
        self.nodes[0].getblock(blocks_h3[0].hash)
        self.log.info("Unrequested more-work block accepted from non-whitelisted peer")

        # Node1 should have accepted and reorged.
        assert_equal(self.nodes[1].getblockcount(), 3)
        self.log.info("Successfully reorged to length 3 chain from whitelisted peer")

        # 4b. Now mine 288 more blocks and deliver; all should be processed but
        # the last (height-too-high) on node0.  Node1 should process the tip if
        # we give it the headers chain leading to the tip.
        tips = blocks_h3
        headers_message = msg_headers()
        all_blocks = []   # node0's blocks
        for j in range(2):
            # j == 0: deliver full blocks to node0; j == 1: collect headers for node1.
            for i in range(288):
                next_block = create_block(tips[j].sha256, create_coinbase(i + 4), tips[j].nTime+1)
                next_block.solve()
                if j==0:
                    test_node.send_message(msg_block(next_block))
                    all_blocks.append(next_block)
                else:
                    headers_message.headers.append(CBlockHeader(next_block))
                tips[j] = next_block

        time.sleep(2)
        # Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
        for x in all_blocks[:-1]:
            self.nodes[0].getblock(x.hash)
        assert_raises_jsonrpc(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash)

        headers_message.headers.pop() # Ensure the last block is unrequested
        white_node.send_message(headers_message) # Send headers leading to tip
        white_node.send_message(msg_block(tips[1]))  # Now deliver the tip
        white_node.sync_with_ping()
        self.nodes[1].getblock(tips[1].hash)
        self.log.info("Unrequested block far ahead of tip accepted from whitelisted peer")

        # 5. Test handling of unrequested block on the node that didn't process
        # Should still not be processed (even though it has a child that has more
        # work).
        test_node.send_message(msg_block(blocks_h2f[0]))

        # Here, if the sleep is too short, the test could falsely succeed (if the
        # node hasn't processed the block by the time the sleep returns, and then
        # the node processes it and incorrectly advances the tip).
        # But this would be caught later on, when we verify that an inv triggers
        # a getdata request for this block.
        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 2)
        self.log.info("Unrequested block that would complete more-work chain was ignored")

        # 6. Try to get node to request the missing block.
        # Poke the node with an inv for block at height 3 and see if that
        # triggers a getdata on block 2 (it should if block 2 is missing).
        with mininode_lock:
            # Clear state so we can check the getdata request
            test_node.last_getdata = None
            test_node.send_message(msg_inv([CInv(2, blocks_h3[0].sha256)]))

        test_node.sync_with_ping()
        with mininode_lock:
            getdata = test_node.last_getdata

        # Check that the getdata includes the right block
        assert_equal(getdata.inv[0].hash, blocks_h2f[0].sha256)
        self.log.info("Inv at tip triggered getdata for unprocessed block")

        # 7. Send the missing block for the third time (now it is requested)
        test_node.send_message(msg_block(blocks_h2f[0]))

        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 290)
        self.log.info("Successfully reorged to longer chain from non-whitelisted peer")

        [ c.disconnect_node() for c in connections ]
+
# Run the unrequested-blocks acceptance test when executed directly.
if __name__ == '__main__':
    AcceptBlockTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/p2p-compactblocks.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,968 @@
+#!/usr/bin/env python3
+# Copyright (c) 2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test compact blocks (BIP 152).
+
+Version 1 compact blocks are pre-segwit (txids)
+Version 2 compact blocks are post-segwit (wtxids)
+"""
+
+from test_framework.mininode import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment
+from test_framework.script import CScript, OP_TRUE
+
+# TestNode: A peer we use to send messages to bitcoind, and store responses.
class TestNode(SingleNodeConnCB):
    """Peer used to send messages to bitcoind and record its responses."""

    def __init__(self):
        SingleNodeConnCB.__init__(self)
        self.last_sendcmpct = []
        self.last_headers = None
        self.last_inv = None
        self.last_cmpctblock = None
        self.block_announced = False
        self.last_getdata = None
        self.last_getheaders = None
        self.last_getblocktxn = None
        self.last_block = None
        self.last_blocktxn = None
        # Hashes of every block we've seen announced.  Used to synchronize
        # the p2p message traffic, e.g. to wait until a particular block has
        # been announced.
        self.set_announced_blockhashes = set()

    def on_sendcmpct(self, conn, message):
        self.last_sendcmpct.append(message)

    def on_block(self, conn, message):
        self.last_block = message

    def on_cmpctblock(self, conn, message):
        self.last_cmpctblock = message
        self.block_announced = True
        header = self.last_cmpctblock.header_and_shortids.header
        header.calc_sha256()
        self.set_announced_blockhashes.add(header.sha256)

    def on_headers(self, conn, message):
        self.last_headers = message
        self.block_announced = True
        for hdr in self.last_headers.headers:
            hdr.calc_sha256()
            self.set_announced_blockhashes.add(hdr.sha256)

    def on_inv(self, conn, message):
        self.last_inv = message
        for entry in self.last_inv.inv:
            if entry.type == 2:  # block inv
                self.block_announced = True
                self.set_announced_blockhashes.add(entry.hash)

    def on_getdata(self, conn, message):
        self.last_getdata = message

    def on_getheaders(self, conn, message):
        self.last_getheaders = message

    def on_getblocktxn(self, conn, message):
        self.last_getblocktxn = message

    def on_blocktxn(self, conn, message):
        self.last_blocktxn = message

    def received_block_announcement(self):
        # Requires caller to hold mininode_lock.
        return self.block_announced

    def clear_block_announcement(self):
        """Reset all block-announcement state under the p2p lock."""
        with mininode_lock:
            self.block_announced = False
            self.last_inv = None
            self.last_headers = None
            self.last_cmpctblock = None

    def get_headers(self, locator, hashstop):
        """Send a getheaders message with the given locator/hashstop."""
        request = msg_getheaders()
        request.locator.vHave = locator
        request.hashstop = hashstop
        self.connection.send_message(request)

    def send_header_for_blocks(self, new_blocks):
        """Announce `new_blocks` to the node via a headers message."""
        announcement = msg_headers()
        announcement.headers = [CBlockHeader(b) for b in new_blocks]
        self.send_message(announcement)

    def request_headers_and_sync(self, locator, hashstop=0):
        """Sync the headers chain with the node, leaving announcement state clear."""
        self.clear_block_announcement()
        self.get_headers(locator, hashstop)
        assert(wait_until(self.received_block_announcement, timeout=30))
        assert(self.received_block_announcement())
        self.clear_block_announcement()

    def wait_for_block_announcement(self, block_hash, timeout=30):
        """Block until an announcement for `block_hash` has been received."""
        return wait_until(lambda: block_hash in self.set_announced_blockhashes,
                          timeout=timeout)
+
+class CompactBlocksTest(BitcoinTestFramework):
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        # Node0 = pre-segwit, node1 = segwit-aware
+        self.num_nodes = 2
+        self.utxos = []
+
+    def setup_network(self):
+        self.nodes = []
+
+        # Start up node0 to be a version 1, pre-segwit node.
+        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, 
+                [["-bip9params=segwit:0:0"], 
+                 ["-txindex"]])
+        connect_nodes(self.nodes[0], 1)
+
+    def build_block_on_tip(self, node, segwit=False):
+        height = node.getblockcount()
+        tip = node.getbestblockhash()
+        mtp = node.getblockheader(tip)['mediantime']
+        block = create_block(int(tip, 16), create_coinbase(height + 1), mtp + 1)
+        block.nVersion = 4
+        if segwit:
+            add_witness_commitment(block)
+        block.solve()
+        return block
+
+    # Create 10 more anyone-can-spend utxo's for testing.
+    def make_utxos(self):
+        # Doesn't matter which node we use, just use node0.
+        block = self.build_block_on_tip(self.nodes[0])
+        self.test_node.send_and_ping(msg_block(block))
+        assert(int(self.nodes[0].getbestblockhash(), 16) == block.sha256)
+        self.nodes[0].generate(100)
+
+        total_value = block.vtx[0].vout[0].nValue
+        out_value = total_value // 10
+        tx = CTransaction()
+        tx.vin.append(CTxIn(COutPoint(block.vtx[0].sha256, 0), b''))
+        for i in range(10):
+            tx.vout.append(CTxOut(out_value, CScript([OP_TRUE])))
+        tx.rehash()
+
+        block2 = self.build_block_on_tip(self.nodes[0])
+        block2.vtx.append(tx)
+        block2.hashMerkleRoot = block2.calc_merkle_root()
+        block2.solve()
+        self.test_node.send_and_ping(msg_block(block2))
+        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block2.sha256)
+        self.utxos.extend([[tx.sha256, i, out_value] for i in range(10)])
+        return
+
+    # Test "sendcmpct" (between peers preferring the same version):
+    # - No compact block announcements unless sendcmpct is sent.
+    # - If sendcmpct is sent with version > preferred_version, the message is ignored.
+    # - If sendcmpct is sent with boolean 0, then block announcements are not
+    #   made with compact blocks.
+    # - If sendcmpct is then sent with boolean 1, then new block announcements
+    #   are made with compact blocks.
+    # If old_node is passed in, request compact blocks with version=preferred-1
+    # and verify that it receives block announcements via compact block.
+    def test_sendcmpct(self, node, test_node, preferred_version, old_node=None):
+        # Make sure we get a SENDCMPCT message from our peer
+        def received_sendcmpct():
+            return (len(test_node.last_sendcmpct) > 0)
+        got_message = wait_until(received_sendcmpct, timeout=30)
+        assert(received_sendcmpct())
+        assert(got_message)
+        with mininode_lock:
+            # Check that the first version received is the preferred one
+            assert_equal(test_node.last_sendcmpct[0].version, preferred_version)
+            # And that we receive versions down to 1.
+            assert_equal(test_node.last_sendcmpct[-1].version, 1)
+            test_node.last_sendcmpct = []
+
+        tip = int(node.getbestblockhash(), 16)
+
+        def check_announcement_of_new_block(node, peer, predicate):
+            peer.clear_block_announcement()
+            block_hash = int(node.generate(1)[0], 16)
+            peer.wait_for_block_announcement(block_hash, timeout=30)
+            assert(peer.block_announced)
+            assert(got_message)
+
+            with mininode_lock:
+                assert predicate(peer), (
+                    "block_hash={!r}, cmpctblock={!r}, inv={!r}".format(
+                        block_hash, peer.last_cmpctblock, peer.last_inv))
+
+        # We shouldn't get any block announcements via cmpctblock yet.
+        check_announcement_of_new_block(node, test_node, lambda p: p.last_cmpctblock is None)
+
+        # Try one more time, this time after requesting headers.
+        test_node.request_headers_and_sync(locator=[tip])
+        check_announcement_of_new_block(node, test_node, lambda p: p.last_cmpctblock is None and p.last_inv is not None)
+
+        # Test a few ways of using sendcmpct that should NOT
+        # result in compact block announcements.
+        # Before each test, sync the headers chain.
+        test_node.request_headers_and_sync(locator=[tip])
+
+        # Now try a SENDCMPCT message with too-high version
+        sendcmpct = msg_sendcmpct()
+        sendcmpct.version = preferred_version+1
+        sendcmpct.announce = True
+        test_node.send_and_ping(sendcmpct)
+        check_announcement_of_new_block(node, test_node, lambda p: p.last_cmpctblock is None)
+
+        # Headers sync before next test.
+        test_node.request_headers_and_sync(locator=[tip])
+
+        # Now try a SENDCMPCT message with valid version, but announce=False
+        sendcmpct.version = preferred_version
+        sendcmpct.announce = False
+        test_node.send_and_ping(sendcmpct)
+        check_announcement_of_new_block(node, test_node, lambda p: p.last_cmpctblock is None)
+
+        # Headers sync before next test.
+        test_node.request_headers_and_sync(locator=[tip])
+
+        # Finally, try a SENDCMPCT message with announce=True
+        sendcmpct.version = preferred_version
+        sendcmpct.announce = True
+        test_node.send_and_ping(sendcmpct)
+        check_announcement_of_new_block(node, test_node, lambda p: p.last_cmpctblock is not None)
+
+        # Try one more time (no headers sync should be needed!)
+        check_announcement_of_new_block(node, test_node, lambda p: p.last_cmpctblock is not None)
+
+        # Try one more time, after turning on sendheaders
+        test_node.send_and_ping(msg_sendheaders())
+        check_announcement_of_new_block(node, test_node, lambda p: p.last_cmpctblock is not None)
+
+        # Try one more time, after sending a version-1, announce=false message.
+        sendcmpct.version = preferred_version-1
+        sendcmpct.announce = False
+        test_node.send_and_ping(sendcmpct)
+        check_announcement_of_new_block(node, test_node, lambda p: p.last_cmpctblock is not None)
+
+        # Now turn off announcements
+        sendcmpct.version = preferred_version
+        sendcmpct.announce = False
+        test_node.send_and_ping(sendcmpct)
+        check_announcement_of_new_block(node, test_node, lambda p: p.last_cmpctblock is None and p.last_headers is not None)
+
+        if old_node is not None:
+            # Verify that a peer using an older protocol version can receive
+            # announcements from this node.
+            sendcmpct.version = preferred_version-1
+            sendcmpct.announce = True
+            old_node.send_and_ping(sendcmpct)
+            # Header sync
+            old_node.request_headers_and_sync(locator=[tip])
+            check_announcement_of_new_block(node, old_node, lambda p: p.last_cmpctblock is not None)
+
+    # This test actually causes bitcoind to (reasonably!) disconnect us, so do this last.
+    def test_invalid_cmpctblock_message(self):
+        self.nodes[0].generate(101)
+        block = self.build_block_on_tip(self.nodes[0])
+
+        cmpct_block = P2PHeaderAndShortIDs()
+        cmpct_block.header = CBlockHeader(block)
+        cmpct_block.prefilled_txn_length = 1
+        # This index will be too high
+        prefilled_txn = PrefilledTransaction(1, block.vtx[0])
+        cmpct_block.prefilled_txn = [prefilled_txn]
+        self.test_node.send_and_ping(msg_cmpctblock(cmpct_block))
+        assert(int(self.nodes[0].getbestblockhash(), 16) == block.hashPrevBlock)
+
+    # Compare the generated shortids to what we expect based on BIP 152, given
+    # bitcoind's choice of nonce.
+    def test_compactblock_construction(self, node, test_node, version, use_witness_address):
+        # Generate a bunch of transactions.
+        node.generate(101)
+        num_transactions = 25
+        address = node.getnewaddress()
+        if use_witness_address:
+            # Want at least one segwit spend, so move all funds to
+            # a witness address.
+            address = node.addwitnessaddress(address)
+            value_to_send = node.getbalance()
+            node.sendtoaddress(address, satoshi_round(value_to_send-Decimal(0.1)))
+            node.generate(1)
+
+        segwit_tx_generated = False
+        for i in range(num_transactions):
+            txid = node.sendtoaddress(address, 0.1)
+            hex_tx = node.gettransaction(txid)["hex"]
+            tx = FromHex(CTransaction(), hex_tx)
+            if not tx.wit.is_null():
+                segwit_tx_generated = True
+
+        if use_witness_address:
+            assert(segwit_tx_generated) # check that our test is not broken
+
+        # Wait until we've seen the block announcement for the resulting tip
+        tip = int(node.getbestblockhash(), 16)
+        assert(test_node.wait_for_block_announcement(tip))
+
+        # Make sure we will receive a fast-announce compact block
+        self.request_cb_announcements(test_node, node, version)
+
+        # Now mine a block, and look at the resulting compact block.
+        test_node.clear_block_announcement()
+        block_hash = int(node.generate(1)[0], 16)
+
+        # Store the raw block in our internal format.
+        block = FromHex(CBlock(), node.getblock("%02x" % block_hash, False))
+        [tx.calc_sha256() for tx in block.vtx]
+        block.rehash()
+
+        # Wait until the block was announced (via compact blocks)
+        wait_until(test_node.received_block_announcement, timeout=30)
+        assert(test_node.received_block_announcement())
+
+        # Now fetch and check the compact block
+        header_and_shortids = None
+        with mininode_lock:
+            assert(test_node.last_cmpctblock is not None)
+            # Convert the on-the-wire representation to absolute indexes
+            header_and_shortids = HeaderAndShortIDs(test_node.last_cmpctblock.header_and_shortids)
+        self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block)
+
+        # Now fetch the compact block using a normal non-announce getdata
+        with mininode_lock:
+            test_node.clear_block_announcement()
+            inv = CInv(4, block_hash)  # 4 == "CompactBlock"
+            test_node.send_message(msg_getdata([inv]))
+
+        wait_until(test_node.received_block_announcement, timeout=30)
+        assert(test_node.received_block_announcement())
+
+        # Now fetch and check the compact block
+        header_and_shortids = None
+        with mininode_lock:
+            assert(test_node.last_cmpctblock is not None)
+            # Convert the on-the-wire representation to absolute indexes
+            header_and_shortids = HeaderAndShortIDs(test_node.last_cmpctblock.header_and_shortids)
+        self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block)
+
    def check_compactblock_construction_from_block(self, version, header_and_shortids, block_hash, block):
        """Verify a received compact block against the full block it encodes.

        Checks the header hash, that the coinbase is prefilled, that all
        prefilled transactions (and, for version 2, their witnesses) match
        the block, and that every remaining transaction's shortid matches
        the BIP 152 siphash computation.

        NOTE: consumes `header_and_shortids` — its prefilled_txn and shortids
        lists are emptied by pop() as entries are verified.
        """
        # Check that we got the right block!
        header_and_shortids.header.calc_sha256()
        assert_equal(header_and_shortids.header.sha256, block_hash)

        # Make sure the prefilled_txn appears to have included the coinbase
        assert(len(header_and_shortids.prefilled_txn) >= 1)
        assert_equal(header_and_shortids.prefilled_txn[0].index, 0)

        # Check that all prefilled_txn entries match what's in the block.
        for entry in header_and_shortids.prefilled_txn:
            entry.tx.calc_sha256()
            # This checks the non-witness parts of the tx agree
            assert_equal(entry.tx.sha256, block.vtx[entry.index].sha256)

            # And this checks the witness
            wtxid = entry.tx.calc_sha256(True)
            if version == 2:
                assert_equal(wtxid, block.vtx[entry.index].calc_sha256(True))
            else:
                # Shouldn't have received a witness
                assert(entry.tx.wit.is_null())

        # Check that the cmpctblock message announced all the transactions.
        assert_equal(len(header_and_shortids.prefilled_txn) + len(header_and_shortids.shortids), len(block.vtx))

        # And now check that all the shortids are as expected as well.
        # Determine the siphash keys to use.
        [k0, k1] = header_and_shortids.get_siphash_keys()

        # Walk the block's transactions in order, matching each against either
        # the next prefilled entry or the next shortid.
        index = 0
        while index < len(block.vtx):
            if (len(header_and_shortids.prefilled_txn) > 0 and
                    header_and_shortids.prefilled_txn[0].index == index):
                # Already checked prefilled transactions above
                header_and_shortids.prefilled_txn.pop(0)
            else:
                # Version 2 shortids are computed over wtxids, version 1 over txids.
                tx_hash = block.vtx[index].sha256
                if version == 2:
                    tx_hash = block.vtx[index].calc_sha256(True)
                shortid = calculate_shortid(k0, k1, tx_hash)
                assert_equal(shortid, header_and_shortids.shortids[0])
                header_and_shortids.shortids.pop(0)
            index += 1
+
+    # Test that bitcoind requests compact blocks when we announce new blocks
+    # via header or inv, and that responding to getblocktxn causes the block
+    # to be successfully reconstructed.
+    # Post-segwit: upgraded nodes would only make this request of cb-version-2,
+    # NODE_WITNESS peers.  Unupgraded nodes would still make this request of
+    # any cb-version-1-supporting peer.
    def test_compactblock_requests(self, node, test_node, version, segwit):
        """Test that bitcoind requests compact blocks when we announce new
        blocks via header or inv, and that responding to getblocktxn causes
        the block to be successfully reconstructed.

        Post-segwit: upgraded nodes would only make this request of
        cb-version-2, NODE_WITNESS peers.  Unupgraded nodes would still make
        this request of any cb-version-1-supporting peer.
        """
        # Try announcing a block with an inv or header, expect a compactblock
        # request
        for announce in ["inv", "header"]:
            block = self.build_block_on_tip(node, segwit=segwit)
            with mininode_lock:
                test_node.last_getdata = None

            if announce == "inv":
                # An inv first triggers a getheaders; answer it, then expect getdata.
                test_node.send_message(msg_inv([CInv(2, block.sha256)]))
                success = wait_until(lambda: test_node.last_getheaders is not None, timeout=30)
                assert(success)
                test_node.send_header_for_blocks([block])
            else:
                test_node.send_header_for_blocks([block])
            success = wait_until(lambda: test_node.last_getdata is not None, timeout=30)
            assert(success)
            assert_equal(len(test_node.last_getdata.inv), 1)
            # inv type 4 == MSG_CMPCT_BLOCK: the node asked for a compact block.
            assert_equal(test_node.last_getdata.inv[0].type, 4)
            assert_equal(test_node.last_getdata.inv[0].hash, block.sha256)

            # Send back a compactblock message that omits the coinbase
            comp_block = HeaderAndShortIDs()
            comp_block.header = CBlockHeader(block)
            comp_block.nonce = 0
            [k0, k1] = comp_block.get_siphash_keys()
            # Version 2 shortids cover the wtxid rather than the txid.
            coinbase_hash = block.vtx[0].sha256
            if version == 2:
                coinbase_hash = block.vtx[0].calc_sha256(True)
            comp_block.shortids = [
                    calculate_shortid(k0, k1, coinbase_hash) ]
            test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
            # Tip must not advance: the node can't reconstruct without the coinbase.
            assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)
            # Expect a getblocktxn message.
            with mininode_lock:
                assert(test_node.last_getblocktxn is not None)
                absolute_indexes = test_node.last_getblocktxn.block_txn_request.to_absolute()
            assert_equal(absolute_indexes, [0])  # should be a coinbase request

            # Send the coinbase, and verify that the tip advances.
            if version == 2:
                msg = msg_witness_blocktxn()
            else:
                msg = msg_blocktxn()
            msg.block_transactions.blockhash = block.sha256
            msg.block_transactions.transactions = [block.vtx[0]]
            test_node.send_and_ping(msg)
            assert_equal(int(node.getbestblockhash(), 16), block.sha256)
+
+    # Create a chain of transactions from given utxo, and add to a new block.
+    def build_block_with_transactions(self, node, utxo, num_transactions):
+        """Return a solved block on the current tip whose vtx holds a coinbase
+        followed by num_transactions chained txs, each spending output 0 of
+        the previous one.
+
+        utxo is [txid_as_int, vout_index, value_in_satoshis]; each tx pays
+        its input value minus a 1000-satoshi fee to an anyone-can-spend
+        OP_TRUE output."""
+        block = self.build_block_on_tip(node)
+
+        for i in range(num_transactions):
+            tx = CTransaction()
+            tx.vin.append(CTxIn(COutPoint(utxo[0], utxo[1]), b''))
+            tx.vout.append(CTxOut(utxo[2] - 1000, CScript([OP_TRUE])))
+            tx.rehash()
+            # Chain: the next iteration spends this tx's sole output.
+            utxo = [tx.sha256, 0, tx.vout[0].nValue]
+            block.vtx.append(tx)
+
+        block.hashMerkleRoot = block.calc_merkle_root()
+        block.solve()
+        return block
+
+    # Test that we only receive getblocktxn requests for transactions that the
+    # node needs, and that responding to them causes the block to be
+    # reconstructed.
+    def test_getblocktxn_requests(self, node, test_node, version):
+        """Exercise getblocktxn round trips: the node should request exactly
+        the txs it is missing (varying prefill patterns and mempool contents)
+        and skip the request entirely when it can reconstruct immediately."""
+        with_witness = (version==2)
+
+        def test_getblocktxn_response(compact_block, peer, expected_result):
+            # Send a compact block and check the absolute indexes the node
+            # asks for in its getblocktxn reply.
+            msg = msg_cmpctblock(compact_block.to_p2p())
+            peer.send_and_ping(msg)
+            with mininode_lock:
+                assert(peer.last_getblocktxn is not None)
+                absolute_indexes = peer.last_getblocktxn.block_txn_request.to_absolute()
+            assert_equal(absolute_indexes, expected_result)
+
+        def test_tip_after_message(node, peer, msg, tip):
+            # Deliver msg and check the node's best block is now `tip`.
+            peer.send_and_ping(msg)
+            assert_equal(int(node.getbestblockhash(), 16), tip)
+
+        # First try announcing compactblocks that won't reconstruct, and verify
+        # that we receive getblocktxn messages back.
+        utxo = self.utxos.pop(0)
+
+        block = self.build_block_with_transactions(node, utxo, 5)
+        self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
+        comp_block = HeaderAndShortIDs()
+        comp_block.initialize_from_block(block, use_witness=with_witness)
+
+        # Only the coinbase is prefilled by default, so all 5 txs are missing.
+        test_getblocktxn_response(comp_block, test_node, [1, 2, 3, 4, 5])
+
+        msg_bt = msg_blocktxn()
+        if with_witness:
+            msg_bt = msg_witness_blocktxn() # serialize with witnesses
+        msg_bt.block_transactions = BlockTransactions(block.sha256, block.vtx[1:])
+        test_tip_after_message(node, test_node, msg_bt, block.sha256)
+
+        utxo = self.utxos.pop(0)
+        block = self.build_block_with_transactions(node, utxo, 5)
+        self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
+
+        # Now try interspersing the prefilled transactions
+        comp_block.initialize_from_block(block, prefill_list=[0, 1, 5], use_witness=with_witness)
+        test_getblocktxn_response(comp_block, test_node, [2, 3, 4])
+        msg_bt.block_transactions = BlockTransactions(block.sha256, block.vtx[2:5])
+        test_tip_after_message(node, test_node, msg_bt, block.sha256)
+
+        # Now try giving one transaction ahead of time.
+        utxo = self.utxos.pop(0)
+        block = self.build_block_with_transactions(node, utxo, 5)
+        self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
+        test_node.send_and_ping(msg_tx(block.vtx[1]))
+        assert(block.vtx[1].hash in node.getrawmempool())
+
+        # Prefill 4 out of the 6 transactions, and verify that only the one
+        # that was not in the mempool is requested.
+        comp_block.initialize_from_block(block, prefill_list=[0, 2, 3, 4], use_witness=with_witness)
+        test_getblocktxn_response(comp_block, test_node, [5])
+
+        msg_bt.block_transactions = BlockTransactions(block.sha256, [block.vtx[5]])
+        test_tip_after_message(node, test_node, msg_bt, block.sha256)
+
+        # Now provide all transactions to the node before the block is
+        # announced and verify reconstruction happens immediately.
+        utxo = self.utxos.pop(0)
+        block = self.build_block_with_transactions(node, utxo, 10)
+        self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
+        for tx in block.vtx[1:]:
+            test_node.send_message(msg_tx(tx))
+        test_node.sync_with_ping()
+        # Make sure all transactions were accepted.
+        mempool = node.getrawmempool()
+        for tx in block.vtx[1:]:
+            assert(tx.hash in mempool)
+
+        # Clear out last request.
+        with mininode_lock:
+            test_node.last_getblocktxn = None
+
+        # Send compact block
+        comp_block.initialize_from_block(block, prefill_list=[0], use_witness=with_witness)
+        test_tip_after_message(node, test_node, msg_cmpctblock(comp_block.to_p2p()), block.sha256)
+        with mininode_lock:
+            # Shouldn't have gotten a request for any transaction
+            assert(test_node.last_getblocktxn is None)
+
+    # Incorrectly responding to a getblocktxn shouldn't cause the block to be
+    # permanently failed.
+    def test_incorrect_blocktxn_response(self, node, test_node, version):
+        """Answer a getblocktxn with the wrong transactions, confirm the tip
+        does not advance, then confirm the node falls back to a full-block
+        getdata and still accepts the block when delivered normally."""
+        if (len(self.utxos) == 0):
+            self.make_utxos()
+        utxo = self.utxos.pop(0)
+
+        block = self.build_block_with_transactions(node, utxo, 10)
+        self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
+        # Relay the first 5 transactions from the block in advance
+        for tx in block.vtx[1:6]:
+            test_node.send_message(msg_tx(tx))
+        test_node.sync_with_ping()
+        # Make sure all transactions were accepted.
+        mempool = node.getrawmempool()
+        for tx in block.vtx[1:6]:
+            assert(tx.hash in mempool)
+
+        # Send compact block
+        comp_block = HeaderAndShortIDs()
+        comp_block.initialize_from_block(block, prefill_list=[0], use_witness=(version == 2))
+        test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
+        absolute_indexes = []
+        with mininode_lock:
+            assert(test_node.last_getblocktxn is not None)
+            absolute_indexes = test_node.last_getblocktxn.block_txn_request.to_absolute()
+        # Only the last 5 txs (indexes 6..10) were withheld from the mempool.
+        assert_equal(absolute_indexes, [6, 7, 8, 9, 10])
+
+        # Now give an incorrect response.
+        # Note that it's possible for bitcoind to be smart enough to know we're
+        # lying, since it could check to see if the shortid matches what we're
+        # sending, and eg disconnect us for misbehavior.  If that behavior
+        # change were made, we could just modify this test by having a
+        # different peer provide the block further down, so that we're still
+        # verifying that the block isn't marked bad permanently. This is good
+        # enough for now.
+        msg = msg_blocktxn()
+        if version==2:
+            msg = msg_witness_blocktxn()
+        msg.block_transactions = BlockTransactions(block.sha256, [block.vtx[5]] + block.vtx[7:])
+        test_node.send_and_ping(msg)
+
+        # Tip should not have updated
+        assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)
+
+        # We should receive a getdata request
+        success = wait_until(lambda: test_node.last_getdata is not None, timeout=10)
+        assert(success)
+        assert_equal(len(test_node.last_getdata.inv), 1)
+        # Full-block fallback: type 2 is MSG_BLOCK, optionally witness-flagged.
+        assert(test_node.last_getdata.inv[0].type == 2 or test_node.last_getdata.inv[0].type == 2|MSG_WITNESS_FLAG)
+        assert_equal(test_node.last_getdata.inv[0].hash, block.sha256)
+
+        # Deliver the block
+        if version==2:
+            test_node.send_and_ping(msg_witness_block(block))
+        else:
+            test_node.send_and_ping(msg_block(block))
+        assert_equal(int(node.getbestblockhash(), 16), block.sha256)
+
+    def test_getblocktxn_handler(self, node, test_node, version):
+        """Query getblocktxn for random tx subsets at each of the last 10
+        block heights and verify the blocktxn replies (witness-stripped for
+        version 1, witness-matching for version 2); one block deeper, the
+        node should answer with a full block instead."""
+        # bitcoind will not send blocktxn responses for blocks whose height is
+        # more than 10 blocks deep.
+        MAX_GETBLOCKTXN_DEPTH = 10
+        chain_height = node.getblockcount()
+        current_height = chain_height
+        while (current_height >= chain_height - MAX_GETBLOCKTXN_DEPTH):
+            block_hash = node.getblockhash(current_height)
+            block = FromHex(CBlock(), node.getblock(block_hash, False))
+
+            msg = msg_getblocktxn()
+            msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), [])
+            num_to_request = random.randint(1, len(block.vtx))
+            msg.block_txn_request.from_absolute(sorted(random.sample(range(len(block.vtx)), num_to_request)))
+            test_node.send_message(msg)
+            success = wait_until(lambda: test_node.last_blocktxn is not None, timeout=10)
+            assert(success)
+
+            # Populate sha256 on every tx before comparing below.
+            [tx.calc_sha256() for tx in block.vtx]
+            with mininode_lock:
+                assert_equal(test_node.last_blocktxn.block_transactions.blockhash, int(block_hash, 16))
+                all_indices = msg.block_txn_request.to_absolute()
+                for index in all_indices:
+                    tx = test_node.last_blocktxn.block_transactions.transactions.pop(0)
+                    tx.calc_sha256()
+                    assert_equal(tx.sha256, block.vtx[index].sha256)
+                    if version == 1:
+                        # Witnesses should have been stripped
+                        assert(tx.wit.is_null())
+                    else:
+                        # Check that the witness matches
+                        assert_equal(tx.calc_sha256(True), block.vtx[index].calc_sha256(True))
+                test_node.last_blocktxn = None
+            current_height -= 1
+
+        # Next request should send a full block response, as we're past the
+        # allowed depth for a blocktxn response.
+        block_hash = node.getblockhash(current_height)
+        msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), [0])
+        with mininode_lock:
+            test_node.last_block = None
+            test_node.last_blocktxn = None
+        test_node.send_and_ping(msg)
+        with mininode_lock:
+            test_node.last_block.block.calc_sha256()
+            assert_equal(test_node.last_block.block.sha256, int(block_hash, 16))
+            # No blocktxn reply at this depth, only the full block above.
+            assert_equal(test_node.last_blocktxn, None)
+
+    def test_compactblocks_not_at_tip(self, node, test_node):
+        """Check depth limits for compact blocks: a getdata for a block
+        MAX_CMPCTBLOCK_DEPTH deep still gets a cmpctblock, one deeper gets a
+        full block; an announced stale compact block is stored headers-only,
+        and getblocktxn for it is silently ignored (anti-fingerprinting)."""
+        # Test that requesting old compactblocks doesn't work.
+        MAX_CMPCTBLOCK_DEPTH = 5
+        new_blocks = []
+        for i in range(MAX_CMPCTBLOCK_DEPTH + 1):
+            test_node.clear_block_announcement()
+            new_blocks.append(node.generate(1)[0])
+            wait_until(test_node.received_block_announcement, timeout=30)
+
+        test_node.clear_block_announcement()
+        # inv type 4 = MSG_CMPCT_BLOCK; new_blocks[0] is exactly at the limit.
+        test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
+        success = wait_until(lambda: test_node.last_cmpctblock is not None, timeout=30)
+        assert(success)
+
+        test_node.clear_block_announcement()
+        node.generate(1)
+        wait_until(test_node.received_block_announcement, timeout=30)
+        test_node.clear_block_announcement()
+        with mininode_lock:
+            test_node.last_block = None
+        # Now one block past the limit: expect a full block, not a cmpctblock.
+        test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
+        success = wait_until(lambda: test_node.last_block is not None, timeout=30)
+        assert(success)
+        with mininode_lock:
+            test_node.last_block.block.calc_sha256()
+            assert_equal(test_node.last_block.block.sha256, int(new_blocks[0], 16))
+
+        # Generate an old compactblock, and verify that it's not accepted.
+        cur_height = node.getblockcount()
+        hashPrevBlock = int(node.getblockhash(cur_height-5), 16)
+        block = self.build_block_on_tip(node)
+        block.hashPrevBlock = hashPrevBlock
+        block.solve()
+
+        comp_block = HeaderAndShortIDs()
+        comp_block.initialize_from_block(block)
+        test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
+
+        tips = node.getchaintips()
+        found = False
+        for x in tips:
+            if x["hash"] == block.hash:
+                assert_equal(x["status"], "headers-only")
+                found = True
+                break
+        assert(found)
+
+        # Requesting this block via getblocktxn should silently fail
+        # (to avoid fingerprinting attacks).
+        msg = msg_getblocktxn()
+        msg.block_txn_request = BlockTransactionsRequest(block.sha256, [0])
+        with mininode_lock:
+            test_node.last_blocktxn = None
+        test_node.send_and_ping(msg)
+        with mininode_lock:
+            assert(test_node.last_blocktxn is None)
+
+    def activate_segwit(self, node):
+        """Mine three retarget periods (144*3 regtest blocks) to push the
+        segwit BIP9 deployment to 'active', and assert that it did."""
+        node.generate(144*3)
+        assert_equal(get_bip9_status(node, "segwit")["status"], 'active')
+
+    def test_end_to_end_block_relay(self, node, listeners):
+        """submitblock a 10-tx block to `node` and verify every peer in
+        `listeners` is announced the block as a cmpctblock whose header hash
+        matches."""
+        utxo = self.utxos.pop(0)
+
+        block = self.build_block_with_transactions(node, utxo, 10)
+
+        [l.clear_block_announcement() for l in listeners]
+
+        # ToHex() won't serialize with witness, but this block has no witnesses
+        # anyway. TODO: repeat this test with witness tx's to a segwit node.
+        node.submitblock(ToHex(block))
+
+        for l in listeners:
+            wait_until(lambda: l.received_block_announcement(), timeout=30)
+        with mininode_lock:
+            for l in listeners:
+                assert(l.last_cmpctblock is not None)
+                l.last_cmpctblock.header_and_shortids.header.calc_sha256()
+                assert_equal(l.last_cmpctblock.header_and_shortids.header.sha256, block.sha256)
+
+    # Test that we don't get disconnected if we relay a compact block with valid header,
+    # but invalid transactions.
+    def test_invalid_tx_in_compactblock(self, node, test_node, use_segwit):
+        assert(len(self.utxos))
+        utxo = self.utxos[0]
+
+        block = self.build_block_with_transactions(node, utxo, 5)
+        del block.vtx[3]
+        block.hashMerkleRoot = block.calc_merkle_root()
+        if use_segwit:
+            # If we're testing with segwit, also drop the coinbase witness,
+            # but include the witness commitment.
+            add_witness_commitment(block)
+            block.vtx[0].wit.vtxinwit = []
+        block.solve()
+
+        # Now send the compact block with all transactions prefilled, and
+        # verify that we don't get disconnected.
+        comp_block = HeaderAndShortIDs()
+        comp_block.initialize_from_block(block, prefill_list=[0, 1, 2, 3, 4], use_witness=use_segwit)
+        msg = msg_cmpctblock(comp_block.to_p2p())
+        test_node.send_and_ping(msg)
+
+        # Check that the tip didn't advance
+        assert(int(node.getbestblockhash(), 16) is not block.sha256)
+        test_node.sync_with_ping()
+
+    # Helper for enabling cb announcements
+    # Send the sendcmpct request and sync headers
+    def request_cb_announcements(self, peer, node, version):
+        """Ask `node` (via `peer`) to announce new blocks as compact blocks:
+        sync headers to the node's tip, then send sendcmpct with
+        announce=True at the given compact-block version."""
+        tip = node.getbestblockhash()
+        peer.get_headers(locator=[int(tip, 16)], hashstop=0)
+
+        msg = msg_sendcmpct()
+        msg.version = version
+        msg.announce = True
+        peer.send_and_ping(msg)
+
+    def test_compactblock_reconstruction_multiple_peers(self, node, stalling_peer, delivery_peer):
+        """Check reconstruction is not tied to the announcing peer: a block
+        announced by stalling_peer can be completed via delivery_peer's
+        mempool txs, and a corrupted compact block from delivery_peer can
+        still be completed by a blocktxn from stalling_peer."""
+        assert(len(self.utxos))
+
+        def announce_cmpct_block(node, peer):
+            # Announce a 5-tx block via `peer` without supplying the txs,
+            # so the node issues a getblocktxn.
+            utxo = self.utxos.pop(0)
+            block = self.build_block_with_transactions(node, utxo, 5)
+
+            cmpct_block = HeaderAndShortIDs()
+            cmpct_block.initialize_from_block(block)
+            msg = msg_cmpctblock(cmpct_block.to_p2p())
+            peer.send_and_ping(msg)
+            with mininode_lock:
+                assert(peer.last_getblocktxn is not None)
+            return block, cmpct_block
+
+        block, cmpct_block = announce_cmpct_block(node, stalling_peer)
+
+        for tx in block.vtx[1:]:
+            delivery_peer.send_message(msg_tx(tx))
+        delivery_peer.sync_with_ping()
+        mempool = node.getrawmempool()
+        for tx in block.vtx[1:]:
+            assert(tx.hash in mempool)
+
+        # Re-announcing from delivery_peer now reconstructs from the mempool.
+        delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
+        assert_equal(int(node.getbestblockhash(), 16), block.sha256)
+
+        self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
+
+        # Now test that delivering an invalid compact block won't break relay
+
+        block, cmpct_block = announce_cmpct_block(node, stalling_peer)
+        for tx in block.vtx[1:]:
+            delivery_peer.send_message(msg_tx(tx))
+        delivery_peer.sync_with_ping()
+
+        # Corrupt the prefilled coinbase with a bogus witness so the
+        # reconstructed block fails validation.
+        cmpct_block.prefilled_txn[0].tx.wit.vtxinwit = [ CTxInWitness() ]
+        cmpct_block.prefilled_txn[0].tx.wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(0)]
+
+        cmpct_block.use_witness = True
+        delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
+        assert(int(node.getbestblockhash(), 16) != block.sha256)
+
+        # The other peer's blocktxn (with the uncorrupted txs) completes it.
+        msg = msg_blocktxn()
+        msg.block_transactions.blockhash = block.sha256
+        msg.block_transactions.transactions = block.vtx[1:]
+        stalling_peer.send_and_ping(msg)
+        assert_equal(int(node.getbestblockhash(), 16), block.sha256)
+
+    def run_test(self):
+        """Drive the full compact-blocks test matrix.
+
+        Wires three mininode peers (test_node <-> node0, and segwit_node /
+        old_node <-> node1 with and without NODE_WITNESS), runs every
+        sub-test pre-segwit-activation, activates segwit on node1, then
+        re-runs the relevant sub-tests post-activation."""
+        # Setup the p2p connections and start up the network thread.
+        self.test_node = TestNode()
+        self.segwit_node = TestNode()
+        self.old_node = TestNode()  # version 1 peer <--> segwit node
+
+        connections = []
+        connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node))
+        connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1],
+                    self.segwit_node, services=NODE_NETWORK|NODE_WITNESS))
+        connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1],
+                    self.old_node, services=NODE_NETWORK))
+        self.test_node.add_connection(connections[0])
+        self.segwit_node.add_connection(connections[1])
+        self.old_node.add_connection(connections[2])
+
+        NetworkThread().start()  # Start up network handling in another thread
+
+        # Test logic begins here
+        self.test_node.wait_for_verack()
+
+        # We will need UTXOs to construct transactions in later tests.
+        self.make_utxos()
+
+        self.log.info("Running tests, pre-segwit activation:")
+
+        self.log.info("Testing SENDCMPCT p2p message... ")
+        self.test_sendcmpct(self.nodes[0], self.test_node, 1)
+        sync_blocks(self.nodes)
+        self.test_sendcmpct(self.nodes[1], self.segwit_node, 2, old_node=self.old_node)
+        sync_blocks(self.nodes)
+
+        self.log.info("Testing compactblock construction...")
+        self.test_compactblock_construction(self.nodes[0], self.test_node, 1, False)
+        sync_blocks(self.nodes)
+        self.test_compactblock_construction(self.nodes[1], self.segwit_node, 2, False)
+        sync_blocks(self.nodes)
+
+        self.log.info("Testing compactblock requests... ")
+        self.test_compactblock_requests(self.nodes[0], self.test_node, 1, False)
+        sync_blocks(self.nodes)
+        self.test_compactblock_requests(self.nodes[1], self.segwit_node, 2, False)
+        sync_blocks(self.nodes)
+
+        self.log.info("Testing getblocktxn requests...")
+        self.test_getblocktxn_requests(self.nodes[0], self.test_node, 1)
+        sync_blocks(self.nodes)
+        self.test_getblocktxn_requests(self.nodes[1], self.segwit_node, 2)
+        sync_blocks(self.nodes)
+
+        self.log.info("Testing getblocktxn handler...")
+        self.test_getblocktxn_handler(self.nodes[0], self.test_node, 1)
+        sync_blocks(self.nodes)
+        self.test_getblocktxn_handler(self.nodes[1], self.segwit_node, 2)
+        self.test_getblocktxn_handler(self.nodes[1], self.old_node, 1)
+        sync_blocks(self.nodes)
+
+        self.log.info("Testing compactblock requests/announcements not at chain tip...")
+        self.test_compactblocks_not_at_tip(self.nodes[0], self.test_node)
+        sync_blocks(self.nodes)
+        self.test_compactblocks_not_at_tip(self.nodes[1], self.segwit_node)
+        self.test_compactblocks_not_at_tip(self.nodes[1], self.old_node)
+        sync_blocks(self.nodes)
+
+        self.log.info("Testing handling of incorrect blocktxn responses...")
+        self.test_incorrect_blocktxn_response(self.nodes[0], self.test_node, 1)
+        sync_blocks(self.nodes)
+        self.test_incorrect_blocktxn_response(self.nodes[1], self.segwit_node, 2)
+        sync_blocks(self.nodes)
+
+        # End-to-end block relay tests
+        self.log.info("Testing end-to-end block relay...")
+        self.request_cb_announcements(self.test_node, self.nodes[0], 1)
+        self.request_cb_announcements(self.old_node, self.nodes[1], 1)
+        self.request_cb_announcements(self.segwit_node, self.nodes[1], 2)
+        self.test_end_to_end_block_relay(self.nodes[0], [self.segwit_node, self.test_node, self.old_node])
+        self.test_end_to_end_block_relay(self.nodes[1], [self.segwit_node, self.test_node, self.old_node])
+
+        self.log.info("Testing handling of invalid compact blocks...")
+        self.test_invalid_tx_in_compactblock(self.nodes[0], self.test_node, False)
+        self.test_invalid_tx_in_compactblock(self.nodes[1], self.segwit_node, False)
+        self.test_invalid_tx_in_compactblock(self.nodes[1], self.old_node, False)
+
+        self.log.info("Testing reconstructing compact blocks from all peers...")
+        self.test_compactblock_reconstruction_multiple_peers(self.nodes[1], self.segwit_node, self.old_node)
+        sync_blocks(self.nodes)
+
+        # Advance to segwit activation
+        self.log.info("Advancing to segwit activation")
+        self.activate_segwit(self.nodes[1])
+        self.log.info("Running tests, post-segwit activation...")
+
+        self.log.info("Testing compactblock construction...")
+        self.test_compactblock_construction(self.nodes[1], self.old_node, 1, True)
+        self.test_compactblock_construction(self.nodes[1], self.segwit_node, 2, True)
+        sync_blocks(self.nodes)
+
+        self.log.info("Testing compactblock requests (unupgraded node)... ")
+        self.test_compactblock_requests(self.nodes[0], self.test_node, 1, True)
+
+        self.log.info("Testing getblocktxn requests (unupgraded node)...")
+        self.test_getblocktxn_requests(self.nodes[0], self.test_node, 1)
+
+        # Need to manually sync node0 and node1, because post-segwit activation,
+        # node1 will not download blocks from node0.
+        self.log.info("Syncing nodes...")
+        assert(self.nodes[0].getbestblockhash() != self.nodes[1].getbestblockhash())
+        while (self.nodes[0].getblockcount() > self.nodes[1].getblockcount()):
+            block_hash = self.nodes[0].getblockhash(self.nodes[1].getblockcount()+1)
+            self.nodes[1].submitblock(self.nodes[0].getblock(block_hash, False))
+        assert_equal(self.nodes[0].getbestblockhash(), self.nodes[1].getbestblockhash())
+
+        self.log.info("Testing compactblock requests (segwit node)... ")
+        self.test_compactblock_requests(self.nodes[1], self.segwit_node, 2, True)
+
+        self.log.info("Testing getblocktxn requests (segwit node)...")
+        self.test_getblocktxn_requests(self.nodes[1], self.segwit_node, 2)
+        sync_blocks(self.nodes)
+
+        self.log.info("Testing getblocktxn handler (segwit node should return witnesses)...")
+        self.test_getblocktxn_handler(self.nodes[1], self.segwit_node, 2)
+        self.test_getblocktxn_handler(self.nodes[1], self.old_node, 1)
+
+        # Test that if we submitblock to node1, we'll get a compact block
+        # announcement to all peers.
+        # (Post-segwit activation, blocks won't propagate from node0 to node1
+        # automatically, so don't bother testing a block announced to node0.)
+        self.log.info("Testing end-to-end block relay...")
+        self.request_cb_announcements(self.test_node, self.nodes[0], 1)
+        self.request_cb_announcements(self.old_node, self.nodes[1], 1)
+        self.request_cb_announcements(self.segwit_node, self.nodes[1], 2)
+        self.test_end_to_end_block_relay(self.nodes[1], [self.segwit_node, self.test_node, self.old_node])
+
+        self.log.info("Testing handling of invalid compact blocks...")
+        self.test_invalid_tx_in_compactblock(self.nodes[0], self.test_node, False)
+        self.test_invalid_tx_in_compactblock(self.nodes[1], self.segwit_node, True)
+        self.test_invalid_tx_in_compactblock(self.nodes[1], self.old_node, True)
+
+        self.log.info("Testing invalid index in cmpctblock message...")
+        self.test_invalid_cmpctblock_message()
+
+
+# Script entry point: run the test when invoked directly.
+if __name__ == '__main__':
+    CompactBlocksTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/p2p-feefilter.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,112 @@
+#!/usr/bin/env python3
+# Copyright (c) 2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test processing of feefilter messages."""
+
+from test_framework.mininode import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import time
+
+
+def hashToHex(hash):
+    """Return the integer `hash` as a 64-character zero-padded lowercase hex
+    string. NOTE: the parameter name shadows the built-in hash()."""
+    return format(hash, '064x')
+
+# Wait up to 60 secs to see if the testnode has received all the expected invs
+def allInvsMatch(invsExpected, testnode):
+    """Poll once a second (up to 60s) until testnode.txinvs equals
+    invsExpected as a set-with-multiplicity (order-insensitive via sorting).
+    Returns True on match, False on timeout."""
+    for x in range(60):
+        with mininode_lock:
+            if (sorted(invsExpected) == sorted(testnode.txinvs)):
+                return True
+        time.sleep(1)
+    return False
+
+# TestNode: bare-bones "peer".  Used to track which invs are received from a node
+# and to send the node feefilter messages.
+class TestNode(SingleNodeConnCB):
+    def __init__(self):
+        SingleNodeConnCB.__init__(self)
+        # Hex txids of all tx invs received from the node, in arrival order.
+        self.txinvs = []
+
+    def on_inv(self, conn, message):
+        """Record the txid of every tx inv (type 1 = MSG_TX) we receive."""
+        for i in message.inv:
+            if (i.type == 1):
+                self.txinvs.append(hashToHex(i.hash))
+
+    def clear_invs(self):
+        """Reset the recorded tx invs (under mininode_lock, since on_inv
+        appends from the network thread)."""
+        with mininode_lock:
+            self.txinvs = []
+
+    def send_filter(self, feerate):
+        """Send a feefilter message (feerate in satoshis per kB) and wait for
+        the node to process it via a ping round trip."""
+        self.send_message(msg_feefilter(feerate))
+        self.sync_with_ping()
+
+class FeeFilterTest(BitcoinTestFramework):
+
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 2
+        self.setup_clean_chain = False
+
+    def setup_network(self):
+        # Node1 will be used to generate txs which should be relayed from Node0
+        # to our test node
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir))
+        self.nodes.append(start_node(1, self.options.tmpdir))
+        connect_nodes(self.nodes[0], 1)
+
+    def run_test(self):
+        """Verify that node0 honors our feefilter: txs paying at least the
+        filter rate are inv'd to us, txs below it are not, and clearing the
+        filter (feerate 0) restores relay."""
+        node1 = self.nodes[1]
+        node0 = self.nodes[0]
+        # Get out of IBD
+        node1.generate(1)
+        sync_blocks(self.nodes)
+
+        # Setup the p2p connections and start up the network thread.
+        test_node = TestNode()
+        connection = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node)
+        test_node.add_connection(connection)
+        NetworkThread().start()
+        test_node.wait_for_verack()
+
+        # Test that invs are received for all txs at feerate of 20 sat/byte
+        node1.settxfee(Decimal("0.00020000"))
+        txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
+        assert(allInvsMatch(txids, test_node))
+        test_node.clear_invs()
+
+        # Set a filter of 15 sat/byte
+        test_node.send_filter(15000)
+
+        # Test that txs are still being received (paying 20 sat/byte)
+        txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
+        assert(allInvsMatch(txids, test_node))
+        test_node.clear_invs()
+
+        # Change tx fee rate to 10 sat/byte and test they are no longer received
+        node1.settxfee(Decimal("0.00010000"))
+        [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
+        sync_mempools(self.nodes) # must be sure node 0 has received all txs
+
+        # Send one transaction from node0 that should be received, so that
+        # we can sync the test on receipt (if node1's txs were relayed, they'd
+        # be received by the time this node0 tx is received). This is
+        # unfortunately reliant on the current relay behavior where we batch up
+        # to 35 entries in an inv, which means that when this next transaction
+        # is eligible for relay, the prior transactions from node1 are eligible
+        # as well.
+        node0.settxfee(Decimal("0.00020000"))
+        txids = [node0.sendtoaddress(node0.getnewaddress(), 1)]
+        assert(allInvsMatch(txids, test_node))
+        test_node.clear_invs()
+
+        # Remove fee filter and check that txs are received again
+        test_node.send_filter(0)
+        txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
+        assert(allInvsMatch(txids, test_node))
+        test_node.clear_invs()
+# Script entry point: run the test when invoked directly.
+if __name__ == '__main__':
+    FeeFilterTest().main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/p2p-fullblocktest.py	Thu Mar 09 09:44:57 2017 -0500
@@ -0,0 +1,1290 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test block processing.
+
+This reimplements tests from the bitcoinj/FullBlockTestGenerator used
+by the pull-tester.
+
+We use the testing framework in which we expect a particular answer from
+each test.
+"""
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.comptool import TestManager, TestInstance, RejectResult
+from test_framework.blocktools import *
+import time
+from test_framework.key import CECKey
+from test_framework.script import *
+import struct
+
+class PreviousSpendableOutput(object):
+    def __init__(self, tx = CTransaction(), n = -1):
+        self.tx = tx
+        self.n = n  # the output we're spending
+
+#  Use this class for tests that require behavior other than normal "mininode" behavior.
+#  For now, it is used to serialize a bloated varint (b64).
+class CBrokenBlock(CBlock):
+    """A block whose tx count serializes as a non-canonical, bloated varint.
+
+    Used to check that nodes reject blocks whose transaction count is encoded
+    with more bytes than necessary (0xff prefix + 8-byte count).
+    """
+    def __init__(self, header=None):
+        super(CBrokenBlock, self).__init__(header)
+
+    def initialize(self, base_block):
+        # Deep-copy the transactions so mutating this block cannot affect
+        # base_block's vtx list.
+        self.vtx = copy.deepcopy(base_block.vtx)
+        self.hashMerkleRoot = self.calc_merkle_root()
+
+    def serialize(self):
+        r = b""
+        # super(CBlock, self) deliberately skips CBlock.serialize and calls
+        # CBlock's own base class (presumably the header-only serializer —
+        # confirm against the mininode class hierarchy), so we can append our
+        # own broken tx-count encoding below.
+        r += super(CBlock, self).serialize()
+        # 0xff marker followed by an 8-byte count: a valid value encoded
+        # non-canonically (len(self.vtx) would fit in fewer bytes).
+        r += struct.pack("<BQ", 255, len(self.vtx))
+        for tx in self.vtx:
+            r += tx.serialize()
+        return r
+
+    def normal_serialize(self):
+        # Canonical serialization, via the normal CBlock implementation.
+        r = b""
+        r += super(CBrokenBlock, self).serialize()
+        return r
+
+class FullBlockTest(ComparisonTestFramework):
+
+    # Can either run this test as 1 node with expected answers, or two and compare them.
+    # Change the "outcome" variable from each TestInstance object to only do the comparison.
+    def __init__(self):
+        super().__init__()
+        self.num_nodes = 1
+        # Maps block sha256 -> chain height; seeded with genesis in get_tests.
+        self.block_heights = {}
+        # Key used to sign coinbase outputs so spending them needs a signature.
+        self.coinbase_key = CECKey()
+        self.coinbase_key.set_secretbytes(b"horsebattery")
+        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
+        # Current chain tip, and every block built so far keyed by test number.
+        self.tip = None
+        self.blocks = {}
+
+    def add_options(self, parser):
+        super().add_options(parser)
+        parser.add_option("--runbarelyexpensive", dest="runbarelyexpensive", default=True)
+
+    def run_test(self):
+        # Drive the test through the comptool comparison framework; the actual
+        # test cases are produced by get_tests(). Connections must be added
+        # before the network thread starts.
+        self.test = TestManager(self, self.options.tmpdir)
+        self.test.add_all_connections(self.nodes)
+        NetworkThread().start() # Start up network handling in another thread
+        self.test.run()
+
+    def add_transactions_to_block(self, block, tx_list):
+        [ tx.rehash() for tx in tx_list ]
+        block.vtx.extend(tx_list)
+
+    # this is a little handier to use than the version in blocktools.py
+    def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
+        tx = create_transaction(spend_tx, n, b"", value, script)
+        return tx
+
+    # sign a transaction, using the key we know about
+    # this signs input 0 in tx, which is assumed to be spending output n in spend_tx
+    def sign_tx(self, tx, spend_tx, n):
+        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
+        if (scriptPubKey[0] == OP_TRUE):  # an anyone-can-spend
+            tx.vin[0].scriptSig = CScript()
+            return
+        # NOTE(review): the err result of SignatureHash is silently ignored —
+        # presumably the script is always hashable here; confirm before reuse.
+        (sighash, err) = SignatureHash(spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL)
+        tx.vin[0].scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
+
+    def create_and_sign_transaction(self, spend_tx, n, value, script=CScript([OP_TRUE])):
+        tx = self.create_tx(spend_tx, n, value, script)
+        self.sign_tx(tx, spend_tx, n)
+        tx.rehash()
+        return tx
+
+    def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True):
+        if self.tip == None:
+            base_block_hash = self.genesis_hash
+            block_time = int(time.time())+1
+        else:
+            base_block_hash = self.tip.sha256
+            block_time = self.tip.nTime + 1
+        # First create the coinbase
+        height = self.block_heights[base_block_hash] + 1
+        coinbase = create_coinbase(height, self.coinbase_pubkey)
+        coinbase.vout[0].nValue += additional_coinbase_value
+        coinbase.rehash()
+        if spend == None:
+            block = create_block(base_block_hash, coinbase, block_time)
+        else:
+            coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
+            coinbase.rehash()
+            block = create_block(base_block_hash, coinbase, block_time)
+            tx = create_transaction(spend.tx, spend.n, b"", 1, script)  # spend 1 satoshi
+            self.sign_tx(tx, spend.tx, spend.n)
+            self.add_transactions_to_block(block, [tx])
+            block.hashMerkleRoot = block.calc_merkle_root()
+        if solve:
+            block.solve()
+        self.tip = block
+        self.block_heights[block.sha256] = height
+        assert number not in self.blocks
+        self.blocks[number] = block
+        return block
+
+    def get_tests(self):
+        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
+        self.block_heights[self.genesis_hash] = 0
+        spendable_outputs = []
+
+        # save the current tip so it can be spent by a later block
+        def save_spendable_output():
+            spendable_outputs.append(self.tip)
+
+        # get an output that we previously marked as spendable
+        def get_spendable_output():
+            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)
+
+        # returns a test case that asserts that the current tip was accepted
+        def accepted():
+            return TestInstance([[self.tip, True]])
+
+        # returns a test case that asserts that the current tip was rejected
+        def rejected(reject = None):
+            if reject is None:
+                return TestInstance([[self.tip, False]])
+            else:
+                return TestInstance([[self.tip, reject]])
+
+        # move the tip back to a previous block
+        def tip(number):
+            self.tip = self.blocks[number]
+
+        # adds transactions to the block and updates state
+        def update_block(block_number, new_transactions):
+            block = self.blocks[block_number]
+            self.add_transactions_to_block(block, new_transactions)
+            old_sha256 = block.sha256
+            block.hashMerkleRoot = block.calc_merkle_root()
+            block.solve()
+            # Update the internal state just like in next_block
+            self.tip = block
+            if block.sha256 != old_sha256:
+                self.block_heights[block.sha256] = self.block_heights[old_sha256]
+                del self.block_heights[old_sha256]
+            self.blocks[block_number] = block
+            return block
+
+        # shorthand for functions
+        block = self.next_block
+        create_tx = self.create_tx
+        create_and_sign_tx = self.create_and_sign_transaction
+
+        # these must be updated if consensus changes
+        MAX_BLOCK_SIGOPS = 20000
+
+
+        # Create a new block
+        block(0)
+        save_spendable_output()
+        yield accepted()
+
+
+        # Now we need that block to mature so we can spend the coinbase.
+        test = TestInstance(sync_every_block=False)
+        for i in range(99):
+            block(5000 + i)
+            test.blocks_and_transactions.append([self.tip, True])
+            save_spendable_output()
+        yield test
+
+        # collect spendable outputs now to avoid cluttering the code later on
+        out = []
+        for i in range(33):
+            out.append(get_spendable_output())
+
+        # Start by building a couple of blocks on top (which output is spent is
+        # in parentheses):
+        #     genesis -> b1 (0) -> b2 (1)
+        block(1, spend=out[0])
+        save_spendable_output()
+        yield accepted()
+
+        block(2, spend=out[1])
+        yield accepted()
+        save_spendable_output()
+
+        # so fork like this:
+        #
+        #     genesis -> b1 (0) -> b2 (1)
+        #                      \-> b3 (1)
+        #
+        # Nothing should happen at this point. We saw b2 first so it takes priority.
+        tip(1)
+        b3 = block(3, spend=out[1])
+        txout_b3 = PreviousSpendableOutput(b3.vtx[1], 0)
+        yield rejected()
+
+
+        # Now we add another block to make the alternative chain longer.
+        #
+        #     genesis -> b1 (0) -> b2 (1)
+        #                      \-> b3 (1) -> b4 (2)
+        block(4, spend=out[2])
+        yield accepted()
+
+
+        # ... and back to the first chain.
+        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+        #                      \-> b3 (1) -> b4 (2)
+        tip(2)
+        block(5, spend=out[2])
+        save_spendable_output()
+        yield rejected()
+
+        block(6, spend=out[3])
+        yield accepted()
+
+        # Try to create a fork that double-spends
+        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+        #                                          \-> b7 (2) -> b8 (4)
+        #                      \-> b3 (1) -> b4 (2)
+        tip(5)
+        block(7, spend=out[2])
+        yield rejected()
+
+        block(8, spend=out[4])
+        yield rejected()
+
+        # Try to create a block that has too much fee
+        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+        #                                                    \-> b9 (4)
+        #                      \-> b3 (1) -> b4 (2)
+        tip(6)
+        block(9, spend=out[4], additional_coinbase_value=1)
+        yield rejected(RejectResult(16, b'bad-cb-amount'))
+
+        # Create a fork that ends in a block with too much fee (the one that causes the reorg)
+        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
+        #                                          \-> b10 (3) -> b11 (4)
+        #                      \-> b3 (1) -> b4 (2)
+        tip(5)
+        block(10, spend=out[3])
+        yield rejected()
+
+        block(11, spend=out[4], additional_coinbase_value=1)
+        yield rejected(RejectResult(16, b'bad-cb-amount'))
+
+
+        # Try again, but with a valid fork first
+        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
+        #                                          \-> b12 (3) -> b13 (4) -> b14 (5)
+        #                                              (b12 added last)
+        #                      \-> b3 (1) -> b4 (2)
+        tip(5)
+        b12 = block(12, spend=out[3])
+        save_spendable_output()
+        b13 = block(13, spend=out[4])
+        # Deliver the block header for b12, and the block b13.
+        # b13 should be accepted but the tip won't advance until b12 is delivered.
+        yield TestInstance([[CBlockHeader(b12), None], [b13, False]])
+
+        save_spendable_output()
+        # b14 is invalid, but the node won't know that until it tries to connect
+        # Tip still can't advance because b12 is missing
+        block(14, spend=out[5], additional_coinbase_value=1)
+        yield rejected()
+
+        yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.
+