diff --git a/counterparty-core/counterpartycore/cli.py b/counterparty-core/counterpartycore/cli.py
index a3fd2e3816..a5ebd04b49 100755
--- a/counterparty-core/counterpartycore/cli.py
+++ b/counterparty-core/counterpartycore/cli.py
@@ -7,7 +7,7 @@
from termcolor import cprint
from counterpartycore import server
-from counterpartycore.lib import config, log, setup
+from counterpartycore.lib import config, setup
logger = logging.getLogger(config.LOGGER_NAME)
@@ -170,6 +170,35 @@
"help": f"number of RPC queries by batch (default: {config.DEFAULT_RPC_BATCH_SIZE})",
},
],
+ [
+ ("--api-host",),
+ {
+ "default": "localhost",
+ "help": "the IP of the interface to bind to for providing API access (0.0.0.0 for all interfaces)",
+ },
+ ],
+ [
+ ("--api-port",),
+ {"type": int, "help": f"port on which to provide the {config.APP_NAME} API"},
+ ],
+ [
+ ("--api-user",),
+ {
+ "default": "api",
+ "help": f"required username to use the {config.APP_NAME} API (via HTTP basic auth)",
+ },
+ ],
+ [
+ ("--api-password",),
+ {
+ "default": "api",
+            "help": f"required password (for api-user) to use the {config.APP_NAME} API (via HTTP basic auth)",
+ },
+ ],
+ [
+ ("--api-no-allow-cors",),
+        {"action": "store_true", "default": False, "help": "don't allow ajax cross domain request"},
+ ],
[
("--requests-timeout",),
{
@@ -204,6 +233,10 @@
"help": "log API requests to the specified file",
},
],
+ [
+ ("--enable-api-v1",),
+ {"action": "store_true", "default": False, "help": "Enable the API v1"},
+ ],
[
("--no-log-files",),
{"action": "store_true", "default": False, "help": "Don't write log files"},
@@ -368,60 +401,8 @@ def main():
parser.print_help()
exit(0)
- # Configuration
- init_args = dict(
- database_file=args.database_file,
- testnet=args.testnet,
- testcoin=args.testcoin,
- regtest=args.regtest,
- customnet=args.customnet,
- api_limit_rows=args.api_limit_rows,
- backend_connect=args.backend_connect,
- backend_port=args.backend_port,
- backend_user=args.backend_user,
- backend_password=args.backend_password,
- backend_ssl=args.backend_ssl,
- backend_ssl_no_verify=args.backend_ssl_no_verify,
- backend_poll_interval=args.backend_poll_interval,
- indexd_connect=args.indexd_connect,
- indexd_port=args.indexd_port,
- rpc_host=args.rpc_host,
- rpc_port=args.rpc_port,
- rpc_user=args.rpc_user,
- rpc_password=args.rpc_password,
- rpc_no_allow_cors=args.rpc_no_allow_cors,
- requests_timeout=args.requests_timeout,
- rpc_batch_size=args.rpc_batch_size,
- check_asset_conservation=args.check_asset_conservation,
- force=args.force,
- p2sh_dust_return_pubkey=args.p2sh_dust_return_pubkey,
- utxo_locks_max_addresses=args.utxo_locks_max_addresses,
- utxo_locks_max_age=args.utxo_locks_max_age,
- no_mempool=args.no_mempool,
- skip_db_check=args.skip_db_check,
- )
-
- server.initialise_log_config(
- verbose=args.verbose,
- quiet=args.quiet,
- log_file=args.log_file,
- api_log_file=args.api_log_file,
- no_log_files=args.no_log_files,
- testnet=args.testnet,
- testcoin=args.testcoin,
- regtest=args.regtest,
- json_log=args.json_log,
- )
-
- # set up logging
- log.set_up(
- verbose=config.VERBOSE,
- quiet=config.QUIET,
- log_file=config.LOG,
- log_in_console=args.action == "start",
- )
-
- server.initialise_config(**init_args)
+ # Configuration and logging
+ server.initialise_log_and_config(args)
logger.info(f"Running v{APP_VERSION} of {APP_NAME}.")
@@ -447,7 +428,7 @@ def main():
)
elif args.action == "start":
- server.start_all(catch_up=args.catch_up)
+ server.start_all(args)
elif args.action == "show-params":
server.show_params()
diff --git a/counterparty-core/counterpartycore/lib/api/api_server.py b/counterparty-core/counterpartycore/lib/api/api_server.py
new file mode 100644
index 0000000000..9a258fd665
--- /dev/null
+++ b/counterparty-core/counterpartycore/lib/api/api_server.py
@@ -0,0 +1,192 @@
+import argparse
+import logging
+import multiprocessing
+import signal
+from multiprocessing import Process
+from threading import Timer
+
+import flask
+from counterpartycore import server
+from counterpartycore.lib import (
+ blocks,
+ config,
+ database,
+ ledger,
+)
+from counterpartycore.lib.api.routes import ROUTES
+from counterpartycore.lib.api.util import get_backend_height, init_api_access_log, remove_rowids
+from flask import Flask, request
+from flask import g as flask_globals
+from flask_cors import CORS
+from flask_httpauth import HTTPBasicAuth
+
+multiprocessing.set_start_method("spawn", force=True)
+
+logger = logging.getLogger(config.LOGGER_NAME)
+auth = HTTPBasicAuth()
+
+BACKEND_HEIGHT = 0
+REFRESH_BACKEND_HEIGHT_INTERVAL = 10
+BACKEND_HEIGHT_TIMER = None
+
+
+def get_db():
+ """Get the database connection."""
+ if not hasattr(flask_globals, "db"):
+ flask_globals.db = database.get_connection(read_only=True)
+ return flask_globals.db
+
+
+@auth.verify_password
+def verify_password(username, password):
+ return username == config.API_USER and password == config.API_PASSWORD
+
+
+def api_root():
+ counterparty_height = blocks.last_db_index(get_db())
+ routes = []
+ for path in ROUTES:
+ route = ROUTES[path]
+ routes.append(
+ {
+ "path": path,
+ "args": route.get("args", []),
+ "description": route.get("description", ""),
+ }
+ )
+ network = "mainnet"
+ if config.TESTNET:
+ network = "testnet"
+ elif config.REGTEST:
+ network = "regtest"
+ elif config.TESTCOIN:
+ network = "testcoin"
+ return {
+ "server_ready": counterparty_height >= BACKEND_HEIGHT,
+ "network": network,
+ "version": config.VERSION_STRING,
+ "backend_height": BACKEND_HEIGHT,
+ "counterparty_height": counterparty_height,
+ "routes": routes,
+ }
+
+
+def inject_headers(result, return_code=None):
+ server_ready = ledger.CURRENT_BLOCK_INDEX >= BACKEND_HEIGHT
+ http_code = 200
+ if return_code:
+ http_code = return_code
+ elif not server_ready:
+ http_code = config.API_NOT_READY_HTTP_CODE
+ if isinstance(result, flask.Response):
+ response = result
+ else:
+ response = flask.make_response(flask.jsonify(result), http_code)
+ response.headers["X-COUNTERPARTY-HEIGHT"] = ledger.CURRENT_BLOCK_INDEX
+ response.headers["X-COUNTERPARTY-READY"] = ledger.CURRENT_BLOCK_INDEX >= BACKEND_HEIGHT
+ response.headers["X-BACKEND-HEIGHT"] = BACKEND_HEIGHT
+ return response
+
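+# Every response produced above carries X-COUNTERPARTY-HEIGHT, X-COUNTERPARTY-READY
+# and X-BACKEND-HEIGHT headers; while the node is still catching up, JSON results
+# are returned with config.API_NOT_READY_HTTP_CODE unless an explicit return_code
+# is supplied.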
+
+def prepare_args(route, **kwargs):
+ function_args = dict(kwargs)
+ if "pass_all_args" in route and route["pass_all_args"]:
+ function_args = request.args | function_args
+ elif "args" in route:
+ for arg in route["args"]:
+ arg_name = arg["name"]
+ if arg_name in function_args:
+ continue
+ str_arg = request.args.get(arg_name)
+ if str_arg is None and arg["required"]:
+ raise ValueError(f"Missing required parameter: {arg_name}")
+ if str_arg is None:
+ function_args[arg_name] = arg["default"]
+ elif arg["type"] == "bool":
+ function_args[arg_name] = str_arg.lower() in ["true", "1"]
+ elif arg["type"] == "int":
+ try:
+ function_args[arg_name] = int(str_arg)
+ except ValueError as e:
+ raise ValueError(f"Invalid integer: {arg_name}") from e
+ else:
+ function_args[arg_name] = str_arg
+ return function_args
+
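+# The "args" specs consumed above are dicts built by `util.prepare_route_args`
+# ({"name", "type", "default", "required", "description"}): missing values are
+# pulled from the query string, "int" and "bool" values are cast, and a missing
+# required parameter raises ValueError, which `handle_route` turns into a 400.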
+
+@auth.login_required
+def handle_route(**kwargs):
+ db = get_db()
+ # update the current block index
+ ledger.CURRENT_BLOCK_INDEX = blocks.last_db_index(db)
+ rule = str(request.url_rule.rule)
+ if rule == "/":
+ result = api_root()
+ else:
+ route = ROUTES.get(rule)
+ try:
+ function_args = prepare_args(route, **kwargs)
+ except ValueError as e:
+ return inject_headers({"error": str(e)}, return_code=400)
+ result = route["function"](db, **function_args)
+ result = remove_rowids(result)
+ return inject_headers(result)
+
+
+def run_api_server(args):
+ # default signal handlers
+ signal.signal(signal.SIGTERM, signal.SIG_DFL)
+ signal.signal(signal.SIGINT, signal.default_int_handler)
+
+ app = Flask(config.APP_NAME)
+ # Initialise log and config
+ server.initialise_log_and_config(argparse.Namespace(**args))
+ with app.app_context():
+ if not config.API_NO_ALLOW_CORS:
+ CORS(app)
+ # Initialise the API access log
+ init_api_access_log(app)
+ # Get the last block index
+ ledger.CURRENT_BLOCK_INDEX = blocks.last_db_index(get_db())
+ # Add routes
+ app.add_url_rule("/", view_func=handle_route)
+ for path in ROUTES:
+ app.add_url_rule(path, view_func=handle_route)
+ # run the scheduler to refresh the backend height
+ # `no_refresh_backend_height` used only for testing. TODO: find a way to mock it
+ if "no_refresh_backend_height" not in args or not args["no_refresh_backend_height"]:
+ refresh_backend_height()
+ try:
+ # Start the API server
+ app.run(host=config.API_HOST, port=config.API_PORT, debug=False)
+ finally:
+ # ensure timer is cancelled
+ if BACKEND_HEIGHT_TIMER:
+ BACKEND_HEIGHT_TIMER.cancel()
+
+
+def refresh_backend_height():
+ global BACKEND_HEIGHT, BACKEND_HEIGHT_TIMER # noqa F811
+ BACKEND_HEIGHT = get_backend_height()
+ if BACKEND_HEIGHT_TIMER:
+ BACKEND_HEIGHT_TIMER.cancel()
+ BACKEND_HEIGHT_TIMER = Timer(REFRESH_BACKEND_HEIGHT_INTERVAL, refresh_backend_height)
+ BACKEND_HEIGHT_TIMER.start()
+
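+# `refresh_backend_height` runs inside the API process and re-arms a one-shot
+# timer every REFRESH_BACKEND_HEIGHT_INTERVAL seconds instead of using a
+# separate scheduler thread; the pending timer is cancelled in the `finally`
+# block of `run_api_server` when the server exits.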
+
+class APIServer(object):
+ def __init__(self):
+ self.process = None
+
+ def start(self, args):
+ if self.process is not None:
+ raise Exception("API server is already running")
+ self.process = Process(target=run_api_server, args=(vars(args),))
+ self.process.start()
+ return self.process
+
+ def stop(self):
+ logger.info("Stopping API server v2...")
+ if self.process and self.process.is_alive():
+ self.process.terminate()
+ self.process = None
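+
+
+# Minimal usage sketch (assumes the mainnet defaults added in config.py, i.e.
+# port 4000 and the default "api"/"api" basic-auth credentials; adjust to your
+# own settings):
+#
+#     curl --user api:api http://localhost:4000/
+#
+# The root route returns the list of available routes plus `server_ready`,
+# `backend_height` and `counterparty_height`, so clients can poll `/` until the
+# node has caught up with the backend.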
diff --git a/counterparty-core/counterpartycore/lib/api.py b/counterparty-core/counterpartycore/lib/api/api_v1.py
similarity index 79%
rename from counterparty-core/counterpartycore/lib/api.py
rename to counterparty-core/counterpartycore/lib/api/api_v1.py
index ddad11a5cd..fcf03d6bde 100644
--- a/counterparty-core/counterpartycore/lib/api.py
+++ b/counterparty-core/counterpartycore/lib/api/api_v1.py
@@ -5,39 +5,22 @@
problem.
"""
+import binascii
import collections
import decimal
import json
import logging
-import os # noqa: F401
+import math
+import os
import re
-import sys
import threading
import time
import traceback
-from logging import handlers as logging_handlers
-import requests # noqa: F401
-
-D = decimal.Decimal
-import binascii # noqa: E402
-import inspect # noqa: E402
-import math # noqa: E402
-import struct # noqa: E402, F401
-
-import apsw # noqa: E402, F401
-import flask # noqa: E402
-import jsonrpc # noqa: E402
-from flask import request # noqa: E402
-from flask_httpauth import HTTPBasicAuth # noqa: E402
-from jsonrpc import dispatcher # noqa: E402
-from jsonrpc.exceptions import JSONRPCDispatchException # noqa: E402
-from werkzeug.serving import make_server # noqa: E402
-from xmltodict import unparse as serialize_to_xml # noqa: E402
-
-from counterpartycore.lib import ( # noqa: E402
+import flask
+import jsonrpc
+from counterpartycore.lib import (
backend,
- blocks, # noqa: F401
config,
database,
exceptions,
@@ -48,8 +31,9 @@
transaction,
util,
)
-from counterpartycore.lib.kickstart.blocks_parser import BlockchainParser # noqa: E402
-from counterpartycore.lib.messages import ( # noqa: E402
+from counterpartycore.lib.api import util as api_util
+from counterpartycore.lib.kickstart.blocks_parser import BlockchainParser
+from counterpartycore.lib.messages import (
bet, # noqa: F401
broadcast, # noqa: F401
btcpay, # noqa: F401
@@ -66,6 +50,14 @@
sweep, # noqa: F401
)
from counterpartycore.lib.messages.versions import enhanced_send # noqa: E402
+from flask import request
+from flask_httpauth import HTTPBasicAuth
+from jsonrpc import dispatcher
+from jsonrpc.exceptions import JSONRPCDispatchException
+from werkzeug.serving import make_server
+from xmltodict import unparse as serialize_to_xml
+
+D = decimal.Decimal
logger = logging.getLogger(config.LOGGER_NAME)
@@ -163,49 +155,6 @@
""",
}
-API_TRANSACTIONS = [
- "bet",
- "broadcast",
- "btcpay",
- "burn",
- "cancel",
- "destroy",
- "dividend",
- "issuance",
- "order",
- "send",
- "rps",
- "rpsresolve",
- "sweep",
- "dispenser",
-]
-
-COMMONS_ARGS = [
- "encoding",
- "fee_per_kb",
- "regular_dust_size",
- "multisig_dust_size",
- "op_return_value",
- "pubkey",
- "allow_unconfirmed_inputs",
- "fee",
- "fee_provided",
- "estimate_fee_per_kb",
- "estimate_fee_per_kb_nblocks",
- "unspent_tx_hash",
- "custom_inputs",
- "dust_return_pubkey",
- "disable_utxo_locks",
- "extended_tx_info",
- "p2sh_source_multisig_pubkeys",
- "p2sh_source_multisig_pubkeys_required",
- "p2sh_pretx_txid",
-]
-
-API_MAX_LOG_SIZE = (
- 10 * 1024 * 1024
-) # max log size of 20 MB before rotation (make configurable later)
-API_MAX_LOG_COUNT = 10
JSON_RPC_ERROR_API_COMPOSE = -32001 # code to use for error composing transaction result
CURRENT_API_STATUS_CODE = None # is updated by the APIStatusPoller
@@ -241,14 +190,6 @@ class DatabaseError(Exception):
pass
-def check_database_state(db, blockcount):
- f"""Checks {config.XCP_NAME} database to see if is caught up with backend.""" # noqa: B021
- if ledger.CURRENT_BLOCK_INDEX + 1 < blockcount:
- raise DatabaseError(f"{config.XCP_NAME} database is behind backend.")
- # logger.debug("Database state check passed.")
- return
-
-
# TODO: ALL queries EVERYWHERE should be done with these methods
def db_query(db, statement, bindings=(), callback=None, **callback_args):
"""Allow direct access to the database in a parametrized manner."""
@@ -541,129 +482,6 @@ def adjust_get_transactions_results(query_result):
return filtered_results
-def get_default_args(func):
- signature = inspect.signature(func)
- return {
- k: v.default
- for k, v in signature.parameters.items()
- if v.default is not inspect.Parameter.empty
- }
-
-
-def compose_transaction(
- db,
- name,
- params,
- encoding="auto",
- fee_per_kb=None,
- estimate_fee_per_kb=None,
- regular_dust_size=config.DEFAULT_REGULAR_DUST_SIZE,
- multisig_dust_size=config.DEFAULT_MULTISIG_DUST_SIZE,
- op_return_value=config.DEFAULT_OP_RETURN_VALUE,
- pubkey=None,
- allow_unconfirmed_inputs=False,
- fee=None,
- fee_provided=0,
- unspent_tx_hash=None,
- custom_inputs=None,
- dust_return_pubkey=None,
- disable_utxo_locks=False,
- extended_tx_info=False,
- p2sh_source_multisig_pubkeys=None,
- p2sh_source_multisig_pubkeys_required=None,
- p2sh_pretx_txid=None,
- old_style_api=True,
- segwit=False,
-):
- """Create and return a transaction."""
-
- # Get provided pubkeys.
- if type(pubkey) == str: # noqa: E721
- provided_pubkeys = [pubkey]
- elif type(pubkey) == list: # noqa: E721
- provided_pubkeys = pubkey
- elif pubkey == None: # noqa: E711
- provided_pubkeys = []
- else:
- assert False # noqa: B011
-
- # Get additional pubkeys from `source` and `destination` params.
- # Convert `source` and `destination` to pubkeyhash form.
- for address_name in ["source", "destination"]:
- if address_name in params:
- address = params[address_name]
- if isinstance(address, list):
- # pkhshs = []
- # for addr in address:
- # provided_pubkeys += script.extract_pubkeys(addr)
- # pkhshs.append(script.make_pubkeyhash(addr))
- # params[address_name] = pkhshs
- pass
- else:
- provided_pubkeys += script.extract_pubkeys(address)
- params[address_name] = script.make_pubkeyhash(address)
-
- # Check validity of collected pubkeys.
- for pubkey in provided_pubkeys:
- if not script.is_fully_valid(binascii.unhexlify(pubkey)):
- raise script.AddressError(f"invalid public key: {pubkey}")
-
- compose_method = sys.modules[f"counterpartycore.lib.messages.{name}"].compose
- compose_params = inspect.getfullargspec(compose_method)[0]
- missing_params = [p for p in compose_params if p not in params and p != "db"]
- if len(missing_params) > 0:
- default_values = get_default_args(compose_method)
- for param in missing_params:
- if param in default_values:
- params[param] = default_values[param]
- else:
- raise exceptions.ComposeError(f"missing parameters: {', '.join(missing_params)}")
-
- # dont override fee_per_kb if specified
- if fee_per_kb is not None:
- estimate_fee_per_kb = False
- else:
- fee_per_kb = config.DEFAULT_FEE_PER_KB
-
- if "extended_tx_info" in params:
- extended_tx_info = params["extended_tx_info"]
- del params["extended_tx_info"]
-
- if "old_style_api" in params:
- old_style_api = params["old_style_api"]
- del params["old_style_api"]
-
- if "segwit" in params:
- segwit = params["segwit"]
- del params["segwit"]
-
- tx_info = compose_method(db, **params)
- return transaction.construct(
- db,
- tx_info,
- encoding=encoding,
- fee_per_kb=fee_per_kb,
- estimate_fee_per_kb=estimate_fee_per_kb,
- regular_dust_size=regular_dust_size,
- multisig_dust_size=multisig_dust_size,
- op_return_value=op_return_value,
- provided_pubkeys=provided_pubkeys,
- allow_unconfirmed_inputs=allow_unconfirmed_inputs,
- exact_fee=fee,
- fee_provided=fee_provided,
- unspent_tx_hash=unspent_tx_hash,
- custom_inputs=custom_inputs,
- dust_return_pubkey=dust_return_pubkey,
- disable_utxo_locks=disable_utxo_locks,
- extended_tx_info=extended_tx_info,
- p2sh_source_multisig_pubkeys=p2sh_source_multisig_pubkeys,
- p2sh_source_multisig_pubkeys_required=p2sh_source_multisig_pubkeys_required,
- p2sh_pretx_txid=p2sh_pretx_txid,
- old_style_api=old_style_api,
- segwit=segwit,
- )
-
-
def conditional_decorator(decorator, condition):
"""Checks the condition and if True applies specified decorator."""
@@ -675,27 +493,6 @@ def gen_decorator(f):
return gen_decorator
-def init_api_access_log(app):
- """Initialize API logger."""
- loggers = (logging.getLogger("werkzeug"), app.logger)
-
- # Disable console logging...
- for l in loggers: # noqa: E741
- l.setLevel(logging.CRITICAL)
- l.propagate = False
-
- # Log to file, if configured...
- if config.API_LOG:
- handler = logging_handlers.RotatingFileHandler(
- config.API_LOG, "a", API_MAX_LOG_SIZE, API_MAX_LOG_COUNT
- )
- for l in loggers: # noqa: E741
- handler.setLevel(logging.DEBUG)
- l.addHandler(handler)
-
- flask.cli.show_server_banner = lambda *args: None
-
-
class APIStatusPoller(threading.Thread):
"""Perform regular checks on the state of the backend and the database."""
@@ -732,7 +529,7 @@ def run(self):
check_backend_state()
code = 12
logger.debug("Checking database state.")
- check_database_state(self.db, backend.getblockcount())
+ api_util.check_last_parsed_block(backend.getblockcount())
self.last_database_check = time.time()
except (BackendError, DatabaseError) as e:
exception_name = e.__class__.__name__
@@ -762,12 +559,12 @@ def __init__(self, db=None):
threading.Thread.__init__(self)
def stop(self):
- logger.info("Stopping API Server...")
- self.server.shutdown()
self.db.close()
+ self.server.shutdown()
+ self.join()
def run(self):
- logger.info("Starting API Server...")
+ logger.info("Starting API Server v1.")
self.db = self.db or database.get_connection(read_only=True)
app = flask.Flask(__name__)
auth = HTTPBasicAuth()
@@ -807,23 +604,12 @@ def sql(query, bindings=None):
# Generate dynamically create_{transaction} methods
def generate_create_method(tx):
- def split_params(**kwargs):
- transaction_args = {}
- common_args = {}
- private_key_wif = None
- for key in kwargs:
- if key in COMMONS_ARGS:
- common_args[key] = kwargs[key]
- elif key == "privkey":
- private_key_wif = kwargs[key]
- else:
- transaction_args[key] = kwargs[key]
- return transaction_args, common_args, private_key_wif
-
def create_method(**kwargs):
try:
- transaction_args, common_args, private_key_wif = split_params(**kwargs)
- return compose_transaction(
+ transaction_args, common_args, private_key_wif = (
+ transaction.split_compose_arams(**kwargs)
+ )
+ return transaction.compose_transaction(
self.db, name=tx, params=transaction_args, **common_args
)
except (
@@ -843,7 +629,7 @@ def create_method(**kwargs):
return create_method
- for tx in API_TRANSACTIONS:
+ for tx in transaction.COMPOSABLE_TRANSACTIONS:
create_method = generate_create_method(tx)
create_method.__name__ = f"create_{tx}"
dispatcher.add_method(create_method)
@@ -1021,7 +807,7 @@ def get_running_info():
latest_block_index = backend.getblockcount()
try:
- check_database_state(self.db, latest_block_index)
+ api_util.check_last_parsed_block(latest_block_index)
except DatabaseError:
caught_up = False
else:
@@ -1196,12 +982,11 @@ def unpack(data_hex):
# TODO: Enabled only for `send`.
if message_type_id == send.ID:
- unpack_method = send.unpack
+ unpacked = send.unpack(self.db, message, ledger.CURRENT_BLOCK_INDEX)
elif message_type_id == enhanced_send.ID:
- unpack_method = enhanced_send.unpack
+ unpacked = enhanced_send.unpack(message, ledger.CURRENT_BLOCK_INDEX)
else:
raise APIError("unsupported message type")
- unpacked = unpack_method(self.db, message, ledger.CURRENT_BLOCK_INDEX)
return message_type_id, unpacked
@dispatcher.add_method
@@ -1276,74 +1061,10 @@ def _set_cors_headers(response):
##### REST ROUTES #####
-        @app.route("/addresses/<address>/balances", methods=["GET"])
-        def handle_address_balances(address):
-            return remove_rowids(ledger.get_address_balances(self.db, address))
-
-        @app.route("/assets/<asset>/balances", methods=["GET"])
-        def handle_asset_balances(asset):
-            return remove_rowids(ledger.get_asset_balances(self.db, asset))
-
-        @app.route("/assets/<asset>/", methods=["GET"])
-        def handle_asset_info(asset):
-            return remove_rowids(get_asset_info(asset=asset))
-
-        @app.route("/assets/<asset>/orders", methods=["GET"])
-        def handle_asset_orders(asset):
-            status = request.args.get("status", "open")
-            return remove_rowids(ledger.get_orders_by_asset(self.db, asset, status))
-
-        @app.route("/orders/<tx_hash>", methods=["GET"])
-        def handle_order_info(tx_hash):
-            return remove_rowids(ledger.get_order(self.db, tx_hash))
-
-        @app.route("/orders/<tx_hash>/matches", methods=["GET"])
-        def handle_order_matches(tx_hash):
-            status = request.args.get("status", "pending")
-            return remove_rowids(ledger.get_order_matches_by_order(self.db, tx_hash, status))
-
@app.route("/healthz", methods=["GET"])
def handle_healthz():
- msg, code = "Healthy", 200
-
- type_ = request.args.get("type", "light")
-
- def light_check():
- latest_block_index = backend.getblockcount()
- check_database_state(self.db, latest_block_index)
-
- def heavy_check():
- compose_transaction(
- self.db,
- name="send",
- params={
- "source": config.UNSPENDABLE,
- "destination": config.UNSPENDABLE,
- "asset": config.XCP,
- "quantity": 100000000,
- },
- allow_unconfirmed_inputs=True,
- fee=1000,
- )
-
- try:
- if type_ == "heavy":
- # Perform a heavy healthz check.
- # Do everything in light but also compose a
- # send tx
-
- logger.debug("Performing heavy healthz check.")
-
- light_check()
- heavy_check()
- else:
- logger.debug("Performing light healthz check.")
- light_check()
-
- except Exception:
- msg, code = "Unhealthy", 503
-
- return flask.Response(msg, code, mimetype="application/json")
+ check_type = request.args.get("type", "light")
+ return api_util.handle_healthz_route(self.db, check_type)
@app.route("/", defaults={"args_path": ""}, methods=["GET", "POST", "OPTIONS"])
@app.route("/", methods=["GET", "POST", "OPTIONS"])
@@ -1351,12 +1072,11 @@ def heavy_check():
@conditional_decorator(auth.login_required, hasattr(config, "RPC_PASSWORD"))
def handle_root(args_path):
"""Handle all paths, decide where to forward the query."""
+ request_path = args_path.lower()
if (
- args_path == ""
- or args_path.startswith("api/")
- or args_path.startswith("API/")
- or args_path.startswith("rpc/")
- or args_path.startswith("RPC/")
+ request_path == "old"
+ or request_path.startswith("old/api/")
+ or request_path.startswith("old/rpc/")
):
if flask.request.method == "POST":
# Need to get those here because it might not be available in this aux function.
@@ -1369,7 +1089,7 @@ def handle_root(args_path):
else:
error = "Invalid method."
return flask.Response(error, 405, mimetype="application/json")
- elif args_path.startswith("rest/") or args_path.startswith("REST/"):
+ elif request_path.startswith("old/rest/"):
if flask.request.method == "GET" or flask.request.method == "POST":
# Pass the URL path without /REST/ part and Flask request object.
rest_path = args_path.split("/", 1)[1]
@@ -1388,6 +1108,7 @@ def handle_root(args_path):
def handle_rpc_options():
response = flask.Response("", 204)
_set_cors_headers(response)
+ response.headers["X-API-WARN"] = "Deprecated API"
return response
def handle_rpc_post(request_json):
@@ -1427,6 +1148,10 @@ def handle_rpc_post(request_json):
jsonrpc_response.json.encode(), 200, mimetype="application/json"
)
_set_cors_headers(response)
+ response.headers["X-API-WARN"] = "Deprecated API"
+ logger.warning(
+                "API v1 is deprecated and will be removed soon. Please migrate to the REST API."
+ )
return response
######################
@@ -1451,7 +1176,7 @@ def handle_rest(path_args, flask_request):
error = "No query_type provided."
return flask.Response(error, 400, mimetype="application/json")
# Check if message type or table name are valid.
- if (compose and query_type not in API_TRANSACTIONS) or (
+ if (compose and query_type not in transaction.COMPOSABLE_TRANSACTIONS) or (
not compose and query_type not in API_TABLES
):
error = f'No such query type in supported queries: "{query_type}".'
@@ -1462,24 +1187,9 @@ def handle_rest(path_args, flask_request):
query_data = {}
if compose:
- common_args = {}
- transaction_args = {}
- for key, value in extra_args:
- # Determine value type.
- try:
- value = int(value) # noqa: PLW2901
- except ValueError:
- try:
- value = float(value) # noqa: PLW2901
- except ValueError:
- pass
- # Split keys into common and transaction-specific arguments. Discard the privkey.
- if key in COMMONS_ARGS:
- common_args[key] = value
- elif key == "privkey":
- pass
- else:
- transaction_args[key] = value
+ transaction_args, common_args, private_key_wif = transaction.split_compose_arams(
+ **extra_args
+ )
# Must have some additional transaction arguments.
if not len(transaction_args):
@@ -1488,7 +1198,7 @@ def handle_rest(path_args, flask_request):
# Compose the transaction.
try:
- query_data = compose_transaction(
+ query_data = transaction.compose_transaction(
self.db, name=query_type, params=transaction_args, **common_args
)
except (
@@ -1541,7 +1251,7 @@ def handle_rest(path_args, flask_request):
# Init the HTTP Server.
self.is_ready = True
self.server = make_server(config.RPC_HOST, config.RPC_PORT, app)
- init_api_access_log(app)
+ api_util.init_api_access_log(app)
self.ctx = app.app_context()
self.ctx.push()
# Run app server (blocking)
diff --git a/counterparty-core/counterpartycore/lib/api/routes.py b/counterparty-core/counterpartycore/lib/api/routes.py
new file mode 100644
index 0000000000..8b32a86a88
--- /dev/null
+++ b/counterparty-core/counterpartycore/lib/api/routes.py
@@ -0,0 +1,105 @@
+from counterpartycore.lib import (
+ backend,
+ ledger,
+ transaction,
+)
+from counterpartycore.lib.api import util
+
+# Define the API routes except root (`/`) defined in `api_server.py`
+ROUTES = util.prepare_routes(
+    {
+        ### /blocks ###
+        "/blocks": ledger.get_blocks,
+        "/blocks/<int:block_index>": ledger.get_block,
+        "/blocks/<int:block_index>/transactions": ledger.get_transactions_by_block,
+        "/blocks/<int:block_index>/events": ledger.get_events_by_block,
+        "/blocks/<int:block_index>/events/counts": ledger.get_events_counts_by_block,
+        "/blocks/<int:block_index>/events/<event>": ledger.get_events_by_block_and_event,
+        "/blocks/<int:block_index>/credits": ledger.get_credits_by_block,
+        "/blocks/<int:block_index>/debits": ledger.get_debits_by_block,
+        "/blocks/<int:block_index>/expirations": ledger.get_expirations,
+        "/blocks/<int:block_index>/cancels": ledger.get_cancels,
+        "/blocks/<int:block_index>/destructions": ledger.get_destructions,
+        "/blocks/<int:block_index>/issuances": ledger.get_issuances_by_block,
+        "/blocks/<int:block_index>/sends": ledger.get_sends_or_receives_by_block,
+        "/blocks/<int:block_index>/dispenses": ledger.get_dispenses_by_block,
+        "/blocks/<int:block_index>/sweeps": ledger.get_sweeps_by_block,
+        ### /transactions ###
+        "/transactions/info": transaction.info,
+        "/transactions/unpack": transaction.unpack,
+        "/transactions/<tx_hash>": ledger.get_transaction,
+        ### /addresses ###
+        "/addresses/<address>/balances": ledger.get_address_balances,
+        "/addresses/<address>/balances/<asset>": ledger.get_balance_object,
+        "/addresses/<address>/credits": ledger.get_credits_by_address,
+        "/addresses/<address>/debits": ledger.get_debits_by_address,
+        "/addresses/<address>/bets": ledger.get_bet_by_feed,
+        "/addresses/<address>/broadcasts": ledger.get_broadcasts_by_source,
+        "/addresses/<address>/burns": ledger.get_burns_by_address,
+        "/addresses/<address>/sends": ledger.get_send_by_address,
+        "/addresses/<address>/receives": ledger.get_receive_by_address,
+        "/addresses/<address>/sends/<asset>": ledger.get_send_by_address_and_asset,
+        "/addresses/<address>/receives/<asset>": ledger.get_receive_by_address_and_asset,
+        "/addresses/<address>/dispensers": ledger.get_dispensers_by_address,
+        "/addresses/<address>/dispensers/<asset>": ledger.get_dispensers_by_address_and_asset,
+        "/addresses/<address>/sweeps": ledger.get_sweeps_by_address,
+        ### /address/<address>/compose/ ###
+        "/address/<address>/compose/bet": transaction.compose_bet,
+        "/address/<address>/compose/broadcast": transaction.compose_broadcast,
+        "/address/<address>/compose/btcpay": transaction.compose_btcpay,
+        "/address/<address>/compose/burn": transaction.compose_burn,
+        "/address/<address>/compose/cancel": transaction.compose_cancel,
+        "/address/<address>/compose/destroy": transaction.compose_destroy,
+        "/address/<address>/compose/dispenser": transaction.compose_dispenser,
+        "/address/<address>/compose/dividend": transaction.compose_dividend,
+        "/address/<address>/compose/issuance": transaction.compose_issuance,
+        "/address/<address>/compose/mpma": transaction.compose_mpma,
+        "/address/<address>/compose/order": transaction.compose_order,
+        "/address/<address>/compose/send": transaction.compose_send,
+        "/address/<address>/compose/sweep": transaction.compose_sweep,
+        ### /assets ###
+        "/assets": ledger.get_valid_assets,
+        "/assets/<asset>": ledger.get_asset_info,
+        "/assets/<asset>/balances": ledger.get_asset_balances,
+        "/assets/<asset>/balances/<address>": ledger.get_balance_object,
+        "/assets/<asset>/orders": ledger.get_orders_by_asset,
+        "/assets/<asset>/credits": ledger.get_credits_by_asset,
+        "/assets/<asset>/debits": ledger.get_debits_by_asset,
+        "/assets/<asset>/dividends": ledger.get_dividends,
+        "/assets/<asset>/issuances": ledger.get_issuances_by_asset,
+        "/assets/<asset>/sends": ledger.get_sends_or_receives_by_asset,
+        "/assets/<asset>/dispensers": ledger.get_dispensers_by_asset,
+        "/assets/<asset>/dispensers/<address>": ledger.get_dispensers_by_address_and_asset,
+        "/assets/<asset>/holders": ledger.get_asset_holders,
+        ### /orders ###
+        "/orders/<tx_hash>": ledger.get_order,
+        "/orders/<tx_hash>/matches": ledger.get_order_matches_by_order,
+        "/orders/<tx_hash>/btcpays": ledger.get_btcpays_by_order,
+        ### /bets ###
+        "/bets/<tx_hash>": ledger.get_bet,
+        "/bets/<tx_hash>/matches": ledger.get_bet_matches_by_bet,
+        "/bets/<tx_hash>/resolutions": ledger.get_resolutions_by_bet,
+        ### /burns ###
+        "/burns": ledger.get_all_burns,
+        ### /dispensers ###
+        "/dispensers/<tx_hash>": ledger.get_dispenser_info_by_tx_hash,
+        "/dispensers/<tx_hash>/dispenses": ledger.get_dispenses_by_dispenser,
+        ### /events ###
+        "/events": ledger.get_all_events,
+        "/events/<int:event_index>": ledger.get_event_by_index,
+        "/events/counts": ledger.get_all_events_counts,
+        "/events/<event>": ledger.get_events_by_event,
+        ### /healthz ###
+        "/healthz": util.handle_healthz_route,
+        ### /backend ###
+        "/backend/addresses/<address>/transactions": backend.search_raw_transactions,
+        "/backend/addresses/<address>/transactions/oldest": backend.get_oldest_tx,
+        "/backend/addresses/<address>/utxos": backend.get_unspent_txouts,
+        "/backend/addresses/<address>/pubkey": util.pubkeyhash_to_pubkey,
+        "/backend/transactions/<tx_hash>": util.get_raw_transaction,
+        "/backend/estimatesmartfee": backend.fee_per_kb,
+        ### /mempool ###
+        "/mempool/events": ledger.get_all_mempool_events,
+        "/mempool/events/<event_name>": ledger.get_mempool_events_by_event,
+    }
+)
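+
+# Each entry maps a Flask-style path to its handler; path placeholders such as
+# <int:block_index> or <asset> are forwarded to the handler as keyword arguments
+# by `handle_route` in `api_server.py`, and any remaining parameters are filled
+# from the query string according to the specs built by `util.prepare_routes`.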
diff --git a/counterparty-core/counterpartycore/lib/api/util.py b/counterparty-core/counterpartycore/lib/api/util.py
new file mode 100644
index 0000000000..06d0fa0713
--- /dev/null
+++ b/counterparty-core/counterpartycore/lib/api/util.py
@@ -0,0 +1,195 @@
+import inspect
+import logging
+from logging import handlers as logging_handlers
+
+import flask
+from counterpartycore.lib import backend, config, exceptions, ledger, transaction
+from docstring_parser import parse as parse_docstring
+
+logger = logging.getLogger(config.LOGGER_NAME)
+
+
+def check_last_parsed_block(blockcount):
+ """Checks database to see if is caught up with backend."""
+ if ledger.CURRENT_BLOCK_INDEX + 1 < blockcount:
+ raise exceptions.DatabaseError(f"{config.XCP_NAME} database is behind backend.")
+ logger.debug("Database state check passed.")
+
+
+def healthz_light(db):
+ logger.debug("Performing light healthz check.")
+ latest_block_index = backend.getblockcount()
+ check_last_parsed_block(latest_block_index)
+
+
+def healthz_heavy(db):
+ logger.debug("Performing heavy healthz check.")
+ transaction.compose_transaction(
+ db,
+ name="send",
+ params={
+ "source": config.UNSPENDABLE,
+ "destination": config.UNSPENDABLE,
+ "asset": config.XCP,
+ "quantity": 100000000,
+ },
+ allow_unconfirmed_inputs=True,
+ fee=1000,
+ )
+
+
+def healthz(db, check_type="heavy"):
+ try:
+ if check_type == "light":
+ healthz_light(db)
+ else:
+ healthz_light(db)
+ healthz_heavy(db)
+ except Exception as e:
+ logger.error(f"Health check failed: {e}")
+ return False
+ return True
+
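+# The "light" check only verifies that the database is caught up with the
+# backend; the "heavy" check additionally composes (but never broadcasts) a
+# dummy XCP send from the unspendable address to itself to exercise the
+# compose path.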
+
+def handle_healthz_route(db, check_type: str = "heavy"):
+ """
+ Health check route.
+ :param check_type: Type of health check to perform. Options are 'light' and 'heavy'.
+ """
+ msg, code = "Healthy", 200
+ if not healthz(db, check_type):
+ msg, code = "Unhealthy", 503
+ return flask.Response(msg, code, mimetype="application/json")
+
+
+def remove_rowids(query_result):
+ """Remove the rowid field from the query result."""
+ if isinstance(query_result, list):
+ filtered_results = []
+ for row in list(query_result):
+ if "rowid" in row:
+ del row["rowid"]
+ if "MAX(rowid)" in row:
+ del row["MAX(rowid)"]
+ filtered_results.append(row)
+ return filtered_results
+ if isinstance(query_result, dict):
+ filtered_results = query_result
+ if "rowid" in filtered_results:
+ del filtered_results["rowid"]
+ if "MAX(rowid)" in filtered_results:
+ del filtered_results["MAX(rowid)"]
+ return filtered_results
+ return query_result
+
+
+def getrawtransactions(tx_hashes, verbose=False, skip_missing=False, _retry=0):
+ txhash_list = tx_hashes.split(",")
+ return backend.getrawtransaction_batch(txhash_list, verbose, skip_missing, _retry)
+
+
+def pubkeyhash_to_pubkey(address: str, provided_pubkeys: str = None):
+ """
+ Get pubkey for an address.
+ :param address: Address to get pubkey for.
+ :param provided_pubkeys: Comma separated list of provided pubkeys.
+ """
+ if provided_pubkeys:
+ provided_pubkeys_list = provided_pubkeys.split(",")
+ else:
+ provided_pubkeys_list = None
+ return backend.pubkeyhash_to_pubkey(address, provided_pubkeys=provided_pubkeys_list)
+
+
+def get_raw_transaction(tx_hash: str, verbose: bool = False):
+ """
+ Get a raw transaction from the blockchain
+ :param tx_hash: The transaction hash
+ :param verbose: Whether to return JSON output or raw hex
+ """
+ return backend.getrawtransaction(tx_hash, verbose=verbose)
+
+
+def get_backend_height():
+ block_count = backend.getblockcount()
+ blocks_behind = backend.getindexblocksbehind()
+ return block_count + blocks_behind
+
+
+def init_api_access_log(flask_app):
+ """Initialize API logger."""
+ flask_app.logger.removeHandler(flask.logging.default_handler)
+ flask_app.logger.setLevel(logging.DEBUG)
+ werkzeug_logger = logging.getLogger("werkzeug")
+ while len(werkzeug_logger.handlers) > 0:
+ werkzeug_logger.removeHandler(werkzeug_logger.handlers[0])
+
+ # Log to file, if configured...
+ if config.API_LOG:
+ handler = logging_handlers.RotatingFileHandler(
+ config.API_LOG, "a", config.API_MAX_LOG_SIZE, config.API_MAX_LOG_COUNT
+ )
+ handler.propagate = False
+ handler.setLevel(logging.DEBUG)
+ flask_app.logger.addHandler(handler)
+ werkzeug_logger.addHandler(handler)
+
+ flask.cli.show_server_banner = lambda *args: None
+
+
+def get_args_description(function):
+ docstring = parse_docstring(function.__doc__)
+ args = {}
+ for param in docstring.params:
+ args[param.arg_name] = param.description
+ return args
+
+
+def prepare_route_args(function):
+ args = []
+ function_args = inspect.signature(function).parameters
+ args_description = get_args_description(function)
+ for arg_name, arg in function_args.items():
+ if arg_name == "construct_args":
+ for carg_name, carg_info in transaction.COMPOSE_COMMONS_ARGS.items():
+ args.append(
+ {
+ "name": carg_name,
+ "type": carg_info[0].__name__,
+ "default": carg_info[1],
+ "description": carg_info[2],
+ "required": False,
+ }
+ )
+ continue
+ annotation = arg.annotation
+ if annotation is inspect.Parameter.empty:
+ continue
+ route_arg = {"name": arg_name}
+ default = arg.default
+ if default is not inspect.Parameter.empty:
+ route_arg["default"] = default
+ route_arg["required"] = False
+ else:
+ route_arg["required"] = True
+ route_arg["type"] = arg.annotation.__name__
+ if arg_name in args_description:
+ route_arg["description"] = args_description[arg_name]
+ args.append(route_arg)
+ return args
+
+
+def get_function_description(function):
+ docstring = parse_docstring(function.__doc__)
+ return docstring.description
+
+
+def prepare_routes(routes):
+ prepared_routes = {}
+ for route, function in routes.items():
+ prepared_routes[route] = {
+ "function": function,
+ "description": get_function_description(function),
+ "args": prepare_route_args(function),
+ }
+ return prepared_routes
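+
+
+# Illustrative shape of a prepared route (an assumption based on the code above,
+# not captured output; actual values depend on each handler's signature and
+# docstring):
+#
+#   ROUTES["/assets/<asset>"] == {
+#       "function": ledger.get_asset_info,
+#       "description": "Returns the asset information",
+#       "args": [
+#           {"name": "asset", "type": "str", "required": True,
+#            "description": "The asset to return"},
+#       ],
+#   }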
diff --git a/counterparty-core/counterpartycore/lib/backend/__init__.py b/counterparty-core/counterpartycore/lib/backend/__init__.py
index bf476b658e..4ed53f602a 100644
--- a/counterparty-core/counterpartycore/lib/backend/__init__.py
+++ b/counterparty-core/counterpartycore/lib/backend/__init__.py
@@ -72,7 +72,9 @@ def clear_pretx(txid):
del PRETX_CACHE[binascii.hexlify(txid).decode("utf8")]
-def getrawtransaction(tx_hash, verbose=False, skip_missing=False, block_index=None):
+def getrawtransaction(
+ tx_hash: str, verbose: bool = False, skip_missing: bool = False, block_index: int = None
+):
if block_index and block_index in prefetcher.BLOCKCHAIN_CACHE:
return prefetcher.BLOCKCHAIN_CACHE[block_index]["raw_transactions"][tx_hash]
@@ -124,14 +126,15 @@ def ensure_script_pub_key_for_inputs(coins):
return coins
-def fee_per_kb(conf_target, mode, nblocks=None):
+def fee_per_kb(
+ conf_target: int = config.ESTIMATE_FEE_CONF_TARGET, mode: str = config.ESTIMATE_FEE_MODE
+):
"""
- :param conf_target:
- :param mode:
- :return: fee_per_kb in satoshis, or None when unable to determine
+ Get the fee per kilobyte for a transaction to be confirmed in `conf_target` blocks.
+ :param conf_target: Confirmation target in blocks (1 - 1008)
+ :param mode: The fee estimate mode.
"""
-
- return backend().fee_per_kb(conf_target, mode, nblocks=nblocks)
+ return backend().fee_per_kb(conf_target, mode, nblocks=None)
def deserialize(tx_hex):
@@ -207,9 +210,12 @@ class MempoolError(Exception):
pass
-def get_unspent_txouts(source, unconfirmed=False, unspent_tx_hash=None):
- """returns a list of unspent outputs for a specific address
- @return: A list of dicts, with each entry in the dict having the following keys:
+def get_unspent_txouts(source: str, unconfirmed: bool = False, unspent_tx_hash: str = None):
+ """
+ Returns a list of unspent outputs for a specific address
+ :param source: The address to search for
+ :param unconfirmed: Include unconfirmed transactions
+ :param unspent_tx_hash: Filter by unspent_tx_hash
"""
unspent = backend().get_unspent_txouts(source)
@@ -232,11 +238,22 @@ def get_unspent_txouts(source, unconfirmed=False, unspent_tx_hash=None):
return unspent
-def search_raw_transactions(address, unconfirmed=True, only_tx_hashes=False):
+def search_raw_transactions(address, unconfirmed: bool = True, only_tx_hashes: bool = False):
+ """
+ Returns all transactions involving a given address
+ :param address: The address to search for
+ :param unconfirmed: Include unconfirmed transactions
+ :param only_tx_hashes: Return only the tx hashes
+ """
return backend().search_raw_transactions(address, unconfirmed, only_tx_hashes)
-def get_oldest_tx(address, block_index=None):
+def get_oldest_tx(address: str, block_index: int = None):
+ """
+ Get the oldest transaction for an address.
+ :param address: The address to search for.
+ :param block_index: The block index to search from.
+ """
return backend().get_oldest_tx(address, block_index=block_index)
diff --git a/counterparty-core/counterpartycore/lib/backend/addrindexrs.py b/counterparty-core/counterpartycore/lib/backend/addrindexrs.py
index 2ea4fd4d9c..4bd492b0d3 100644
--- a/counterparty-core/counterpartycore/lib/backend/addrindexrs.py
+++ b/counterparty-core/counterpartycore/lib/backend/addrindexrs.py
@@ -667,7 +667,7 @@ def get_unspent_txouts(source):
# ]
#
# }
-def search_raw_transactions(address, unconfirmed=True, only_tx_hashes=False):
+def search_raw_transactions(address, unconfirmed: bool = True, only_tx_hashes: bool = False):
hsh = _address_to_hash(address)
txs = INDEXER_THREAD.send({"method": "blockchain.scripthash.get_history", "params": [hsh]})[
"result"
@@ -801,7 +801,7 @@ def get_oldest_tx(self, address, timeout=ADDRINDEXRS_CLIENT_TIMEOUT, block_index
ADDRINDEXRS_CLIENT = None
-def get_oldest_tx(address, block_index=None):
+def get_oldest_tx(address: str, block_index: int = None):
current_block_index = block_index or ledger.CURRENT_BLOCK_INDEX
hardcoded_key = f"{current_block_index}-{address}"
if hardcoded_key in GET_OLDEST_TX_HARDCODED:
diff --git a/counterparty-core/counterpartycore/lib/blocks.py b/counterparty-core/counterpartycore/lib/blocks.py
index be904ee828..8bfbda920f 100644
--- a/counterparty-core/counterpartycore/lib/blocks.py
+++ b/counterparty-core/counterpartycore/lib/blocks.py
@@ -628,6 +628,9 @@ def initialise(db):
bindings TEXT,
timestamp INTEGER)
""")
+ columns = [column["name"] for column in cursor.execute("""PRAGMA table_info(mempool)""")]
+ if "event" not in columns:
+ cursor.execute("""ALTER TABLE mempool ADD COLUMN event TEXT""")
# Lock UPDATE on all tables
for table in TABLES:
@@ -898,9 +901,6 @@ def follow(db):
check.software_version()
last_software_check = time.time()
- # Initialise.
- initialise(db)
-
# Get index of last block.
if ledger.CURRENT_BLOCK_INDEX == 0:
logger.warning("New database.")
@@ -1254,7 +1254,7 @@ def follow(db):
tx_hash, new_message = message
new_message["tx_hash"] = tx_hash
cursor.execute(
- """INSERT INTO mempool VALUES(:tx_hash, :command, :category, :bindings, :timestamp)""",
+ """INSERT INTO mempool VALUES(:tx_hash, :command, :category, :bindings, :timestamp, :event)""",
new_message,
)
diff --git a/counterparty-core/counterpartycore/lib/config.py b/counterparty-core/counterpartycore/lib/config.py
index 2560075478..e2f88c2a87 100644
--- a/counterparty-core/counterpartycore/lib/config.py
+++ b/counterparty-core/counterpartycore/lib/config.py
@@ -53,9 +53,13 @@
FULL_APP_NAME = "Counterparty Core"
LOGGER_NAME = APP_NAME
-DEFAULT_RPC_PORT_REGTEST = 24000
-DEFAULT_RPC_PORT_TESTNET = 14000
-DEFAULT_RPC_PORT = 4000
+DEFAULT_API_PORT_REGTEST = 24000
+DEFAULT_API_PORT_TESTNET = 14000
+DEFAULT_API_PORT = 4000
+
+DEFAULT_RPC_PORT_REGTEST = 24100
+DEFAULT_RPC_PORT_TESTNET = 14100
+DEFAULT_RPC_PORT = 4100
DEFAULT_BACKEND_PORT_REGTEST = 28332
DEFAULT_BACKEND_PORT_TESTNET = 18332
@@ -151,5 +155,7 @@
BOOTSTRAP_URL_MAINNET = "https://bootstrap.counterparty.io/counterparty.latest.tar.gz"
BOOTSTRAP_URL_TESTNET = "https://bootstrap.counterparty.io/counterparty-testnet.latest.tar.gz"
-
-# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
+API_MAX_LOG_SIZE = (
+    10 * 1024 * 1024
+)  # max log size of 10 MB before rotation (make configurable later)
+API_MAX_LOG_COUNT = 10
diff --git a/counterparty-core/counterpartycore/lib/exceptions.py b/counterparty-core/counterpartycore/lib/exceptions.py
index 07f846c577..cbe5260481 100644
--- a/counterparty-core/counterpartycore/lib/exceptions.py
+++ b/counterparty-core/counterpartycore/lib/exceptions.py
@@ -83,4 +83,8 @@ class ComposeTransactionError(Exception):
pass
+class InvalidArgument(Exception):
+ pass
+
+
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
diff --git a/counterparty-core/counterpartycore/lib/ledger.py b/counterparty-core/counterpartycore/lib/ledger.py
index 4870cbadc5..6700c1a560 100644
--- a/counterparty-core/counterpartycore/lib/ledger.py
+++ b/counterparty-core/counterpartycore/lib/ledger.py
@@ -6,7 +6,7 @@
import time
from decimal import Decimal as D
-from counterpartycore.lib import config, exceptions, log, util
+from counterpartycore.lib import backend, config, exceptions, log, util
logger = logging.getLogger(config.LOGGER_NAME)
@@ -64,6 +64,153 @@ def get_messages(db, block_index=None, block_index_in=None, message_index_in=Non
return cursor.fetchall()
+def get_events(db, block_index=None, event=None, event_index=None, last=None, limit=None):
+ cursor = db.cursor()
+ where = []
+ bindings = []
+ if block_index is not None:
+ where.append("block_index = ?")
+ bindings.append(block_index)
+ if event is not None:
+ where.append("event = ?")
+ bindings.append(event)
+ if event_index is not None:
+ where.append("message_index = ?")
+ bindings.append(event_index)
+    if last is not None:
+        where.append("message_index <= ?")
+        bindings.append(last)
+    if len(where) == 0:
+        # no filters were supplied (e.g. `/events`); avoid an empty WHERE clause
+        where.append("1 = 1")
+ if block_index is None and limit is None:
+ limit = 100
+ if limit is not None:
+ limit = f"LIMIT {int(limit)}"
+ else:
+ limit = ""
+ # no sql injection here
+ query = f"""
+ SELECT message_index AS event_index, event, bindings, block_index, timestamp
+ FROM messages
+ WHERE ({" AND ".join(where)})
+ ORDER BY message_index DESC {limit}
+ """ # nosec B608 # noqa: S608
+ cursor.execute(query, tuple(bindings))
+ events = cursor.fetchall()
+ for i, _ in enumerate(events):
+ events[i]["bindings"] = json.loads(events[i]["bindings"])
+ return events
+
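+# Illustrative call (the block number is only an example):
+#   get_events(db, block_index=780000, event="CREDIT")
+# returns that block's CREDIT events, newest first, with the `bindings` column
+# already JSON-decoded.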
+
+def get_all_events(db, last: int = None, limit: int = 100):
+ """
+ Returns all events
+ :param int last: The last event index to return
+ :param int limit: The maximum number of events to return
+ """
+ return get_events(db, last=last, limit=limit)
+
+
+def get_events_by_block(db, block_index: int):
+ """
+ Returns the events of a block
+ :param int block_index: The index of the block to return
+ """
+ return get_events(db, block_index=block_index)
+
+
+def get_events_by_block_and_event(db, block_index: int, event: str):
+ """
+ Returns the events of a block filtered by event
+ :param int block_index: The index of the block to return
+ :param str event: The event to filter by
+ """
+ return get_events(db, block_index=block_index, event=event)
+
+
+def get_event_by_index(db, event_index: int):
+ """
+ Returns the event of an index
+ :param int event_index: The index of the event to return
+ """
+ return get_events(db, event_index=event_index)
+
+
+def get_events_by_event(db, event: str, last: int = None, limit: int = 100):
+ """
+ Returns the events filtered by event name
+ :param str event: The event to return
+ :param int last: The last event index to return
+ :param int limit: The maximum number of events to return
+ """
+ return get_events(db, event=event, last=last, limit=limit)
+
+
+def get_mempool_events(db, event_name=None):
+ cursor = db.cursor()
+ where = []
+ bindings = []
+ if event_name is not None:
+ where.append("event = ?")
+ bindings.append(event_name)
+ # no sql injection here
+ query = """
+ SELECT tx_hash, event, bindings, timestamp
+ FROM mempool
+ """
+ if event_name is not None:
+ query += f"""WHERE ({" AND ".join(where)})""" # nosec B608 # noqa: S608
+    query += """ ORDER BY timestamp DESC"""
+ cursor.execute(query, tuple(bindings))
+ events = cursor.fetchall()
+ for i, _ in enumerate(events):
+ events[i]["bindings"] = json.loads(events[i]["bindings"])
+ return events
+
+
+def get_all_mempool_events(db):
+ """
+ Returns all mempool events
+ """
+ return get_mempool_events(db)
+
+
+def get_mempool_events_by_event(db, event_name: str):
+ """
+ Returns the mempool events filtered by event name
+ :param str event_name: The event to return
+ """
+ return get_mempool_events(db, event_name=event_name)
+
+
+def get_events_counts(db, block_index=None):
+ cursor = db.cursor()
+ bindings = []
+ query = """
+ SELECT event, COUNT(*) AS event_count
+ FROM messages
+ """
+ if block_index is not None:
+        query += "WHERE block_index = ? "
+        bindings.append(block_index)
+    query += "GROUP BY event"
+    cursor.execute(query, tuple(bindings))
+ return cursor.fetchall()
+
+
+def get_events_counts_by_block(db, block_index: int):
+ """
+ Returns the event counts of a block
+ :param int block_index: The index of the block to return
+ """
+ return get_events_counts(db, block_index=block_index)
+
+
+def get_all_events_counts(db):
+ """
+ Returns the event counts of all blocks
+ """
+ return get_events_counts(db)
+
+
# we are using a function here for testing purposes
def curr_time():
return int(time.time())
@@ -285,7 +432,24 @@ def get_balance(db, address, asset, raise_error_if_no_balance=False, return_list
return balances[0]["quantity"]
-def get_address_balances(db, address):
+def get_balance_object(db, address: str, asset: str):
+ """
+ Returns the balance of an address and asset
+ :param str address: The address to return
+ :param str asset: The asset to return
+ """
+ return {
+ "address": address,
+ "asset": asset,
+ "quantity": get_balance(db, address, asset),
+ }
+
+
+def get_address_balances(db, address: str):
+ """
+ Returns the balances of an address
+ :param str address: The address to return
+ """
cursor = db.cursor()
query = """
SELECT address, asset, quantity, MAX(rowid)
@@ -326,6 +490,208 @@ def get_balances_count(db, address):
return cursor.fetchall()
+def get_credits_or_debits(db, table, address=None, asset=None, block_index=None, tx_index=None):
+ cursor = db.cursor()
+ where = []
+ bindings = []
+ if address is not None:
+ where.append("address = ?")
+ bindings.append(address)
+ if asset is not None:
+ where.append("asset = ?")
+ bindings.append(asset)
+ if block_index is not None:
+ where.append("block_index = ?")
+ bindings.append(block_index)
+ if tx_index is not None:
+ where.append("tx_index = ?")
+ bindings.append(tx_index)
+ # no sql injection here
+ query = f"""SELECT * FROM {table} WHERE ({" AND ".join(where)})""" # nosec B608 # noqa: S608
+ cursor.execute(query, tuple(bindings))
+ return cursor.fetchall()
+
+
+def get_credits(db, address=None, asset=None, block_index=None, tx_index=None):
+ return get_credits_or_debits(db, "credits", address, asset, block_index, tx_index)
+
+
+def get_credits_by_block(db, block_index: int):
+ """
+ Returns the credits of a block
+ :param int block_index: The index of the block to return
+ """
+ return get_credits(db, block_index=block_index)
+
+
+def get_credits_by_address(db, address: str):
+ """
+ Returns the credits of an address
+ :param str address: The address to return
+ """
+ return get_credits(db, address=address)
+
+
+def get_credits_by_asset(db, asset: str):
+ """
+ Returns the credits of an asset
+ :param str asset: The asset to return
+ """
+ return get_credits(db, asset=asset)
+
+
+def get_debits(db, address=None, asset=None, block_index=None, tx_index=None):
+ return get_credits_or_debits(db, "debits", address, asset, block_index, tx_index)
+
+
+def get_debits_by_block(db, block_index: int):
+ """
+ Returns the debits of a block
+ :param int block_index: The index of the block to return
+ """
+ return get_debits(db, block_index=block_index)
+
+
+def get_debits_by_address(db, address: str):
+ """
+ Returns the debits of an address
+ :param str address: The address to return
+ """
+ return get_debits(db, address=address)
+
+
+def get_debits_by_asset(db, asset: str):
+ """
+ Returns the debits of an asset
+ :param str asset: The asset to return
+ """
+ return get_debits(db, asset=asset)
+
+
+def get_sends_or_receives(
+ db, source=None, destination=None, asset=None, block_index=None, status="valid"
+):
+ cursor = db.cursor()
+ where = []
+ bindings = []
+ if source is not None:
+ where.append("source = ?")
+ bindings.append(source)
+ if destination is not None:
+ where.append("destination = ?")
+ bindings.append(destination)
+ if asset is not None:
+ where.append("asset = ?")
+ bindings.append(asset)
+ if block_index is not None:
+ where.append("block_index = ?")
+ bindings.append(block_index)
+ if status is not None:
+ where.append("status = ?")
+ bindings.append(status)
+ # no sql injection here
+ query = f"""SELECT * FROM sends WHERE ({" AND ".join(where)})""" # nosec B608 # noqa: S608
+ cursor.execute(query, tuple(bindings))
+ return cursor.fetchall()
+
+
+def get_sends_or_receives_by_block(db, block_index: int):
+ """
+ Returns the sends of a block
+ :param int block_index: The index of the block to return
+ """
+ return get_sends_or_receives(db, block_index=block_index)
+
+
+def get_sends_or_receives_by_asset(db, asset: str):
+ """
+ Returns the sends of an asset
+ :param str asset: The asset to return
+ """
+ return get_sends_or_receives(db, asset=asset)
+
+
+def get_sends(db, address=None, asset=None, block_index=None, status="valid"):
+ return get_sends_or_receives(
+ db, source=address, asset=asset, block_index=block_index, status=status
+ )
+
+
+def get_send_by_address(db, address: str):
+ """
+ Returns the sends of an address
+ :param str address: The address to return
+ """
+ return get_sends(db, address=address)
+
+
+def get_send_by_address_and_asset(db, address: str, asset: str):
+ """
+ Returns the sends of an address and asset
+ :param str address: The address to return
+ :param str asset: The asset to return
+ """
+ return get_sends(db, address=address, asset=asset)
+
+
+def get_receives(db, address=None, asset=None, block_index=None, status="valid"):
+ return get_sends_or_receives(
+ db, destination=address, asset=asset, block_index=block_index, status=status
+ )
+
+
+def get_receive_by_address(db, address: str):
+ """
+ Returns the receives of an address
+ :param str address: The address to return
+ """
+ return get_receives(db, address=address)
+
+
+def get_receive_by_address_and_asset(db, address: str, asset: str):
+ """
+ Returns the receives of an address and asset
+ :param str address: The address to return
+ :param str asset: The asset to return
+ """
+ return get_receives(db, address=address, asset=asset)
+
+
+def get_sweeps(db, address=None, block_index=None, status="valid"):
+ cursor = db.cursor()
+ where = []
+ bindings = []
+ if address is not None:
+ where.append("source = ?")
+ bindings.append(address)
+ if block_index is not None:
+ where.append("block_index = ?")
+ bindings.append(block_index)
+ if status is not None:
+ where.append("status = ?")
+ bindings.append(status)
+ # no sql injection here
+ query = f"""SELECT * FROM sweeps WHERE ({" AND ".join(where)})""" # nosec B608 # noqa: S608
+ cursor.execute(query, tuple(bindings))
+ return cursor.fetchall()
+
+
+def get_sweeps_by_block(db, block_index: int):
+ """
+ Returns the sweeps of a block
+ :param int block_index: The index of the block to return
+ """
+ return get_sweeps(db, block_index=block_index)
+
+
+def get_sweeps_by_address(db, address: str):
+ """
+ Returns the sweeps of an address
+ :param str address: The address to return
+ """
+ return get_sweeps(db, address=address)
+
+
#####################
# ISSUANCES #
#####################
@@ -574,7 +940,12 @@ def get_asset_issued(db, address):
return cursor.fetchall()
-def get_asset_balances(db, asset, exclude_zero_balances=True):
+def get_asset_balances(db, asset: str, exclude_zero_balances: bool = True):
+ """
+ Returns the asset balances
+ :param str asset: The asset to return
+ :param bool exclude_zero_balances: Whether to exclude zero balances
+ """
cursor = db.cursor()
query = """
SELECT address, asset, quantity, MAX(rowid)
@@ -608,22 +979,63 @@ def get_asset_issuances_quantity(db, asset):
return issuances[0]["issuances_count"]
-def get_asset_info(db, asset):
- if asset == config.BTC or asset == config.XCP:
- return {"divisible": True}
+def get_asset_info(db, asset: str):
+ """
+ Returns the asset information
+ :param str asset: The asset to return
+ """
+ asset_name = resolve_subasset_longname(db, asset)
+
+ # Defaults.
+ asset_info = {
+ "asset": asset_name,
+ "asset_longname": None,
+ "owner": None,
+ "divisible": True,
+ "locked": False,
+ "supply": 0,
+ "description": "",
+ "issuer": None,
+ }
+
+ if asset_name == config.BTC:
+ asset_info["supply"] = backend.get_btc_supply(normalize=False)
+ return asset_info
+
+ if asset_name == config.XCP:
+ asset_info["supply"] = xcp_supply(db)
+ asset_info["holder_count"] = get_asset_holder_count(db, asset)
+ return asset_info
+
+ asset_info["supply"] = asset_supply(db, asset_name)
+ asset_info["holder_count"] = get_asset_holder_count(db, asset)
+
cursor = db.cursor()
query = """
SELECT * FROM issuances
WHERE (status = ? AND asset = ?)
- ORDER BY tx_index DESC
+ ORDER BY rowid DESC
+ LIMIT 1
"""
bindings = ("valid", asset)
cursor.execute(query, bindings)
- issuances = cursor.fetchall()
- return issuances[0]
+ issuance = cursor.fetchone()
+
+ asset_info = asset_info | {
+ "asset_longname": issuance["asset_longname"],
+ "owner": issuance["issuer"],
+ "divisible": bool(issuance["divisible"]),
+ "locked": bool(issuance["locked"]),
+ "description": issuance["description"],
+ "issuer": issuance["issuer"],
+ }
+
+ return asset_info
-def get_issuances(db, asset=None, status=None, locked=None, first=False, last=False):
+def get_issuances(
+ db, asset=None, status=None, locked=None, block_index=None, first=False, last=False
+):
cursor = db.cursor()
where = []
@@ -637,6 +1049,9 @@ def get_issuances(db, asset=None, status=None, locked=None, first=False, last=Fa
if locked is not None:
where.append("locked = ?")
bindings.append(locked)
+ if block_index is not None:
+ where.append("block_index = ?")
+ bindings.append(block_index)
# no sql injection here
query = f"""SELECT * FROM issuances WHERE ({" AND ".join(where)})""" # nosec B608 # noqa: S608
if first:
@@ -647,6 +1062,22 @@ def get_issuances(db, asset=None, status=None, locked=None, first=False, last=Fa
return cursor.fetchall()
+def get_issuances_by_block(db, block_index: int):
+ """
+ Returns the issuances of a block
+    :param int block_index: The index of the block whose issuances to return
+ """
+ return get_issuances(db, block_index=block_index)
+
+
+def get_issuances_by_asset(db, asset: str):
+ """
+ Returns the issuances of an asset
+ :param str asset: The asset to return
+ """
+ return get_issuances(db, asset=asset)
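+
+# Example calls for the new `block_index` filter (placeholder values):
+#
+#     get_issuances_by_block(db, 780000)     # == get_issuances(db, block_index=780000)
+#     get_issuances_by_asset(db, "MYASSET")  # == get_issuances(db, asset="MYASSET")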
+
+
def get_assets_by_longname(db, asset_longname):
cursor = db.cursor()
query = """
@@ -658,7 +1089,17 @@ def get_assets_by_longname(db, asset_longname):
return cursor.fetchall()
-def get_valid_assets(db):
+def get_valid_assets(db, offset: int = 0, limit: int = 100):
+ """
+ Returns the valid assets
+    :param int offset: The number of assets to skip
+    :param int limit: The maximum number of assets to return
+ """
+ try:
+ int(offset)
+ int(limit)
+ except ValueError as e:
+ raise exceptions.InvalidArgument("Invalid offset or limit parameter") from e
cursor = db.cursor()
query = """
SELECT asset, asset_longname
@@ -671,6 +1112,21 @@ def get_valid_assets(db):
return cursor.fetchall()
+def get_dividends(db, asset: str):
+ """
+ Returns the dividends of an asset
+    :param str asset: The asset whose dividends to return
+ """
+ cursor = db.cursor()
+ query = """
+ SELECT * FROM dividends
+ WHERE asset = ? AND status = ?
+ """
+ bindings = (asset, "valid")
+ cursor.execute(query, bindings)
+ return cursor.fetchall()
+
+
#####################
# BROADCASTS #
#####################
@@ -706,14 +1162,22 @@ def get_oracle_last_price(db, oracle_address, block_index):
)
-def get_broadcasts_by_source(db, source, status):
+def get_broadcasts_by_source(db, address: str, status: str = "valid", order_by: str = "DESC"):
+ """
+ Returns the broadcasts of a source
+    :param str address: The address whose broadcasts to return
+    :param str status: The status of the broadcasts to return
+    :param str order_by: The sort order of the broadcasts ('ASC' or 'DESC')
+ """
+ if order_by not in ["ASC", "DESC"]:
+ raise exceptions.InvalidArgument("Invalid order_by parameter")
cursor = db.cursor()
- query = """
+ query = f"""
SELECT * FROM broadcasts
WHERE (status = ? AND source = ?)
- ORDER BY tx_index ASC
- """
- bindings = (status, source)
+ ORDER BY tx_index {order_by}
+ """ # nosec B608 # noqa: S608
+ bindings = (status, address)
cursor.execute(query, bindings)
return cursor.fetchall()
@@ -723,25 +1187,110 @@ def get_broadcasts_by_source(db, source, status):
#####################
-def get_burns(db, status=None, source=None):
+def get_burns(db, address: str = None, status: str = "valid"):
+ """
+ Returns the burns of an address
+    :param str address: The address whose burns to return
+ :param str status: The status of the burns to return
+ """
cursor = db.cursor()
where = []
bindings = []
if status is not None:
where.append("status = ?")
bindings.append(status)
- if source is not None:
+ if address is not None:
where.append("source = ?")
- bindings.append(source)
+ bindings.append(address)
# no sql injection here
query = f"""SELECT * FROM burns WHERE ({" AND ".join(where)})""" # nosec B608 # noqa: S608
cursor.execute(query, tuple(bindings))
return cursor.fetchall()
-###########################
-# TRANSACTIONS #
-###########################
+def get_burns_by_address(db, address: str):
+ """
+ Returns the burns of an address
+    :param str address: The address whose burns to return
+ """
+ return get_burns(db, address=address)
+
+
+def get_all_burns(db, status: str = "valid", offset: int = 0, limit: int = 100):
+ """
+ Returns the burns
+ :param str status: The status of the burns to return
+    :param int offset: The number of burns to skip
+    :param int limit: The maximum number of burns to return
+ """
+ try:
+ int(offset)
+ int(limit)
+ except ValueError as e:
+ raise exceptions.InvalidArgument("Invalid offset or limit parameter") from e
+ cursor = db.cursor()
+ query = """
+ SELECT * FROM burns
+ WHERE status = ?
+ ORDER BY tx_index ASC
+ LIMIT ? OFFSET ?
+ """
+ bindings = (status, limit, offset)
+ cursor.execute(query, bindings)
+ return cursor.fetchall()
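+
+# Pagination sketch (placeholder values): burns are returned oldest first,
+# 100 at a time by default.
+#
+#     page_one = get_all_burns(db, offset=0, limit=100)
+#     page_two = get_all_burns(db, offset=100, limit=100)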
+
+
+######################################
+# BLOCKS AND TRANSACTIONS #
+######################################
+
+
+def get_blocks(db, last: int = None, limit: int = 10):
+ """
+    Returns the most recent blocks, in descending block_index order
+    :param int last: The index of the most recent block to return (defaults to the chain tip)
+    :param int limit: The maximum number of blocks to return
+ """
+ cursor = db.cursor()
+ bindings = []
+ query = """
+ SELECT * FROM blocks WHERE
+ ORDER BY block_index DESC
+ """
+ if last is not None:
+ query += "WHERE BLOCK_INDEX <= ?"
+ bindings.append(last)
+ query += "LIMIT ?"
+ bindings.append(limit)
+ cursor.execute(query, tuple(bindings))
+ return cursor.fetchall()
+
+
+def get_block(db, block_index: int):
+ """
+ Return the information of a block
+ :param int block_index: The index of the block to return
+ """
+ blocks = get_blocks(db, last=block_index, limit=1)
+ if blocks:
+ return blocks[0]
+ return None
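+
+# Illustrative usage (block height is a placeholder):
+#
+#     tip = get_blocks(db, limit=1)[0]           # most recent block
+#     block = get_block(db, block_index=780000)  # a specific block, or None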
+
+
+def get_transactions_by_block(db, block_index: int):
+ """
+ Returns the transactions of a block
+    :param int block_index: The index of the block whose transactions to return
+ """
+ cursor = db.cursor()
+ query = """
+ SELECT * FROM transactions
+ WHERE block_index = ?
+ ORDER BY tx_index ASC
+ """
+ bindings = (block_index,)
+ cursor.execute(query, bindings)
+ return cursor.fetchall()
def get_vouts(db, tx_hash):
@@ -771,6 +1320,17 @@ def get_transactions(db, tx_hash=None):
return cursor.fetchall()
+def get_transaction(db, tx_hash: str):
+ """
+ Returns the information of a transaction
+ :param str tx_hash: The hash of the transaction to return
+ """
+ transactions = get_transactions(db, tx_hash)
+ if transactions:
+ return transactions[0]
+ return None
+
+
def get_transaction_source(db, tx_hash):
cursor = db.cursor()
query = """SELECT source FROM transactions WHERE tx_hash = ?"""
@@ -792,6 +1352,74 @@ def get_addresses(db, address=None):
return cursor.fetchall()
+def get_expirations(db, block_index: int):
+ """
+ Returns the expirations of a block
+    :param int block_index: The index of the block whose expirations to return
+ """
+ cursor = db.cursor()
+ queries = [
+ """
+ SELECT 'order' AS type, order_hash AS object_id FROM order_expirations
+ WHERE block_index = ?
+ """,
+ """
+ SELECT 'order_match' AS type, order_match_id AS object_id FROM order_match_expirations
+ WHERE block_index = ?
+ """,
+ """
+ SELECT 'bet' AS type, bet_hash AS object_id FROM bet_expirations
+ WHERE block_index = ?
+ """,
+ """
+ SELECT 'bet_match' AS type, bet_match_id AS object_id FROM bet_match_expirations
+ WHERE block_index = ?
+ """,
+ """
+ SELECT 'rps' AS type, rps_hash AS object_id FROM rps_expirations
+ WHERE block_index = ?
+ """,
+ """
+ SELECT 'rps_match' AS type, rps_match_id AS object_id FROM rps_match_expirations
+ WHERE block_index = ?
+ """,
+ ]
+ query = " UNION ALL ".join(queries)
+ bindings = (block_index,)
+ cursor.execute(query, bindings)
+ return cursor.fetchall()
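+
+# Each row of the UNION ALL above is a (type, object_id) pair, e.g.
+# (illustrative): {"type": "order", "object_id": "ab12..."} or
+# {"type": "bet_match", "object_id": "ab12..._cd34..."}.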
+
+
+def get_cancels(db, block_index: int):
+ """
+ Returns the cancels of a block
+    :param int block_index: The index of the block whose cancels to return
+ """
+ cursor = db.cursor()
+ query = """
+ SELECT * FROM cancels
+ WHERE block_index = ?
+ """
+ bindings = (block_index,)
+ cursor.execute(query, bindings)
+ return cursor.fetchall()
+
+
+def get_destructions(db, block_index: int):
+ """
+ Returns the destructions of a block
+    :param int block_index: The index of the block whose destructions to return
+ """
+ cursor = db.cursor()
+ query = """
+ SELECT * FROM destructions
+ WHERE block_index = ?
+ """
+ bindings = (block_index,)
+ cursor.execute(query, bindings)
+ return cursor.fetchall()
+
+
###############################
# UTIL FUNCTIONS #
###############################
@@ -963,6 +1591,14 @@ def get_dispenser_info(db, tx_hash=None, tx_index=None):
return cursor.fetchall()
+def get_dispenser_info_by_tx_hash(db, tx_hash: str):
+ """
+ Returns the dispenser information by tx_hash
+ :param str tx_hash: The hash of the dispenser to return
+ """
+ return get_dispenser_info(db, tx_hash=tx_hash)
+
+
def get_refilling_count(db, dispenser_tx_hash):
cursor = db.cursor()
query = """
@@ -1077,7 +1713,7 @@ def get_dispensers(
db,
status_in=None,
source_in=None,
- source=None,
+ address=None,
asset=None,
origin=None,
status=None,
@@ -1089,9 +1725,9 @@ def get_dispensers(
bindings = []
# where for immutable fields
first_where = []
- if source is not None:
+ if address is not None:
first_where.append("source = ?")
- bindings.append(source)
+ bindings.append(address)
if source_in is not None:
first_where.append(f"source IN ({','.join(['?' for e in range(0, len(source_in))])})")
bindings += source_in
@@ -1132,6 +1768,63 @@ def get_dispensers(
return cursor.fetchall()
+def get_dispensers_by_address(db, address: str, status: int = 0):
+ """
+ Returns the dispensers of an address
+    :param str address: The address whose dispensers to return
+    :param int status: The status of the dispensers to return
+    """
+ return get_dispensers(db, address=address, status=status)
+
+
+def get_dispensers_by_asset(db, asset: str, status: int = 0):
+ """
+ Returns the dispensers of an asset
+    :param str asset: The asset whose dispensers to return
+    :param int status: The status of the dispensers to return
+    """
+ return get_dispensers(db, asset=asset, status=status)
+
+
+def get_dispensers_by_address_and_asset(db, address: str, asset: str, status: int = 0):
+ """
+ Returns the dispensers of an address and an asset
+    :param str address: The address whose dispensers to return
+    :param str asset: The asset whose dispensers to return
+    :param int status: The status of the dispensers to return
+    """
+ return get_dispensers(db, address=address, asset=asset, status=status)
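+
+# All three wrappers above delegate to `get_dispensers`; e.g. (placeholder
+# values, status 0 being the default):
+#
+#     get_dispensers_by_address_and_asset(db, "1Address...", "MYASSET", status=0)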
+
+
+def get_dispenses(db, dispenser_tx_hash=None, block_index=None):
+ cursor = db.cursor()
+ where = []
+ bindings = []
+ if dispenser_tx_hash is not None:
+ where.append("dispenser_tx_hash = ?")
+ bindings.append(dispenser_tx_hash)
+ if block_index is not None:
+ where.append("block_index = ?")
+ bindings.append(block_index)
+ # no sql injection here
+ query = f"""SELECT * FROM dispenses WHERE ({" AND ".join(where)})""" # nosec B608 # noqa: S608
+ cursor.execute(query, tuple(bindings))
+ return cursor.fetchall()
+
+
+def get_dispenses_by_block(db, block_index: int):
+ """
+ Returns the dispenses of a block
+    :param int block_index: The index of the block whose dispenses to return
+ """
+ return get_dispenses(db, block_index=block_index)
+
+
+def get_dispenses_by_dispenser(db, tx_hash: str):
+ """
+ Returns the dispenses of a dispenser
+    :param str tx_hash: The hash of the dispenser whose dispenses to return
+ """
+ return get_dispenses(db, dispenser_tx_hash=tx_hash)
+
+
### UPDATES ###
@@ -1186,7 +1879,11 @@ def get_bet_matches_to_expire(db, block_time):
return cursor.fetchall()
-def get_bet(db, tx_hash):
+def get_bet(db, tx_hash: str):
+ """
+ Returns the information of a bet
+ :param str tx_hash: The hash of the bet to return
+ """
cursor = db.cursor()
query = """
SELECT * FROM bets
@@ -1230,7 +1927,12 @@ def get_matching_bets(db, feed_address, bet_type):
return cursor.fetchall()
-def get_open_bet_by_feed(db, feed_address):
+def get_bet_by_feed(db, address: str, status: str = "open"):
+ """
+ Returns the bets of a feed
+ :param str address: The address of the feed
+    :param str status: The status of the bets to return
+ """
cursor = db.cursor()
query = """
SELECT * FROM (
@@ -1241,7 +1943,43 @@ def get_open_bet_by_feed(db, feed_address):
) WHERE status = ?
ORDER BY tx_index, tx_hash
"""
- bindings = (feed_address, "open")
+ bindings = (address, status)
+ cursor.execute(query, bindings)
+ return cursor.fetchall()
+
+
+def get_bet_matches_by_bet(db, tx_hash: str, status: str = "pending"):
+ """
+ Returns the bet matches of a bet
+ :param str tx_hash: The hash of the bet
+ :param str status: The status of the bet matches
+ """
+ cursor = db.cursor()
+ query = """
+ SELECT * FROM (
+ SELECT *, MAX(rowid)
+ FROM bet_matches
+ WHERE (tx0_hash = ? OR tx1_hash = ?)
+ GROUP BY id
+ ) WHERE status = ?
+ """
+ bindings = (tx_hash, tx_hash, status)
+ cursor.execute(query, bindings)
+ return cursor.fetchall()
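+
+# Note: the inner `MAX(rowid) ... GROUP BY id` keeps only the latest state of
+# each bet match before filtering on `status`, the same pattern used elsewhere
+# in this module for mutable tables.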
+
+
+def get_resolutions_by_bet(db, tx_hash: str):
+ """
+ Returns the resolutions of a bet
+ :param str tx_hash: The hash of the bet
+ """
+ cursor = db.cursor()
+ query = """
+ SELECT *
+ FROM bet_match_resolutions
+        WHERE bet_match_id LIKE '%' || ? || '%'
+ """
+ bindings = (tx_hash,)
cursor.execute(query, bindings)
return cursor.fetchall()
@@ -1325,7 +2063,11 @@ def get_order_matches_to_expire(db, block_index):
return cursor.fetchall()
-def get_order(db, tx_hash):
+def get_order(db, tx_hash: str):
+ """
+ Returns the information of an order
+ :param str tx_hash: The hash of the order to return
+ """
cursor = db.cursor()
query = """
SELECT * FROM orders
@@ -1396,7 +2138,12 @@ def get_matching_orders(db, tx_hash, give_asset, get_asset):
return cursor.fetchall()
-def get_orders_by_asset(db, asset, status="open"):
+def get_orders_by_asset(db, asset: str, status: str = "open"):
+ """
+ Returns the orders of an asset
+    :param str asset: The asset whose orders to return
+ :param str status: The status of the orders to return
+ """
cursor = db.cursor()
query = """
SELECT * FROM (
@@ -1411,7 +2158,12 @@ def get_orders_by_asset(db, asset, status="open"):
return cursor.fetchall()
-def get_order_matches_by_order(db, tx_hash, status="pending"):
+def get_order_matches_by_order(db, tx_hash: str, status: str = "pending"):
+ """
+ Returns the order matches of an order
+ :param str tx_hash: The hash of the order
+ :param str status: The status of the order matches to return
+ """
cursor = db.cursor()
query = """
SELECT * FROM (
@@ -1426,6 +2178,22 @@ def get_order_matches_by_order(db, tx_hash, status="pending"):
return cursor.fetchall()
+def get_btcpays_by_order(db, tx_hash: str):
+ """
+ Returns the BTC pays of an order
+ :param str tx_hash: The hash of the order
+ """
+ cursor = db.cursor()
+ query = """
+ SELECT *
+ FROM btc_pays
+        WHERE order_match_id LIKE '%' || ? || '%'
+ """
+ bindings = (tx_hash,)
+ cursor.execute(query, bindings)
+ return cursor.fetchall()
+
+
### UPDATES ###
@@ -1854,6 +2622,21 @@ def holders(db, asset, exclude_empty_holders=False):
return holders
+def get_asset_holders(db, asset: str):
+ """
+ Returns the holders of an asset
+    :param str asset: The asset whose holders to return
+ """
+ asset_name = resolve_subasset_longname(db, asset)
+ return holders(db, asset_name, True)
+
+
+def get_asset_holder_count(db, asset):
+ holders = get_asset_holders(db, asset)
+ addresses = [holder["address"] for holder in holders]
+ return len(set(addresses))
+
+
def xcp_created(db):
"""Return number of XCP created thus far."""
cursor = db.cursor()
diff --git a/counterparty-core/counterpartycore/lib/log.py b/counterparty-core/counterpartycore/lib/log.py
index 1df6153e37..4ff3cea510 100644
--- a/counterparty-core/counterpartycore/lib/log.py
+++ b/counterparty-core/counterpartycore/lib/log.py
@@ -3,6 +3,7 @@
import sys
import traceback
from datetime import datetime
+from logging.handlers import RotatingFileHandler
from colorlog import ColoredFormatter
from dateutil.tz import tzlocal
@@ -34,7 +35,7 @@ def set_up(verbose=False, quiet=True, log_file=None, log_in_console=False):
# File Logging
if log_file:
max_log_size = 20 * 1024 * 1024 # 20 MB
- fileh = logging.handlers.RotatingFileHandler(log_file, maxBytes=max_log_size, backupCount=5)
+ fileh = RotatingFileHandler(log_file, maxBytes=max_log_size, backupCount=5)
fileh.setLevel(log_level)
log_format = "%(asctime)s [%(levelname)s] %(message)s"
formatter = logging.Formatter(log_format, "%Y-%m-%d-T%H:%M:%S%z")
diff --git a/counterparty-core/counterpartycore/lib/messages/bet.py b/counterparty-core/counterpartycore/lib/messages/bet.py
index 6a56955827..2af526b8b4 100644
--- a/counterparty-core/counterpartycore/lib/messages/bet.py
+++ b/counterparty-core/counterpartycore/lib/messages/bet.py
@@ -254,7 +254,7 @@ def cancel_bet_match(db, bet_match, status, block_index, tx_index):
def get_fee_fraction(db, feed_address):
"""Get fee fraction from last broadcast from the feed_address address."""
- broadcasts = ledger.get_broadcasts_by_source(db, feed_address, "valid")
+ broadcasts = ledger.get_broadcasts_by_source(db, feed_address, "valid", order_by="ASC")
if broadcasts:
last_broadcast = broadcasts[-1]
@@ -297,7 +297,7 @@ def validate(
problems.append("integer overflow")
# Look at feed to be bet on.
- broadcasts = ledger.get_broadcasts_by_source(db, feed_address, "valid")
+ broadcasts = ledger.get_broadcasts_by_source(db, feed_address, "valid", order_by="ASC")
if not broadcasts:
problems.append("feed doesn’t exist")
elif not broadcasts[-1]["text"]:
@@ -358,15 +358,15 @@ def validate(
def compose(
db,
- source,
- feed_address,
- bet_type,
- deadline,
- wager_quantity,
- counterwager_quantity,
- target_value,
- leverage,
- expiration,
+ source: str,
+ feed_address: str,
+ bet_type: int,
+ deadline: int,
+ wager_quantity: int,
+ counterwager_quantity: int,
+ target_value: int,
+ leverage: int,
+ expiration: int,
):
if ledger.get_balance(db, source, config.XCP) < wager_quantity:
raise exceptions.ComposeError("insufficient funds")
@@ -403,9 +403,7 @@ def compose(
return (source, [(feed_address, None)], data)
-def parse(db, tx, message):
- bet_parse_cursor = db.cursor()
-
+def unpack(message, return_dict=False):
# Unpack message.
try:
if len(message) != LENGTH:
@@ -429,9 +427,45 @@ def parse(db, tx, message):
target_value,
leverage,
expiration,
- fee_fraction_int, # noqa: F841
- ) = 0, 0, 0, 0, 0, 0, 0, 0
+ ) = 0, 0, 0, 0, 0, 0, 0
status = "invalid: could not unpack"
+ if return_dict:
+ return {
+ "bet_type": bet_type,
+ "deadline": deadline,
+ "wager_quantity": wager_quantity,
+ "counterwager_quantity": counterwager_quantity,
+ "target_value": target_value,
+ "leverage": leverage,
+ "expiration": expiration,
+ "status": status,
+ }
+ return (
+ bet_type,
+ deadline,
+ wager_quantity,
+ counterwager_quantity,
+ target_value,
+ leverage,
+ expiration,
+ status,
+ )
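+
+# Illustrative: API callers can ask for a dict instead of the positional tuple,
+# e.g. `unpack(message, return_dict=True)["wager_quantity"]`, while `parse`
+# below keeps consuming the tuple form.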
+
+
+def parse(db, tx, message):
+ bet_parse_cursor = db.cursor()
+
+ # Unpack message.
+ (
+ bet_type,
+ deadline,
+ wager_quantity,
+ counterwager_quantity,
+ target_value,
+ leverage,
+ expiration,
+ status,
+ ) = unpack(message)
odds, fee_fraction = 0, 0
feed_address = tx["destination"]
@@ -661,7 +695,7 @@ def match(db, tx):
ledger.update_bet(db, tx1["tx_hash"], set_data)
# Get last value of feed.
- broadcasts = ledger.get_broadcasts_by_source(db, feed_address, "valid")
+ broadcasts = ledger.get_broadcasts_by_source(db, feed_address, "valid", order_by="ASC")
initial_value = broadcasts[-1]["value"]
# Record bet fulfillment.
diff --git a/counterparty-core/counterpartycore/lib/messages/broadcast.py b/counterparty-core/counterpartycore/lib/messages/broadcast.py
index 6a7dff6049..0b55e0eee1 100644
--- a/counterparty-core/counterpartycore/lib/messages/broadcast.py
+++ b/counterparty-core/counterpartycore/lib/messages/broadcast.py
@@ -111,7 +111,7 @@ def validate(db, source, timestamp, value, fee_fraction_int, text, block_index):
if not source:
problems.append("null source address")
# Check previous broadcast in this feed.
- broadcasts = ledger.get_broadcasts_by_source(db, source, "valid")
+ broadcasts = ledger.get_broadcasts_by_source(db, source, "valid", order_by="ASC")
if broadcasts:
last_broadcast = broadcasts[-1]
if last_broadcast["locked"]:
@@ -135,7 +135,7 @@ def validate(db, source, timestamp, value, fee_fraction_int, text, block_index):
return problems
-def compose(db, source, timestamp, value, fee_fraction, text):
+def compose(db, source: str, timestamp: int, value: float, fee_fraction: float, text: str):
# Store the fee fraction as an integer.
fee_fraction_int = int(fee_fraction * 1e8)
@@ -162,12 +162,9 @@ def compose(db, source, timestamp, value, fee_fraction, text):
return (source, [], data)
-def parse(db, tx, message):
- cursor = db.cursor()
-
- # Unpack message.
+def unpack(message, block_index, return_dict=False):
try:
- if ledger.enabled("broadcast_pack_text", tx["block_index"]):
+ if ledger.enabled("broadcast_pack_text", block_index):
timestamp, value, fee_fraction_int, rawtext = struct.unpack(
FORMAT + f"{len(message) - LENGTH}s", message
)
@@ -197,6 +194,24 @@ def parse(db, tx, message):
except AssertionError:
timestamp, value, fee_fraction_int, text = 0, None, 0, None
status = "invalid: could not unpack text"
+
+ if return_dict:
+ return {
+ "timestamp": timestamp,
+ "value": value,
+ "fee_fraction_int": fee_fraction_int,
+ "text": text,
+ "status": status,
+ }
+ return timestamp, value, fee_fraction_int, text, status
+
+
+def parse(db, tx, message):
+ cursor = db.cursor()
+
+ # Unpack message.
+ timestamp, value, fee_fraction_int, text, status = unpack(message, tx["block_index"])
+
if status == "valid":
# For SQLite3
timestamp = min(timestamp, config.MAX_INT)
@@ -256,7 +271,7 @@ def parse(db, tx, message):
if value is None or value < 0:
# Cancel Open Bets?
if value == -2:
- for i in ledger.get_open_bet_by_feed(db, tx["source"]):
+ for i in ledger.get_bet_by_feed(db, tx["source"], status="open"):
bet.cancel_bet(db, i, "dropped", tx["block_index"], tx["tx_index"])
# Cancel Pending Bet Matches?
if value == -3:
diff --git a/counterparty-core/counterpartycore/lib/messages/btcpay.py b/counterparty-core/counterpartycore/lib/messages/btcpay.py
index f4dec2b7bb..49308b5e14 100644
--- a/counterparty-core/counterpartycore/lib/messages/btcpay.py
+++ b/counterparty-core/counterpartycore/lib/messages/btcpay.py
@@ -106,7 +106,7 @@ def validate(db, source, order_match_id, block_index):
return destination, btc_quantity, escrowed_asset, escrowed_quantity, order_match, problems
-def compose(db, source, order_match_id):
+def compose(db, source: str, order_match_id: str):
tx0_hash, tx1_hash = util.parse_id(order_match_id)
destination, btc_quantity, escrowed_asset, escrowed_quantity, order_match, problems = validate(
@@ -133,10 +133,7 @@ def compose(db, source, order_match_id):
return (source, [(destination, btc_quantity)], data)
-def parse(db, tx, message):
- cursor = db.cursor()
-
- # Unpack message.
+def unpack(message, return_dict=False):
try:
if len(message) != LENGTH:
raise exceptions.UnpackError
@@ -151,6 +148,22 @@ def parse(db, tx, message):
tx0_hash, tx1_hash, order_match_id = None, None, None
status = "invalid: could not unpack"
+ if return_dict:
+ return {
+ "tx0_hash": tx0_hash,
+ "tx1_hash": tx1_hash,
+ "order_match_id": order_match_id,
+ "status": status,
+ }
+ return tx0_hash, tx1_hash, order_match_id, status
+
+
+def parse(db, tx, message):
+ cursor = db.cursor()
+
+ # Unpack message.
+ tx0_hash, tx1_hash, order_match_id, status = unpack(message)
+
if status == "valid":
destination, btc_quantity, escrowed_asset, escrowed_quantity, order_match, problems = (
validate(db, tx["source"], order_match_id, tx["block_index"])
diff --git a/counterparty-core/counterpartycore/lib/messages/burn.py b/counterparty-core/counterpartycore/lib/messages/burn.py
index 568cb51d6e..f7cc8f2a8d 100644
--- a/counterparty-core/counterpartycore/lib/messages/burn.py
+++ b/counterparty-core/counterpartycore/lib/messages/burn.py
@@ -72,7 +72,7 @@ def validate(db, source, destination, quantity, block_index, overburn=False):
return problems
-def compose(db, source, quantity, overburn=False):
+def compose(db, source: str, quantity: int, overburn: bool = False):
cursor = db.cursor()
destination = config.UNSPENDABLE
problems = validate(
@@ -82,7 +82,7 @@ def compose(db, source, quantity, overburn=False):
raise exceptions.ComposeError(problems)
# Check that a maximum of 1 BTC total is burned per address.
- burns = ledger.get_burns(db, status="valid", source=source)
+ burns = ledger.get_burns(db, source)
already_burned = sum([burn["burned"] for burn in burns])
if quantity > (1 * config.UNIT - already_burned) and not overburn:
@@ -118,7 +118,7 @@ def parse(db, tx, mainnet_burns, message=None):
if status == "valid":
# Calculate quantity of XCP earned. (Maximum 1 BTC in total, ever.)
- burns = ledger.get_burns(db, status="valid", source=tx["source"])
+ burns = ledger.get_burns(db, tx["source"])
already_burned = sum([burn["burned"] for burn in burns])
one = 1 * config.UNIT
max_burn = one - already_burned
diff --git a/counterparty-core/counterpartycore/lib/messages/cancel.py b/counterparty-core/counterpartycore/lib/messages/cancel.py
index 22c57e59bd..bc51437e9f 100644
--- a/counterparty-core/counterpartycore/lib/messages/cancel.py
+++ b/counterparty-core/counterpartycore/lib/messages/cancel.py
@@ -82,7 +82,7 @@ def validate(db, source, offer_hash):
return offer, offer_type, problems
-def compose(db, source, offer_hash):
+def compose(db, source: str, offer_hash: str):
# Check that offer exists.
offer, offer_type, problems = validate(db, source, offer_hash)
if problems:
@@ -94,10 +94,7 @@ def compose(db, source, offer_hash):
return (source, [], data)
-def parse(db, tx, message):
- cursor = db.cursor()
-
- # Unpack message.
+def unpack(message, return_dict=False):
try:
if len(message) != LENGTH:
raise exceptions.UnpackError
@@ -107,6 +104,19 @@ def parse(db, tx, message):
except (exceptions.UnpackError, struct.error) as e: # noqa: F841
offer_hash = None
status = "invalid: could not unpack"
+ if return_dict:
+ return {
+ "offer_hash": offer_hash,
+ "status": status,
+ }
+ return offer_hash, status
+
+
+def parse(db, tx, message):
+ cursor = db.cursor()
+
+ # Unpack message.
+ offer_hash, status = unpack(message)
if status == "valid":
offer, offer_type, problems = validate(db, tx["source"], offer_hash)
diff --git a/counterparty-core/counterpartycore/lib/messages/destroy.py b/counterparty-core/counterpartycore/lib/messages/destroy.py
index 5793a0c15b..1e556b58d6 100644
--- a/counterparty-core/counterpartycore/lib/messages/destroy.py
+++ b/counterparty-core/counterpartycore/lib/messages/destroy.py
@@ -68,7 +68,7 @@ def pack(db, asset, quantity, tag):
return data
-def unpack(db, message):
+def unpack(db, message, return_dict=False):
try:
asset_id, quantity = struct.unpack(FORMAT, message[0:16])
tag = message[16:]
@@ -80,6 +80,8 @@ def unpack(db, message):
except AssetIDError: # noqa: F405
raise UnpackError("asset id invalid") # noqa: B904, F405
+ if return_dict:
+ return {"asset": asset, "quantity": quantity, "tag": tag}
return asset, quantity, tag
@@ -113,7 +115,7 @@ def validate(db, source, destination, asset, quantity):
raise BalanceError("balance insufficient") # noqa: F405
-def compose(db, source, asset, quantity, tag):
+def compose(db, source: str, asset: str, quantity: int, tag: str):
# resolve subassets
asset = ledger.resolve_subasset_longname(db, asset)
diff --git a/counterparty-core/counterpartycore/lib/messages/dispenser.py b/counterparty-core/counterpartycore/lib/messages/dispenser.py
index 2206bab853..9dbf07af54 100644
--- a/counterparty-core/counterpartycore/lib/messages/dispenser.py
+++ b/counterparty-core/counterpartycore/lib/messages/dispenser.py
@@ -228,12 +228,12 @@ def validate(
and open_address != source
):
open_dispensers = ledger.get_dispensers(
- db, status_in=[0, 11], source=open_address, asset=asset, origin=source
+ db, status_in=[0, 11], address=open_address, asset=asset, origin=source
)
else:
query_address = open_address if status == STATUS_OPEN_EMPTY_ADDRESS else source
open_dispensers = ledger.get_dispensers(
- db, status_in=[0, 11], source=query_address, asset=asset
+ db, status_in=[0, 11], address=query_address, asset=asset
)
if len(open_dispensers) == 0 or open_dispensers[0]["status"] != STATUS_CLOSING:
@@ -338,14 +338,14 @@ def validate(
def compose(
db,
- source,
- asset,
- give_quantity,
- escrow_quantity,
- mainchainrate,
- status,
- open_address=None,
- oracle_address=None,
+ source: str,
+ asset: str,
+ give_quantity: int,
+ escrow_quantity: int,
+ mainchainrate: int,
+ status: int,
+ open_address: str = None,
+ oracle_address: str = None,
):
assetid, problems = validate(
db,
@@ -409,12 +409,9 @@ def calculate_oracle_fee(
return oracle_fee_btc
-def parse(db, tx, message):
- cursor = db.cursor()
-
- # Unpack message.
+def unpack(message, return_dict=False):
try:
- action_address = tx["source"]
+ action_address = None
oracle_address = None
assetid, give_quantity, escrow_quantity, mainchainrate, dispenser_status = struct.unpack(
FORMAT, message[0:LENGTH]
@@ -432,9 +429,57 @@ def parse(db, tx, message):
asset = ledger.generate_asset_name(assetid, ledger.CURRENT_BLOCK_INDEX)
status = "valid"
except (exceptions.UnpackError, struct.error) as e: # noqa: F841
- assetid, give_quantity, mainchainrate, asset = None, None, None, None
+ (
+ give_quantity,
+ escrow_quantity,
+ mainchainrate,
+ dispenser_status,
+ action_address,
+ oracle_address,
+ asset,
+ ) = None, None, None, None, None, None, None
status = "invalid: could not unpack"
+ if return_dict:
+ return {
+ "asset": asset,
+ "give_quantity": give_quantity,
+ "escrow_quantity": escrow_quantity,
+ "mainchainrate": mainchainrate,
+ "dispenser_status": dispenser_status,
+ "action_address": action_address,
+ "oracle_address": oracle_address,
+ "status": status,
+ }
+ return (
+ asset,
+ give_quantity,
+ escrow_quantity,
+ mainchainrate,
+ dispenser_status,
+ action_address,
+ oracle_address,
+ status,
+ )
+
+
+def parse(db, tx, message):
+ cursor = db.cursor()
+
+ # Unpack message.
+ (
+ asset,
+ give_quantity,
+ escrow_quantity,
+ mainchainrate,
+ dispenser_status,
+ action_address,
+ oracle_address,
+ status,
+ ) = unpack(message)
+ if action_address is None:
+ action_address = tx["source"]
+
if status == "valid":
if ledger.enabled("dispenser_parsing_validation", ledger.CURRENT_BLOCK_INDEX):
asset_id, problems = validate(
@@ -459,7 +504,7 @@ def parse(db, tx, message):
else:
if dispenser_status == STATUS_OPEN or dispenser_status == STATUS_OPEN_EMPTY_ADDRESS:
existing = ledger.get_dispensers(
- db, source=action_address, asset=asset, status=STATUS_OPEN
+ db, address=action_address, asset=asset, status=STATUS_OPEN
)
if len(existing) == 0:
@@ -612,7 +657,7 @@ def parse(db, tx, message):
)
dispenser_tx_hash = ledger.get_dispensers(
- db, source=action_address, asset=asset, status=STATUS_OPEN
+ db, address=action_address, asset=asset, status=STATUS_OPEN
)[0]["tx_hash"]
bindings_refill = {
"tx_index": tx["tx_index"],
@@ -647,14 +692,14 @@ def parse(db, tx, message):
if close_from_another_address:
existing = ledger.get_dispensers(
db,
- source=action_address,
+ address=action_address,
asset=asset,
status=STATUS_OPEN,
origin=tx["source"],
)
else:
existing = ledger.get_dispensers(
- db, source=tx["source"], asset=asset, status=STATUS_OPEN
+ db, address=tx["source"], asset=asset, status=STATUS_OPEN
)
if len(existing) == 1:
if close_delay == 0:
@@ -692,7 +737,7 @@ def is_dispensable(db, address, amount):
if address is None:
return False
- dispensers = ledger.get_dispensers(db, source=address, status_in=[0, 11])
+ dispensers = ledger.get_dispensers(db, address=address, status_in=[0, 11])
for next_dispenser in dispensers:
if next_dispenser["oracle_address"] != None: # noqa: E711
@@ -731,7 +776,7 @@ def dispense(db, tx):
dispensers = []
if next_out["destination"] is not None:
dispensers = ledger.get_dispensers(
- db, source=next_out["destination"], status_in=[0, 11], order_by="asset"
+ db, address=next_out["destination"], status_in=[0, 11], order_by="asset"
)
for dispenser in dispensers:
diff --git a/counterparty-core/counterpartycore/lib/messages/dividend.py b/counterparty-core/counterpartycore/lib/messages/dividend.py
index eec5dd54c8..d7f5afe433 100644
--- a/counterparty-core/counterpartycore/lib/messages/dividend.py
+++ b/counterparty-core/counterpartycore/lib/messages/dividend.py
@@ -179,7 +179,7 @@ def validate(db, source, quantity_per_unit, asset, dividend_asset, block_index):
return dividend_total, outputs, problems, fee
-def compose(db, source, quantity_per_unit, asset, dividend_asset):
+def compose(db, source: str, quantity_per_unit: int, asset: str, dividend_asset: str):
# resolve subassets
asset = ledger.resolve_subasset_longname(db, asset)
dividend_asset = ledger.resolve_subasset_longname(db, dividend_asset)
@@ -207,23 +207,16 @@ def compose(db, source, quantity_per_unit, asset, dividend_asset):
return (source, [], data)
-def parse(db, tx, message):
- dividend_parse_cursor = db.cursor()
-
- fee = 0
-
- # Unpack message.
+def unpack(db, message, block_index, return_dict=False):
try:
- if (tx["block_index"] > 288150 or config.TESTNET or config.REGTEST) and len(
- message
- ) == LENGTH_2:
+ if (block_index > 288150 or config.TESTNET or config.REGTEST) and len(message) == LENGTH_2:
quantity_per_unit, asset_id, dividend_asset_id = struct.unpack(FORMAT_2, message)
- asset = ledger.get_asset_name(db, asset_id, tx["block_index"])
- dividend_asset = ledger.get_asset_name(db, dividend_asset_id, tx["block_index"])
+ asset = ledger.get_asset_name(db, asset_id, block_index)
+ dividend_asset = ledger.get_asset_name(db, dividend_asset_id, block_index)
status = "valid"
elif len(message) == LENGTH_1:
quantity_per_unit, asset_id = struct.unpack(FORMAT_1, message)
- asset = ledger.get_asset_name(db, asset_id, tx["block_index"])
+ asset = ledger.get_asset_name(db, asset_id, block_index)
dividend_asset = config.XCP
status = "valid"
else:
@@ -232,6 +225,24 @@ def parse(db, tx, message):
dividend_asset, quantity_per_unit, asset = None, None, None
status = "invalid: could not unpack"
+ if return_dict:
+ return {
+ "asset": asset,
+ "quantity_per_unit": quantity_per_unit,
+ "dividend_asset": dividend_asset,
+ "status": status,
+ }
+ return asset, quantity_per_unit, dividend_asset, status
+
+
+def parse(db, tx, message):
+ dividend_parse_cursor = db.cursor()
+
+ fee = 0
+
+ # Unpack message.
+ asset, quantity_per_unit, dividend_asset, status = unpack(db, message, tx["block_index"])
+
if dividend_asset == config.BTC:
status = f"invalid: cannot pay {config.BTC} dividends within protocol"
diff --git a/counterparty-core/counterpartycore/lib/messages/issuance.py b/counterparty-core/counterpartycore/lib/messages/issuance.py
index 91db18cc2f..81d1a6ff81 100644
--- a/counterparty-core/counterpartycore/lib/messages/issuance.py
+++ b/counterparty-core/counterpartycore/lib/messages/issuance.py
@@ -347,14 +347,14 @@ def validate(
def compose(
db,
- source,
- asset,
- quantity,
- transfer_destination=None,
- divisible=None,
- lock=None,
- reset=None,
- description=None,
+ source: str,
+ asset: str,
+ quantity: int,
+ transfer_destination: str = None,
+ divisible: bool = None,
+ lock: bool = None,
+ reset: bool = None,
+ description: str = None,
):
# Callability is deprecated, so for re‐issuances set relevant parameters
# to old values; for first issuances, make uncallable.
@@ -565,27 +565,26 @@ def compose(
return (source, destination_outputs, data)
-def parse(db, tx, message, message_type_id):
- issuance_parse_cursor = db.cursor()
+def unpack(db, message, message_type_id, block_index, return_dict=False):
asset_format = ledger.get_value_by_block_index(
- "issuance_asset_serialization_format", tx["block_index"]
+ "issuance_asset_serialization_format", block_index
)
asset_format_length = ledger.get_value_by_block_index(
- "issuance_asset_serialization_length", tx["block_index"]
+ "issuance_asset_serialization_length", block_index
)
subasset_format = ledger.get_value_by_block_index(
- "issuance_subasset_serialization_format", tx["block_index"]
+ "issuance_subasset_serialization_format", block_index
)
subasset_format_length = ledger.get_value_by_block_index(
- "issuance_subasset_serialization_length", tx["block_index"]
+ "issuance_subasset_serialization_length", block_index
)
# Unpack message.
try:
subasset_longname = None
if message_type_id == LR_SUBASSET_ID or message_type_id == SUBASSET_ID:
- if not ledger.enabled("subassets", block_index=tx["block_index"]):
- logger.warning(f"subassets are not enabled at block {tx['block_index']}")
+ if not ledger.enabled("subassets", block_index=block_index):
+ logger.warning(f"subassets are not enabled at block {block_index}")
raise exceptions.UnpackError
# parse a subasset original issuance message
@@ -607,9 +606,7 @@ def parse(db, tx, message, message_type_id):
description_length = len(message) - subasset_format_length - compacted_subasset_length
if description_length < 0:
- logger.warning(
- f"invalid subasset length: [issuance] tx [{tx['tx_hash']}]: {compacted_subasset_length}"
- )
+ logger.warning(f"invalid subasset length: {compacted_subasset_length}")
raise exceptions.UnpackError
messages_format = f">{compacted_subasset_length}s{description_length}s"
compacted_subasset_longname, description = struct.unpack(
@@ -628,7 +625,7 @@ def parse(db, tx, message, message_type_id):
description = None
except UnicodeDecodeError:
description = ""
- elif (tx["block_index"] > 283271 or config.TESTNET or config.REGTEST) and len(
+ elif (block_index > 283271 or config.TESTNET or config.REGTEST) and len(
message
) >= asset_format_length: # Protocol change.
if (len(message) - asset_format_length <= 42) and not ledger.enabled(
@@ -698,11 +695,11 @@ def parse(db, tx, message, message_type_id):
"",
)
try:
- asset = ledger.generate_asset_name(asset_id, tx["block_index"])
+ asset = ledger.generate_asset_name(asset_id, block_index)
##This is for backwards compatibility with assets names longer than 12 characters
if asset.startswith("A"):
- named_asset = ledger.get_asset_name(db, asset_id, tx["block_index"])
+ named_asset = ledger.get_asset_name(db, asset_id, block_index)
if named_asset != 0:
asset = named_asset
@@ -718,7 +715,21 @@ def parse(db, tx, message, message_type_id):
asset = None
status = "invalid: bad asset name"
except exceptions.UnpackError as e: # noqa: F841
- asset, quantity, divisible, lock, reset, callable_, call_date, call_price, description = (
+ (
+ asset_id,
+ asset,
+ subasset_longname,
+ quantity,
+ divisible,
+ lock,
+ reset,
+ callable_,
+ call_date,
+ call_price,
+ description,
+ ) = (
+ None,
+ None,
None,
None,
None,
@@ -731,6 +742,52 @@ def parse(db, tx, message, message_type_id):
)
status = "invalid: could not unpack"
+ if return_dict:
+ return {
+ "asset_id": asset_id,
+ "asset": asset,
+ "subasset_longname": subasset_longname,
+ "quantity": quantity,
+ "divisible": divisible,
+ "lock": lock,
+ "reset": reset,
+ "callable": callable_,
+ "call_date": call_date,
+ "call_price": call_price,
+ "description": description,
+ "status": status,
+ }
+ return (
+ asset_id,
+ asset,
+ subasset_longname,
+ quantity,
+ divisible,
+ lock,
+ reset,
+ callable_,
+ call_date,
+ call_price,
+ description,
+ status,
+ )
+
+
+def parse(db, tx, message, message_type_id):
+ (
+ asset_id,
+ asset,
+ subasset_longname,
+ quantity,
+ divisible,
+ lock,
+ reset,
+ callable_,
+ call_date,
+ call_price,
+ description,
+ status,
+ ) = unpack(db, message, message_type_id, tx["block_index"])
# parse and validate the subasset from the message
subasset_parent = None
if status == "valid" and subasset_longname is not None: # Protocol change.
@@ -942,5 +999,3 @@ def parse(db, tx, message, message_type_id):
action="issuance",
event=tx["tx_hash"],
)
-
- issuance_parse_cursor.close()
diff --git a/counterparty-core/counterpartycore/lib/messages/order.py b/counterparty-core/counterpartycore/lib/messages/order.py
index 6339b44438..3038483614 100644
--- a/counterparty-core/counterpartycore/lib/messages/order.py
+++ b/counterparty-core/counterpartycore/lib/messages/order.py
@@ -432,7 +432,14 @@ def validate(
def compose(
- db, source, give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required
+ db,
+ source: str,
+ give_asset: str,
+ give_quantity: int,
+ get_asset: str,
+ get_quantity: int,
+ expiration: int,
+ fee_required: int,
):
cursor = db.cursor()
@@ -470,18 +477,15 @@ def compose(
return (source, [], data)
-def parse(db, tx, message):
- order_parse_cursor = db.cursor()
-
- # Unpack message.
+def unpack(db, message, block_index, return_dict=False):
try:
if len(message) != LENGTH:
raise exceptions.UnpackError
give_id, give_quantity, get_id, get_quantity, expiration, fee_required = struct.unpack(
FORMAT, message
)
- give_asset = ledger.get_asset_name(db, give_id, tx["block_index"])
- get_asset = ledger.get_asset_name(db, get_id, tx["block_index"])
+ give_asset = ledger.get_asset_name(db, give_id, block_index)
+ get_asset = ledger.get_asset_name(db, get_id, block_index)
status = "open"
except (exceptions.UnpackError, exceptions.AssetNameError, struct.error) as e: # noqa: F841
give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required = (
@@ -494,6 +498,27 @@ def parse(db, tx, message):
)
status = "invalid: could not unpack"
+ if return_dict:
+ return {
+ "give_asset": give_asset,
+ "give_quantity": give_quantity,
+ "get_asset": get_asset,
+ "get_quantity": get_quantity,
+ "expiration": expiration,
+ "fee_required": fee_required,
+ "status": status,
+ }
+ return give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required, status
+
+
+def parse(db, tx, message):
+ order_parse_cursor = db.cursor()
+
+ # Unpack message.
+ (give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required, status) = unpack(
+ db, message, tx["block_index"]
+ )
+
price = 0
if status == "open":
try:
diff --git a/counterparty-core/counterpartycore/lib/messages/rps.py b/counterparty-core/counterpartycore/lib/messages/rps.py
index daf1f10b64..134dd89f38 100644
--- a/counterparty-core/counterpartycore/lib/messages/rps.py
+++ b/counterparty-core/counterpartycore/lib/messages/rps.py
@@ -282,7 +282,9 @@ def validate(db, source, possible_moves, wager, move_random_hash, expiration, bl
return problems
-def compose(db, source, possible_moves, wager, move_random_hash, expiration):
+def compose(
+ db, source: str, possible_moves: int, wager: int, move_random_hash: str, expiration: int
+):
problems = validate(
db, source, possible_moves, wager, move_random_hash, expiration, ledger.CURRENT_BLOCK_INDEX
)
@@ -298,9 +300,7 @@ def compose(db, source, possible_moves, wager, move_random_hash, expiration):
return (source, [], data)
-def parse(db, tx, message):
- rps_parse_cursor = db.cursor()
- # Unpack message.
+def unpack(message, return_dict=False):
try:
if len(message) != LENGTH:
raise exceptions.UnpackError
@@ -310,6 +310,22 @@ def parse(db, tx, message):
(possible_moves, wager, move_random_hash, expiration) = 0, 0, "", 0
status = "invalid: could not unpack"
+ if return_dict:
+ return {
+ "possible_moves": possible_moves,
+ "wager": wager,
+ "move_random_hash": binascii.hexlify(move_random_hash).decode("utf8"),
+ "expiration": expiration,
+ "status": status,
+ }
+ return possible_moves, wager, move_random_hash, expiration, status
+
+
+def parse(db, tx, message):
+ rps_parse_cursor = db.cursor()
+ # Unpack message.
+ possible_moves, wager, move_random_hash, expiration, status = unpack(message)
+
if status == "open":
move_random_hash = binascii.hexlify(move_random_hash).decode("utf8")
# Overbet
diff --git a/counterparty-core/counterpartycore/lib/messages/rpsresolve.py b/counterparty-core/counterpartycore/lib/messages/rpsresolve.py
index 9e4634ae2f..77c8bf4ca0 100644
--- a/counterparty-core/counterpartycore/lib/messages/rpsresolve.py
+++ b/counterparty-core/counterpartycore/lib/messages/rpsresolve.py
@@ -106,7 +106,7 @@ def validate(db, source, move, random, rps_match_id):
return txn, rps_match, problems
-def compose(db, source, move, random, rps_match_id):
+def compose(db, source: str, move: int, random: str, rps_match_id: str):
tx0_hash, tx1_hash = util.parse_id(rps_match_id)
txn, rps_match, problems = validate(db, source, move, random, rps_match_id)
@@ -128,10 +128,7 @@ def compose(db, source, move, random, rps_match_id):
return (source, [], data)
-def parse(db, tx, message):
- cursor = db.cursor()
-
- # Unpack message.
+def unpack(message, return_dict=False):
try:
if len(message) != LENGTH:
raise exceptions.UnpackError
@@ -147,6 +144,22 @@ def parse(db, tx, message):
move, random, tx0_hash, tx1_hash, rps_match_id = None, None, None, None, None
status = "invalid: could not unpack"
+ if return_dict:
+ return {
+ "move": move,
+ "random": random,
+ "rps_match_id": rps_match_id,
+ "status": status,
+ }
+ return move, random, rps_match_id, status
+
+
+def parse(db, tx, message):
+ cursor = db.cursor()
+
+ # Unpack message.
+ move, random, rps_match_id, status = unpack(message)
+
if status == "valid":
txn, rps_match, problems = validate(db, tx["source"], move, random, rps_match_id)
if problems:
diff --git a/counterparty-core/counterpartycore/lib/messages/send.py b/counterparty-core/counterpartycore/lib/messages/send.py
index fbe0a3a96e..decce16caa 100644
--- a/counterparty-core/counterpartycore/lib/messages/send.py
+++ b/counterparty-core/counterpartycore/lib/messages/send.py
@@ -112,7 +112,14 @@ def validate(db, source, destination, asset, quantity, block_index):
def compose(
- db, source, destination, asset, quantity, memo=None, memo_is_hex=False, use_enhanced_send=None
+ db,
+ source: str,
+ destination: str,
+ asset: str,
+ quantity: int,
+ memo: str = None,
+ memo_is_hex: bool = False,
+ use_enhanced_send: bool = None,
):
# special case - enhanced_send replaces send by default when it is enabled
# but it can be explicitly disabled with an API parameter
diff --git a/counterparty-core/counterpartycore/lib/messages/sweep.py b/counterparty-core/counterpartycore/lib/messages/sweep.py
index 9ed81c7ae2..9ed916cb8f 100644
--- a/counterparty-core/counterpartycore/lib/messages/sweep.py
+++ b/counterparty-core/counterpartycore/lib/messages/sweep.py
@@ -103,7 +103,7 @@ def validate(db, source, destination, flags, memo, block_index):
return problems, total_fee
-def compose(db, source, destination, flags, memo):
+def compose(db, source: str, destination: str, flags: int, memo: str):
if memo is None:
memo = b""
elif flags & FLAG_BINARY_MEMO:
@@ -126,7 +126,7 @@ def compose(db, source, destination, flags, memo):
return (source, [], data)
-def unpack(db, message, block_index):
+def unpack(message):
try:
memo_bytes_length = len(message) - LENGTH
if memo_bytes_length < 0:
@@ -162,7 +162,7 @@ def parse(db, tx, message):
# Unpack message.
try:
- unpacked = unpack(db, message, tx["block_index"])
+ unpacked = unpack(message)
destination, flags, memo_bytes = (
unpacked["destination"],
unpacked["flags"],
diff --git a/counterparty-core/counterpartycore/lib/messages/versions/enhanced_send.py b/counterparty-core/counterpartycore/lib/messages/versions/enhanced_send.py
index a7bbca7945..b423af2f9f 100644
--- a/counterparty-core/counterpartycore/lib/messages/versions/enhanced_send.py
+++ b/counterparty-core/counterpartycore/lib/messages/versions/enhanced_send.py
@@ -14,7 +14,7 @@
ID = 2 # 0x02
-def unpack(db, message, block_index):
+def unpack(message, block_index):
try:
# account for memo bytes
memo_bytes_length = len(message) - LENGTH
@@ -98,7 +98,9 @@ def validate(db, source, destination, asset, quantity, memo_bytes, block_index):
return problems
-def compose(db, source, destination, asset, quantity, memo, memo_is_hex):
+def compose(
+ db, source: str, destination: str, asset: str, quantity: int, memo: str, memo_is_hex: bool
+):
cursor = db.cursor()
# Just send BTC?
@@ -150,7 +152,7 @@ def parse(db, tx, message):
# Unpack message.
try:
- unpacked = unpack(db, message, tx["block_index"])
+ unpacked = unpack(message, tx["block_index"])
asset, quantity, destination, memo_bytes = (
unpacked["asset"],
unpacked["quantity"],
diff --git a/counterparty-core/counterpartycore/lib/messages/versions/mpma.py b/counterparty-core/counterpartycore/lib/messages/versions/mpma.py
index 95010bafd2..2d6a2c5c48 100644
--- a/counterparty-core/counterpartycore/lib/messages/versions/mpma.py
+++ b/counterparty-core/counterpartycore/lib/messages/versions/mpma.py
@@ -21,7 +21,7 @@
## expected functions for message version
-def unpack(db, message, block_index):
+def unpack(message, block_index):
try:
unpacked = _decode_mpma_send_decode(message, block_index)
except struct.error as e: # noqa: F841
@@ -98,7 +98,7 @@ def validate(db, source, asset_dest_quant_list, block_index):
return problems
-def compose(db, source, asset_dest_quant_list, memo, memo_is_hex):
+def compose(db, source: str, asset_dest_quant_list: list, memo: str, memo_is_hex: bool):
cursor = db.cursor()
out_balances = util.accumulate([(t[0], t[2]) for t in asset_dest_quant_list])
@@ -132,7 +132,7 @@ def compose(db, source, asset_dest_quant_list, memo, memo_is_hex):
def parse(db, tx, message):
try:
- unpacked = unpack(db, message, tx["block_index"])
+ unpacked = unpack(message, tx["block_index"])
status = "valid"
except struct.error as e: # noqa: F841
status = "invalid: truncated message"
diff --git a/counterparty-core/counterpartycore/lib/messages/versions/send1.py b/counterparty-core/counterpartycore/lib/messages/versions/send1.py
index 95e6642376..459c166d48 100644
--- a/counterparty-core/counterpartycore/lib/messages/versions/send1.py
+++ b/counterparty-core/counterpartycore/lib/messages/versions/send1.py
@@ -68,7 +68,7 @@ def validate(db, source, destination, asset, quantity, block_index):
return problems
-def compose(db, source, destination, asset, quantity):
+def compose(db, source: str, destination: str, asset: str, quantity: int):
cursor = db.cursor()
# Just send BTC?
diff --git a/counterparty-core/counterpartycore/lib/script.py b/counterparty-core/counterpartycore/lib/script.py
index 5904d110cc..76c3b3106b 100644
--- a/counterparty-core/counterpartycore/lib/script.py
+++ b/counterparty-core/counterpartycore/lib/script.py
@@ -12,8 +12,12 @@
from bitcoin.core.key import CPubKey
from counterparty_rs import b58, utils
-# We are using PyCryptodome not PyCrypto
-# from Crypto.Hash import RIPEMD160
+# TODO: Use `python-bitcointools` instead. (Get rid of `pycoin` dependency.)
+from pycoin.ecdsa.secp256k1 import secp256k1_generator as generator_secp256k1
+from pycoin.encoding.b58 import a2b_hashed_base58
+from pycoin.encoding.bytes32 import from_bytes_32
+from pycoin.encoding.exceptions import EncodingError
+from pycoin.encoding.sec import public_pair_to_sec
from ripemd import ripemd160 as RIPEMD160 # nosec B413
from counterpartycore.lib import config, exceptions, ledger, opcodes, util
@@ -404,14 +408,6 @@ def scriptpubkey_to_address(scriptpubkey):
return None
-# TODO: Use `python-bitcointools` instead. (Get rid of `pycoin` dependency.)
-from pycoin.ecdsa.secp256k1 import secp256k1_generator as generator_secp256k1 # noqa: E402
-from pycoin.encoding.b58 import a2b_hashed_base58 # noqa: E402
-from pycoin.encoding.bytes32 import from_bytes_32 # noqa: E402
-from pycoin.encoding.exceptions import EncodingError # noqa: E402
-from pycoin.encoding.sec import public_pair_to_sec # noqa: E402
-
-
def wif_to_tuple_of_prefix_secret_exponent_compressed(wif):
"""
Return a tuple of (prefix, secret_exponent, is_compressed).
diff --git a/counterparty-core/counterpartycore/lib/transaction.py b/counterparty-core/counterpartycore/lib/transaction.py
index 13819c7106..0192826379 100644
--- a/counterparty-core/counterpartycore/lib/transaction.py
+++ b/counterparty-core/counterpartycore/lib/transaction.py
@@ -7,30 +7,25 @@
import binascii
import decimal
import hashlib
+import inspect
import io
-import json # noqa: F401
import logging
-import math # noqa: F401
-import os # noqa: F401
-import re # noqa: F401
-import sys # noqa: F401
+import sys
import threading
-import time # noqa: F401
import bitcoin as bitcoinlib
import cachetools
-import requests # noqa: F401
-from bitcoin.core import CTransaction, b2lx, x # noqa: F401
-from bitcoin.core.script import CScript # noqa: F401
+from bitcoin.core import CTransaction
from counterpartycore.lib import (
arc4, # noqa: F401
backend,
- blocks, # noqa: F401
config,
exceptions,
gettxinfo,
ledger,
+ message_type,
+ messages,
script,
util,
)
@@ -959,3 +954,717 @@ def get_dust_return_pubkey(source, provided_pubkeys, encoding):
raise script.InputError("Invalid private key.") # noqa: B904
return dust_return_pubkey
+
+
+COMPOSE_COMMONS_ARGS = {
+ "encoding": (str, "auto", "The encoding method to use"),
+ "fee_per_kb": (
+ int,
+ None,
+ "The fee per kilobyte of transaction data constant that the server uses when deciding on the dynamic fee to use (in satoshi)",
+ ),
+ "regular_dust_size": (
+ int,
+ config.DEFAULT_REGULAR_DUST_SIZE,
+ "Specify (in satoshi) to override the (dust) amount of BTC used for each non-(bare) multisig output.",
+ ),
+ "multisig_dust_size": (
+ int,
+ config.DEFAULT_MULTISIG_DUST_SIZE,
+ "Specify (in satoshi) to override the (dust) amount of BTC used for each (bare) multisig output",
+ ),
+ "op_return_value": (
+ int,
+ config.DEFAULT_OP_RETURN_VALUE,
+ "The value (in satoshis) to use with any OP_RETURN outputs in the generated transaction. Defaults to 0. Don't use this, unless you like throwing your money away",
+ ),
+ "pubkey": (
+ str,
+ None,
+ "The hexadecimal public key of the source address (or a list of the keys, if multi-sig). Required when using encoding parameter values of multisig or pubkeyhash.",
+ ),
+ "allow_unconfirmed_inputs": (
+ bool,
+ False,
+ "Set to true to allow this transaction to utilize unconfirmed UTXOs as inputs",
+ ),
+ "fee": (
+ int,
+ None,
+ "If you'd like to specify a custom miners' fee, specify it here (in satoshi). Leave as default for the server to automatically choose",
+ ),
+ "fee_provided": (
+ int,
+ 0,
+ "If you would like to specify a maximum fee (up to and including which may be used as the transaction fee), specify it here (in satoshi). This differs from fee in that this is an upper bound value, which fee is an exact value",
+ ),
+ "unspent_tx_hash": (
+ str,
+ None,
+ "When compiling the UTXOs to use as inputs for the transaction being created, only consider unspent outputs from this specific transaction hash. Defaults to null to consider all UTXOs for the address. Do not use this parameter if you are specifying custom_inputs",
+ ),
+ "dust_return_pubkey": (
+ str,
+ None,
+ "The dust return pubkey is used in multi-sig data outputs (as the only real pubkey) to make those the outputs spendable. By default, this pubkey is taken from the pubkey used in the first transaction input. However, it can be overridden here (and is required to be specified if a P2SH input is used and multisig is used as the data output encoding.) If specified, specify the public key (in hex format) where dust will be returned to so that it can be reclaimed. Only valid/useful when used with transactions that utilize multisig data encoding. Note that if this value is set to false, this instructs counterparty-server to use the default dust return pubkey configured at the node level. If this default is not set at the node level, the call will generate an exception",
+ ),
+ "disable_utxo_locks": (
+ bool,
+ False,
+ "By default, UTXO's utilized when creating a transaction are 'locked' for a few seconds, to prevent a case where rapidly generating create_ calls reuse UTXOs due to their spent status not being updated in bitcoind yet. Specify true for this parameter to disable this behavior, and not temporarily lock UTXOs",
+ ),
+ "extended_tx_info": (
+ bool,
+ False,
+ "When this is not specified or false, the create_ calls return only a hex-encoded string. If this is true, the create_ calls return a data object with the following keys: tx_hex, btc_in, btc_out, btc_change, and btc_fee",
+ ),
+ "p2sh_pretx_txid": (
+ str,
+ None,
+ "The previous transaction txid for a two part P2SH message. This txid must be taken from the signed transaction",
+ ),
+ "old_style_api": (bool, True, "Use the old style API"),
+ "segwit": (bool, False, "Use segwit"),
+}
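+
+# Each entry above maps an argument name to a (type, default, description)
+# triple; the keys are used below to separate these common construction
+# arguments from the message-specific parameters passed to `compose` functions.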
+
+
+def split_compose_arams(**kwargs):
+ transaction_args = {}
+ common_args = {}
+ private_key_wif = None
+ for key, value in kwargs.items():
+ if key in COMPOSE_COMMONS_ARGS:
+ common_args[key] = value
+ elif key == "privkey":
+ private_key_wif = value
+ else:
+ transaction_args[key] = value
+ return transaction_args, common_args, private_key_wif
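+
+# Illustrative split (placeholder values): message-specific arguments go to the
+# first dict, known construction arguments to the second, and `privkey` is
+# returned separately.
+#
+#     tx_args, common_args, wif = split_compose_arams(
+#         asset="MYASSET", quantity=100, fee=10000, privkey=None
+#     )
+#     # tx_args == {"asset": "MYASSET", "quantity": 100}; common_args == {"fee": 10000}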
+
+
+def get_default_args(func):
+ signature = inspect.signature(func)
+ return {
+ k: v.default
+ for k, v in signature.parameters.items()
+ if v.default is not inspect.Parameter.empty
+ }
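+
+# For example (hedged sketch): with `def compose(db, source, memo=None): ...`,
+# `get_default_args(compose)` returns `{"memo": None}`; `compose_transaction`
+# below uses this to backfill optional parameters the caller omitted.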
+
+
+def compose_transaction(
+ db,
+ name,
+ params,
+ encoding="auto",
+ fee_per_kb=None,
+ estimate_fee_per_kb=None,
+ regular_dust_size=config.DEFAULT_REGULAR_DUST_SIZE,
+ multisig_dust_size=config.DEFAULT_MULTISIG_DUST_SIZE,
+ op_return_value=config.DEFAULT_OP_RETURN_VALUE,
+ pubkey=None,
+ allow_unconfirmed_inputs=False,
+ fee=None,
+ fee_provided=0,
+ unspent_tx_hash=None,
+ custom_inputs=None,
+ dust_return_pubkey=None,
+ disable_utxo_locks=False,
+ extended_tx_info=False,
+ p2sh_source_multisig_pubkeys=None,
+ p2sh_source_multisig_pubkeys_required=None,
+ p2sh_pretx_txid=None,
+ old_style_api=True,
+ segwit=False,
+):
+ """Create and return a transaction."""
+
+ # Get provided pubkeys.
+ if isinstance(pubkey, str):
+ provided_pubkeys = [pubkey]
+ elif isinstance(pubkey, list):
+ provided_pubkeys = pubkey
+ elif pubkey is None:
+ provided_pubkeys = []
+ else:
+ raise exceptions.TransactionError("Invalid pubkey.")
+
+ # Get additional pubkeys from `source` and `destination` params.
+ # Convert `source` and `destination` to pubkeyhash form.
+ for address_name in ["source", "destination"]:
+ if address_name in params:
+ address = params[address_name]
+ if isinstance(address, list):
+ # pkhshs = []
+ # for addr in address:
+ # provided_pubkeys += script.extract_pubkeys(addr)
+ # pkhshs.append(script.make_pubkeyhash(addr))
+ # params[address_name] = pkhshs
+ pass
+ else:
+ provided_pubkeys += script.extract_pubkeys(address)
+ params[address_name] = script.make_pubkeyhash(address)
+
+ # Check validity of collected pubkeys.
+ for pubkey in provided_pubkeys:
+ if not script.is_fully_valid(binascii.unhexlify(pubkey)):
+ raise script.AddressError(f"invalid public key: {pubkey}")
+
+ compose_method = sys.modules[f"counterpartycore.lib.messages.{name}"].compose
+ compose_params = inspect.getfullargspec(compose_method)[0]
+ missing_params = [p for p in compose_params if p not in params and p != "db"]
+ if len(missing_params) > 0:
+ default_values = get_default_args(compose_method)
+ for param in missing_params:
+ if param in default_values:
+ params[param] = default_values[param]
+ else:
+ raise exceptions.ComposeError(f"missing parameters: {', '.join(missing_params)}")
+
+ # don't override fee_per_kb if specified
+ if fee_per_kb is not None:
+ estimate_fee_per_kb = False
+ else:
+ fee_per_kb = config.DEFAULT_FEE_PER_KB
+
+ if "extended_tx_info" in params:
+ extended_tx_info = params["extended_tx_info"]
+ del params["extended_tx_info"]
+
+ if "old_style_api" in params:
+ old_style_api = params["old_style_api"]
+ del params["old_style_api"]
+
+ if "segwit" in params:
+ segwit = params["segwit"]
+ del params["segwit"]
+
+ tx_info = compose_method(db, **params)
+ initialise(db)
+ return construct(
+ db,
+ tx_info,
+ encoding=encoding,
+ fee_per_kb=fee_per_kb,
+ estimate_fee_per_kb=estimate_fee_per_kb,
+ regular_dust_size=regular_dust_size,
+ multisig_dust_size=multisig_dust_size,
+ op_return_value=op_return_value,
+ provided_pubkeys=provided_pubkeys,
+ allow_unconfirmed_inputs=allow_unconfirmed_inputs,
+ exact_fee=fee,
+ fee_provided=fee_provided,
+ unspent_tx_hash=unspent_tx_hash,
+ custom_inputs=custom_inputs,
+ dust_return_pubkey=dust_return_pubkey,
+ disable_utxo_locks=disable_utxo_locks,
+ extended_tx_info=extended_tx_info,
+ p2sh_source_multisig_pubkeys=p2sh_source_multisig_pubkeys,
+ p2sh_source_multisig_pubkeys_required=p2sh_source_multisig_pubkeys_required,
+ p2sh_pretx_txid=p2sh_pretx_txid,
+ old_style_api=old_style_api,
+ segwit=segwit,
+ )
+
+
+COMPOSABLE_TRANSACTIONS = [
+ "bet",
+ "broadcast",
+ "btcpay",
+ "burn",
+ "cancel",
+ "destroy",
+ "dispenser",
+ "dividend",
+ "issuance",
+ "mpma",
+ "order",
+ "send",
+ "sweep",
+]
+
+
+def compose(db, source, transaction_name, **kwargs):
+ if transaction_name not in COMPOSABLE_TRANSACTIONS:
+ raise exceptions.TransactionError("Transaction type not composable.")
+ transaction_args, common_args, _ = split_compose_arams(**kwargs)
+ transaction_args["source"] = source
+ return compose_transaction(db, name=transaction_name, params=transaction_args, **common_args)
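A hedged sketch of how the dispatcher above is meant to be called, assuming `db` is an open database handle (addresses reused from the test fixtures): the keyword arguments mix message parameters (destination, asset, quantity) with common construct arguments (fee), and split_compose_arams separates them before compose_transaction is invoked.

raw_hex = compose(
    db,
    "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",  # source
    "send",                                # must be listed in COMPOSABLE_TRANSACTIONS
    destination="mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns",
    asset="XCP",
    quantity=100,
    fee=10000,  # routed to the common construct arguments
)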
+
+
+def compose_bet(
+ db,
+ address: str,
+ feed_address: str,
+ bet_type: int,
+ deadline: int,
+ wager_quantity: int,
+ counterwager_quantity: int,
+ expiration: int,
+ leverage: int = 5040,
+ target_value: float = None,
+ **construct_args,
+):
+ """
+ Composes a transaction to issue a bet against a feed.
+ :param address: The address that will make the bet
+ :param feed_address: The address that hosts the feed to be bet on
+ :param bet_type: 0 for Bullish CFD (deprecated), 1 for Bearish CFD (deprecated), 2 for Equal, 3 for NotEqual
+ :param deadline: The time at which the bet should be decided/settled, in Unix time (seconds since epoch)
+ :param wager_quantity: The quantity of XCP to wager (in satoshis, hence integer)
+ :param counterwager_quantity: The minimum quantity of XCP to be wagered against, for the bets to match
+ :param target_value: Target value for Equal/NotEqual bet
+ :param leverage: Leverage, as a fraction of 5040
+ :param expiration: The number of blocks after which the bet expires if it remains unmatched
+ """
+ return compose_transaction(
+ db,
+ name="bet",
+ params={
+ "source": address,
+ "feed_address": feed_address,
+ "bet_type": bet_type,
+ "deadline": deadline,
+ "wager_quantity": wager_quantity,
+ "counterwager_quantity": counterwager_quantity,
+ "target_value": target_value,
+ "leverage": leverage,
+ "expiration": expiration,
+ },
+ **construct_args,
+ )
+
+
+def compose_broadcast(
+ db, address: str, timestamp: int, value: float, fee_fraction: float, text: str, **construct_args
+):
+ """
+ Composes a transaction to broadcast textual and numerical information to the network.
+ :param address: The address that will be making the broadcast
+ :param timestamp: The timestamp of the broadcast, in Unix time
+ :param value: Numerical value of the broadcast
+ :param fee_fraction: How much of every bet on this feed should go to its operator; a fraction of 1 (i.e. 0.05 is five percent)
+ :param text: The textual part of the broadcast
+ """
+ return compose_transaction(
+ db,
+ name="broadcast",
+ params={
+ "source": address,
+ "timestamp": timestamp,
+ "value": value,
+ "fee_fraction": fee_fraction,
+ "text": text,
+ },
+ **construct_args,
+ )
+
+
+def compose_btcpay(db, address: str, order_match_id: str, **construct_args):
+ """
+ Composes a transaction to pay for a BTC order match.
+ :param address: The address that will be sending the payment
+ :param order_match_id: The ID of the order match to pay for
+ """
+ return compose_transaction(
+ db,
+ name="btcpay",
+ params={"source": address, "order_match_id": order_match_id},
+ **construct_args,
+ )
+
+
+def compose_burn(db, address: str, quantity: int, overburn: bool = False, **construct_args):
+ """
+ Composes a transaction to burn a given quantity of BTC for XCP (on mainnet, possible between blocks 278310 and 283810; on testnet it is still available).
+ :param address: The address with the BTC to burn
+ :param quantity: The quantity of BTC to burn (1 BTC maximum burn per address)
+ :param overburn: Whether to allow the burn to exceed 1 BTC for the address
+ """
+ return compose_transaction(
+ db,
+ name="burn",
+ params={"source": address, "quantity": quantity, "overburn": overburn},
+ **construct_args,
+ )
+
+
+def compose_cancel(db, address: str, offer_hash: str, **construct_args):
+ """
+ Composes a transaction to cancel an open order or bet.
+ :param address: The address that placed the order/bet to be cancelled
+ :param offer_hash: The hash of the order/bet to be cancelled
+ """
+ return compose_transaction(
+ db,
+ name="cancel",
+ params={"source": address, "offer_hash": offer_hash},
+ **construct_args,
+ )
+
+
+def compose_destroy(db, address: str, asset: str, quantity: int, tag: str, **construct_args):
+ """
+ Composes a transaction to destroy a quantity of an asset.
+ :param address: The address that will be sending the asset to be destroyed
+ :param asset: The asset to be destroyed
+ :param quantity: The quantity of the asset to be destroyed
+ :param tag: A tag for the destruction
+ """
+ return compose_transaction(
+ db,
+ name="destroy",
+ params={"source": address, "asset": asset, "quantity": quantity, "tag": tag},
+ **construct_args,
+ )
+
+
+def compose_dispenser(
+ db,
+ address: str,
+ asset: str,
+ give_quantity: int,
+ escrow_quantity: int,
+ mainchainrate: int,
+ status: int,
+ open_address: str = None,
+ oracle_address: str = None,
+ **construct_args,
+):
+ """
+ Opens or closes a dispenser for a given asset at a given rate of main chain asset (BTC). Escrowed quantity on open must be equal to or greater than give_quantity. It is suggested that you escrow multiples of give_quantity to ease dispenser operation.
+ :param address: The address that will be dispensing (must have the necessary escrow_quantity of the specified asset)
+ :param asset: The asset or subasset to dispense
+ :param give_quantity: The quantity of the asset to dispense
+ :param escrow_quantity: The quantity of the asset to reserve for this dispenser
+ :param mainchainrate: The quantity of the main chain asset (BTC) per dispensed portion
+ :param status: The state of the dispenser. 0 for open, 1 for open using open_address, 10 for closed
+ :param open_address: The address that you would like to open the dispenser on
+ :param oracle_address: The address that you would like to use as a price oracle for this dispenser
+ """
+ return compose_transaction(
+ db,
+ name="dispenser",
+ params={
+ "source": address,
+ "asset": asset,
+ "give_quantity": give_quantity,
+ "escrow_quantity": escrow_quantity,
+ "mainchainrate": mainchainrate,
+ "status": status,
+ "open_address": open_address,
+ "oracle_address": oracle_address,
+ },
+ **construct_args,
+ )
+
+
+def compose_dividend(
+ db, address: str, quantity_per_unit: int, asset: str, dividend_asset: str, **construct_args
+):
+ """
+ Composes a transaction to issue a dividend to holders of a given asset.
+ :param address: The address that will be issuing the dividend (must have the ownership of the asset which the dividend is being issued on)
+ :param quantity_per_unit: The quantity of dividend_asset rewarded per unit of the asset held
+ :param asset: The asset or subasset that the dividends are being rewarded on
+ :param dividend_asset: The asset or subasset that the dividends are paid in
+ """
+ return compose_transaction(
+ db,
+ name="dividend",
+ params={
+ "source": address,
+ "quantity_per_unit": quantity_per_unit,
+ "asset": asset,
+ "dividend_asset": dividend_asset,
+ },
+ **construct_args,
+ )
+
+
+def compose_issuance(
+ db,
+ address: str,
+ asset: str,
+ quantity: int,
+ transfer_destination: str = None,
+ divisible: bool = True,
+ lock: bool = False,
+ reset: bool = False,
+ description: str = None,
+ **construct_args,
+):
+ """
+ Composes a transaction to issue a new asset, issue more of an existing asset, lock an asset, reset existing supply, or transfer the ownership of an asset.
+ :param address: The address that will be issuing or transferring the asset
+ :param asset: The asset to issue or transfer. This can also be a subasset longname for new subasset issuances
+ :param quantity: The quantity of the asset to issue (set to 0 if transferring an asset)
+ :param transfer_destination: The address to receive the asset
+ :param divisible: Whether this asset is divisible or not (if a transfer, this value must match the value specified when the asset was originally issued)
+ :param lock: Whether this issuance should lock supply of this asset forever
+ :param reset: Whether this issuance should reset any existing supply
+ :param description: A textual description for the asset
+ """
+ return compose_transaction(
+ db,
+ name="issuance",
+ params={
+ "source": address,
+ "asset": asset,
+ "quantity": quantity,
+ "transfer_destination": transfer_destination,
+ "divisible": divisible,
+ "lock": lock,
+ "reset": reset,
+ "description": description,
+ },
+ **construct_args,
+ )
+
+
+def compose_mpma(
+ db,
+ source: str,
+ assets: str,
+ destinations: str,
+ quantities: str,
+ memo: str,
+ memo_is_hex: bool,
+ **construct_args,
+):
+ """
+ Composes a transaction to send multiple payments to multiple addresses.
+ :param source: The address that will be sending (must have the necessary quantities of the specified assets)
+ :param assets: Comma-separated list of assets to send
+ :param destinations: Comma-separated list of addresses to send to
+ :param quantities: Comma-separated list of quantities to send
+ :param memo: The Memo associated with this transaction
+ :param memo_is_hex: Whether the memo field is a hexadecimal string
+ """
+ asset_list = assets.split(",")
+ destination_list = destinations.split(",")
+ quantity_list = quantities.split(",")
+ if len(asset_list) != len(destination_list) or len(asset_list) != len(quantity_list):
+ raise exceptions.ComposeError(
+ "The number of assets, destinations, and quantities must be equal"
+ )
+ for quantity in quantity_list:
+ if not quantity.isdigit():
+ raise exceptions.ComposeError("Quantity must be an integer")
+ asset_dest_quant_list = list(zip(asset_list, destination_list, quantity_list))
+
+ return compose_transaction(
+ db,
+ name="version.mpma",
+ params={
+ "source": source,
+ "asset_dest_quant_list": asset_dest_quant_list,
+ "memo": memo,
+ "memo_is_hex": memo_is_hex,
+ },
+ **construct_args,
+ )
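An illustrative call (addresses and assets reused from the test fixtures, quantities hypothetical) showing the comma-separated convention: the three lists must have the same length and are zipped into (asset, destination, quantity) triples before being passed to the mpma compose method.

raw_hex = compose_mpma(
    db,
    source="mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
    assets="XCP,NODIVISIBLE",
    destinations="mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns,mqPCfvqTfYctXMUfmniXeG2nyaN8w6tPmj",
    quantities="100,1",
    memo="",
    memo_is_hex=False,
)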
+
+
+def compose_order(
+ db,
+ address: str,
+ give_asset: str,
+ give_quantity: int,
+ get_asset: str,
+ get_quantity: int,
+ expiration: int,
+ fee_required: int,
+ **construct_args,
+):
+ """
+ Composes a transaction to place an order on the distributed exchange.
+ :param address: The address that will be issuing the order request (must have the necessary quantity of the specified asset to give)
+ :param give_asset: The asset that will be given in the trade
+ :param give_quantity: The quantity of the asset that will be given
+ :param get_asset: The asset that will be received in the trade
+ :param get_quantity: The quantity of the asset that will be received
+ :param expiration: The number of blocks for which the order should be valid
+ :param fee_required: The miners’ fee required to be paid by orders for them to match this one; in BTC; required only if buying BTC (may be zero, though)
+ """
+ return compose_transaction(
+ db,
+ name="order",
+ params={
+ "source": address,
+ "give_asset": give_asset,
+ "give_quantity": give_quantity,
+ "get_asset": get_asset,
+ "get_quantity": get_quantity,
+ "expiration": expiration,
+ "fee_required": fee_required,
+ },
+ **construct_args,
+ )
+
+
+def compose_send(
+ db,
+ address: str,
+ destination: str,
+ asset: str,
+ quantity: int,
+ memo: str = None,
+ memo_is_hex: bool = False,
+ use_enhanced_send: bool = True,
+ **construct_args,
+):
+ """
+ Composes a transaction to send a quantity of an asset to another address.
+ :param address: The address that will be sending (must have the necessary quantity of the specified asset)
+ :param destination: The address that will be receiving the asset
+ :param asset: The asset or subasset to send
+ :param quantity: The quantity of the asset to send
+ :param memo: The Memo associated with this transaction
+ :param memo_is_hex: Whether the memo field is a hexadecimal string
+ :param use_enhanced_send: If this is false, construct a legacy transaction that sends bitcoin dust
+ """
+ return compose_transaction(
+ db,
+ name="send",
+ params={
+ "source": address,
+ "destination": destination,
+ "asset": asset,
+ "quantity": quantity,
+ "memo": memo,
+ "memo_is_hex": memo_is_hex,
+ "use_enhanced_send": use_enhanced_send,
+ },
+ **construct_args,
+ )
+
+
+def compose_sweep(db, address: str, destination: str, flags: int, memo: str, **construct_args):
+ """
+ Composes a transaction to send all assets and/or transfer ownerships to a destination address.
+ :param address: The address that will be sending
+ :param destination: The address to receive the assets and/or ownerships
+ :param flags: An OR mask of flags indicating how the sweep should be processed. Possible flags are:
+ - FLAG_BALANCES: (integer) 1, specifies that all balances should be transferred.
+ - FLAG_OWNERSHIP: (integer) 2, specifies that all ownerships should be transferred.
+ - FLAG_BINARY_MEMO: (integer) 4, specifies that the memo is in binary/hex form.
+ :param memo: The Memo associated with this transaction
+ """
+ return compose_transaction(
+ db,
+ name="sweep",
+ params={
+ "source": address,
+ "destination": destination,
+ "flags": flags,
+ "memo": memo,
+ },
+ **construct_args,
+ )
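As a small worked example of the flag mask described above, sweeping both balances and ownerships with a plain-text memo uses FLAG_BALANCES | FLAG_OWNERSHIP = 1 | 2 = 3 (a hypothetical call; addresses reused from the test fixtures).

FLAG_BALANCES = 1
FLAG_OWNERSHIP = 2

raw_hex = compose_sweep(
    db,
    address="mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
    destination="mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns",
    flags=FLAG_BALANCES | FLAG_OWNERSHIP,  # == 3: transfer balances and ownerships
    memo="hello",
)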
+
+
+def info(db, rawtransaction: str, block_index: int = None):
+ """
+ Returns Counterparty information from a raw transaction in hex format.
+ :param rawtransaction: Raw transaction in hex format
+ :param block_index: Block index; mandatory for transactions before block 335000
+ """
+ source, destination, btc_amount, fee, data, extra = gettxinfo.get_tx_info(
+ db, BlockchainParser().deserialize_tx(rawtransaction), block_index=block_index
+ )
+ return {
+ "source": source,
+ "destination": destination,
+ "btc_amount": btc_amount,
+ "fee": fee,
+ "data": util.hexlify(data) if data else "",
+ }
+
+
+def unpack(db, datahex: str, block_index: int = None):
+ """
+ Unpacks Counterparty data in hex format and returns the message type and data.
+ :param datahex: Data in hex format
+ :param block_index: Block index of the transaction containing this data
+ """
+ data = binascii.unhexlify(datahex)
+ message_type_id, message = message_type.unpack(data)
+ block_index = block_index or ledger.CURRENT_BLOCK_INDEX
+
+ issuance_ids = [
+ messages.issuance.ID,
+ messages.issuance.LR_ISSUANCE_ID,
+ messages.issuance.SUBASSET_ID,
+ messages.issuance.LR_SUBASSET_ID,
+ ]
+
+ # Unknown message type
+ message_data = {"error": "Unknown message type"}
+ # Bet
+ if message_type_id == messages.bet.ID:
+ message_type_name = "bet"
+ message_data = messages.bet.unpack(message, return_dict=True)
+ # Broadcast
+ elif message_type_id == messages.broadcast.ID:
+ message_type_name = "broadcast"
+ message_data = messages.broadcast.unpack(message, block_index, return_dict=True)
+ # BTCPay
+ elif message_type_id == messages.btcpay.ID:
+ message_type_name = "btcpay"
+ message_data = messages.btcpay.unpack(message, return_dict=True)
+ # Cancel
+ elif message_type_id == messages.cancel.ID:
+ message_type_name = "cancel"
+ message_data = messages.cancel.unpack(message, return_dict=True)
+ # Destroy
+ elif message_type_id == messages.destroy.ID:
+ message_type_name = "destroy"
+ message_data = messages.destroy.unpack(db, message, return_dict=True)
+ # Dispenser
+ elif message_type_id == messages.dispenser.ID:
+ message_type_name = "dispenser"
+ message_data = messages.dispenser.unpack(message, return_dict=True)
+ # Dividend
+ elif message_type_id == messages.dividend.ID:
+ message_type_name = "dividend"
+ message_data = messages.dividend.unpack(db, message, block_index, return_dict=True)
+ # Issuance
+ elif message_type_id in issuance_ids:
+ message_type_name = "issuance"
+ message_data = messages.issuance.unpack(
+ db, message, message_type_id, block_index, return_dict=True
+ )
+ # Order
+ elif message_type_id == messages.order.ID:
+ message_type_name = "order"
+ message_data = messages.order.unpack(db, message, block_index, return_dict=True)
+ # Send
+ elif message_type_id == messages.send.ID:
+ message_type_name = "send"
+ message_data = messages.send.unpack(db, message, block_index)
+ # Enhanced send
+ elif message_type_id == messages.versions.enhanced_send.ID:
+ message_type_name = "enhanced_send"
+ message_data = messages.versions.enhanced_send.unpack(message, block_index)
+ # MPMA send
+ elif message_type_id == messages.versions.mpma.ID:
+ message_type_name = "mpma_send"
+ message_data = messages.versions.mpma.unpack(message, block_index)
+ # RPS
+ elif message_type_id == messages.rps.ID:
+ message_type_name = "rps"
+ message_data = messages.rps.unpack(message, return_dict=True)
+ # RPS Resolve
+ elif message_type_id == messages.rpsresolve.ID:
+ message_type_name = "rpsresolve"
+ message_data = messages.rpsresolve.unpack(message, return_dict=True)
+ # Sweep
+ elif message_type_id == messages.sweep.ID:
+ message_type_name = "sweep"
+ message_data = messages.sweep.unpack(message)
+
+ return {
+ "message_type": message_type_name,
+ "message_type_id": message_type_id,
+ "message_data": message_data,
+ }
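A hypothetical driver for the two helpers above, assuming `db` is an open ledger database handle and `raw_hex` holds the hex of a confirmed Counterparty transaction (values are placeholders, not real data):

summary = info(db, raw_hex, block_index=750000)
# summary["data"] is the hex-encoded Counterparty payload, or "" for a plain BTC transaction
if summary["data"]:
    decoded = unpack(db, summary["data"], block_index=750000)
    print(decoded["message_type"], decoded["message_data"])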
diff --git a/counterparty-core/counterpartycore/lib/util.py b/counterparty-core/counterpartycore/lib/util.py
index 30c65e9681..61c39f417f 100644
--- a/counterparty-core/counterpartycore/lib/util.py
+++ b/counterparty-core/counterpartycore/lib/util.py
@@ -1,12 +1,10 @@
import binascii
import collections
import decimal
-import fractions # noqa: F401
import hashlib
import itertools
import json
import logging
-import os # noqa: F401
import random
import re
import sys
diff --git a/counterparty-core/counterpartycore/server.py b/counterparty-core/counterpartycore/server.py
index 6fb375d715..e0e4ff2986 100755
--- a/counterparty-core/counterpartycore/server.py
+++ b/counterparty-core/counterpartycore/server.py
@@ -19,17 +19,19 @@
from termcolor import colored, cprint
from counterpartycore.lib import (
- api,
backend,
blocks,
check,
config,
database,
ledger,
+ log,
transaction,
util,
)
from counterpartycore.lib import kickstart as kickstarter
+from counterpartycore.lib.api import api_server as api_v2
+from counterpartycore.lib.api import api_v1
logger = logging.getLogger(config.LOGGER_NAME)
D = decimal.Decimal
@@ -152,6 +154,12 @@ def initialise_config(
rpc_user=None,
rpc_password=None,
rpc_no_allow_cors=False,
+ api_host=None,
+ api_port=None,
+ api_user=None,
+ api_password=None,
+ api_no_allow_cors=False,
+ api_not_ready_http_code=503,
force=False,
requests_timeout=config.DEFAULT_REQUESTS_TIMEOUT,
rpc_batch_size=config.DEFAULT_RPC_BATCH_SIZE,
@@ -364,7 +372,7 @@ def initialise_config(
config.RPC_HOST = "localhost"
# The web root directory for API calls, eg. localhost:14000/rpc/
- config.RPC_WEBROOT = "/rpc/"
+ config.RPC_WEBROOT = "/old/rpc/"
# Server API RPC port
if rpc_port:
@@ -417,6 +425,58 @@ def initialise_config(
config.RPC_BATCH_SIZE = rpc_batch_size
+ # Server API RPC host
+ if api_host:
+ config.API_HOST = api_host
+ else:
+ config.API_HOST = "localhost"
+
+ # Server API port
+ if api_port:
+ config.API_PORT = api_port
+ else:
+ if config.TESTNET:
+ if config.TESTCOIN:
+ config.API_PORT = config.DEFAULT_API_PORT_TESTNET + 1
+ else:
+ config.API_PORT = config.DEFAULT_API_PORT_TESTNET
+ elif config.REGTEST:
+ if config.TESTCOIN:
+ config.API_PORT = config.DEFAULT_API_PORT_REGTEST + 1
+ else:
+ config.API_PORT = config.DEFAULT_API_PORT_REGTEST
+ else:
+ if config.TESTCOIN:
+ config.API_PORT = config.DEFAULT_API_PORT + 1
+ else:
+ config.API_PORT = config.DEFAULT_API_PORT
+ try:
+ config.API_PORT = int(config.API_PORT)
+ if not (int(config.API_PORT) > 1 and int(config.API_PORT) < 65535):
+ raise ConfigurationError("invalid server API port number")
+ except: # noqa: E722
+ raise ConfigurationError( # noqa: B904
+ "Please specific a valid port number rpc-port configuration parameter"
+ )
+
+ # Server API user
+ if api_user:
+ config.API_USER = api_user
+ else:
+ config.API_USER = "api"
+
+ if api_password:
+ config.API_PASSWORD = api_password
+ else:
+ config.API_PASSWORD = "api" # noqa: S105
+
+ config.API_NOT_READY_HTTP_CODE = api_not_ready_http_code
+
+ if api_no_allow_cors:
+ config.API_NO_ALLOW_CORS = api_no_allow_cors
+ else:
+ config.API_NO_ALLOW_CORS = False
+
##############
# OTHER SETTINGS
@@ -517,6 +577,67 @@ def initialise_config(
logger.info(f"Running v{config.VERSION_STRING} of counterparty-core.")
+def initialise_log_and_config(args):
+ # Configuration
+ init_args = {
+ "database_file": args.database_file,
+ "testnet": args.testnet,
+ "testcoin": args.testcoin,
+ "regtest": args.regtest,
+ "customnet": args.customnet,
+ "api_limit_rows": args.api_limit_rows,
+ "backend_connect": args.backend_connect,
+ "backend_port": args.backend_port,
+ "backend_user": args.backend_user,
+ "backend_password": args.backend_password,
+ "backend_ssl": args.backend_ssl,
+ "backend_ssl_no_verify": args.backend_ssl_no_verify,
+ "backend_poll_interval": args.backend_poll_interval,
+ "indexd_connect": args.indexd_connect,
+ "indexd_port": args.indexd_port,
+ "rpc_host": args.rpc_host,
+ "rpc_port": args.rpc_port,
+ "rpc_user": args.rpc_user,
+ "rpc_password": args.rpc_password,
+ "rpc_no_allow_cors": args.rpc_no_allow_cors,
+ "api_host": args.api_host,
+ "api_port": args.api_port,
+ "api_user": args.api_user,
+ "api_password": args.api_password,
+ "api_no_allow_cors": args.api_no_allow_cors,
+ "requests_timeout": args.requests_timeout,
+ "rpc_batch_size": args.rpc_batch_size,
+ "check_asset_conservation": args.check_asset_conservation,
+ "force": args.force,
+ "p2sh_dust_return_pubkey": args.p2sh_dust_return_pubkey,
+ "utxo_locks_max_addresses": args.utxo_locks_max_addresses,
+ "utxo_locks_max_age": args.utxo_locks_max_age,
+ "no_mempool": args.no_mempool,
+ "skip_db_check": args.skip_db_check,
+ }
+
+ initialise_log_config(
+ verbose=args.verbose,
+ quiet=args.quiet,
+ log_file=args.log_file,
+ api_log_file=args.api_log_file,
+ no_log_files=args.no_log_files,
+ testnet=args.testnet,
+ testcoin=args.testcoin,
+ regtest=args.regtest,
+ json_log=args.json_log,
+ )
+
+ # set up logging
+ log.set_up(
+ verbose=config.VERBOSE,
+ quiet=config.QUIET,
+ log_file=config.LOG,
+ log_in_console=args.action == "start",
+ )
+ initialise_config(**init_args)
+
+
def initialise_db():
if config.FORCE:
cprint("THE OPTION `--force` IS NOT FOR USE ON PRODUCTION SYSTEMS.", "yellow")
@@ -566,37 +687,53 @@ def connect_to_addrindexrs():
print(f"{OK_GREEN} {step}")
-def start_all(catch_up="normal"):
- try:
- # Backend.
- connect_to_backend()
+def start_all(args):
+ api_status_poller = None
+ api_server_v1 = None
+ api_server_v2 = None
+
+ # Backend.
+ connect_to_backend()
- if not os.path.exists(config.DATABASE) and catch_up == "bootstrap":
+ try:
+ if not os.path.exists(config.DATABASE) and args.catch_up == "bootstrap":
bootstrap(no_confirm=True)
db = initialise_db()
+ # Initialise.
+ blocks.initialise(db)
# Reset UTXO_LOCKS. This previously was done in
# initilise_config
transaction.initialise()
- # API Status Poller.
- api_status_poller = api.APIStatusPoller()
- api_status_poller.daemon = True
- api_status_poller.start()
+ if args.enable_api_v1:
+ # API Status Poller.
+ api_status_poller = api_v1.APIStatusPoller()
+ api_status_poller.daemon = True
+ api_status_poller.start()
+
+ # API Server v1.
+ api_server_v1 = api_v1.APIServer()
+ api_server_v1.daemon = True
+ api_server_v1.start()
- # API Server.
- api_server = api.APIServer()
- api_server.daemon = True
- api_server.start()
+ # API Server v2.
+ api_server_v2 = api_v2.APIServer()
+ api_server_v2.start(args)
# Server
blocks.follow(db)
except KeyboardInterrupt:
pass
finally:
- api_status_poller.stop()
- api_server.stop()
+ if args.enable_api_v1:
+ if api_status_poller:
+ api_status_poller.stop()
+ if api_server_v1:
+ api_server_v1.stop()
+ if api_server_v2:
+ api_server_v2.stop()
backend.stop()
database.optimize(db)
logger.info("Closing database...")
diff --git a/counterparty-core/counterpartycore/test/api_v2_test.py b/counterparty-core/counterpartycore/test/api_v2_test.py
new file mode 100644
index 0000000000..9e53b6a4b4
--- /dev/null
+++ b/counterparty-core/counterpartycore/test/api_v2_test.py
@@ -0,0 +1,249 @@
+import tempfile
+
+import pytest
+import requests
+
+from counterpartycore.lib import util
+
+ # this is required near the top to do setup of the test suite
+from counterpartycore.test import (
+ conftest, # noqa: F401
+)
+from counterpartycore.test.fixtures.params import ADDR
+from counterpartycore.test.util_test import CURR_DIR
+
+FIXTURE_SQL_FILE = CURR_DIR + "/fixtures/scenarios/unittest_fixture.sql"
+FIXTURE_DB = tempfile.gettempdir() + "/fixtures.unittest_fixture.db"
+API_ROOT = "http://api:api@localhost:10009"
+
+
+@pytest.mark.usefixtures("api_server_v2")
+def test_new_get_balances_by_address():
+ alice = ADDR[0]
+ url = f"{API_ROOT}/addresses/{alice}/balances"
+ result = requests.get(url) # noqa: S113
+ assert result.json() == [
+ {
+ "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "asset": "A95428956661682277",
+ "quantity": 100000000,
+ },
+ {
+ "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "asset": "CALLABLE",
+ "quantity": 1000,
+ },
+ {
+ "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "asset": "DIVISIBLE",
+ "quantity": 98800000000,
+ },
+ {
+ "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "asset": "LOCKED",
+ "quantity": 1000,
+ },
+ {
+ "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "asset": "MAXI",
+ "quantity": 9223372036854775807,
+ },
+ {
+ "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "asset": "NODIVISIBLE",
+ "quantity": 985,
+ },
+ {
+ "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "asset": "PARENT",
+ "quantity": 100000000,
+ },
+ {
+ "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "asset": "XCP",
+ "quantity": 91875000000,
+ },
+ ]
+
+
+@pytest.mark.usefixtures("api_server_v2")
+def test_new_get_balances_by_asset():
+ asset = "XCP"
+ url = f"{API_ROOT}/assets/{asset}/balances"
+ result = requests.get(url) # noqa: S113
+ assert result.json() == [
+ {
+ "address": "1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2",
+ "asset": "XCP",
+ "quantity": 300000000,
+ },
+ {
+ "address": "2MyJHMUenMWonC35Yi6PHC7i2tkS7PuomCy",
+ "asset": "XCP",
+ "quantity": 46449548498,
+ },
+ {
+ "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "asset": "XCP",
+ "quantity": 91875000000,
+ },
+ {
+ "address": "mqPCfvqTfYctXMUfmniXeG2nyaN8w6tPmj",
+ "asset": "XCP",
+ "quantity": 92945878046,
+ },
+ {
+ "address": "mrPk7hTeZWjjSCrMTC2ET4SAUThQt7C4uK",
+ "asset": "XCP",
+ "quantity": 14999857,
+ },
+ {
+ "address": "mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns",
+ "asset": "XCP",
+ "quantity": 99999990,
+ },
+ {
+ "address": "munimLLHjPhGeSU5rYB2HN79LJa8bRZr5b",
+ "asset": "XCP",
+ "quantity": 92999130360,
+ },
+ {
+ "address": "mwtPsLQxW9xpm7gdLmwWvJK5ABdPUVJm42",
+ "asset": "XCP",
+ "quantity": 92949122099,
+ },
+ {
+ "address": "myAtcJEHAsDLbTkai6ipWDZeeL7VkxXsiM",
+ "asset": "XCP",
+ "quantity": 92999138812,
+ },
+ {
+ "address": "tb1qw508d6qejxtdg4y5r3zarvary0c5xw7kxpjzsx",
+ "asset": "XCP",
+ "quantity": 92999030129,
+ },
+ ]
+
+
+@pytest.mark.usefixtures("api_server")
+@pytest.mark.usefixtures("api_server_v2")
+def test_new_get_balances_vs_old():
+ asset = "XCP"
+ url = f"{API_ROOT}/assets/{asset}/balances"
+ new_balances = requests.get(url).json() # noqa: S113
+ old_balance = util.api(
+ "get_balances",
+ {
+ "filters": [
+ {"field": "asset", "op": "==", "value": asset},
+ {"field": "quantity", "op": "!=", "value": 0},
+ ],
+ },
+ )
+ new_balances = sorted(new_balances, key=lambda x: (x["address"], x["asset"], x["quantity"]))
+ old_balance = sorted(old_balance, key=lambda x: (x["address"], x["asset"], x["quantity"]))
+ assert len(new_balances) == len(old_balance)
+ for new_balance, old_balance in zip(new_balances, old_balance): # noqa: B020
+ assert new_balance["address"] == old_balance["address"]
+ assert new_balance["asset"] == old_balance["asset"]
+ assert new_balance["quantity"] == old_balance["quantity"]
+
+
+@pytest.mark.usefixtures("api_server_v2")
+def test_new_get_asset_info():
+ asset = "NODIVISIBLE"
+ url = f"{API_ROOT}/assets/{asset}"
+ result = requests.get(url) # noqa: S113
+
+ assert result.json() == {
+ "asset": "NODIVISIBLE",
+ "asset_longname": None,
+ "description": "No divisible asset",
+ "divisible": False,
+ "issuer": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "locked": False,
+ "owner": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "supply": 1000,
+ "holder_count": 3,
+ }
+
+
+@pytest.mark.usefixtures("api_server_v2")
+def test_new_get_asset_orders():
+ asset = "XCP"
+ url = f"{API_ROOT}/assets/{asset}/orders"
+ result = requests.get(url).json() # noqa: S113
+ assert len(result) == 6
+ assert result[0] == {
+ "tx_index": 11,
+ "tx_hash": "1899b2e6ec36ba4bc9d035e6640b0a62b08c3a147c77c89183a77d9ed9081b3a",
+ "block_index": 310010,
+ "source": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "give_asset": "XCP",
+ "give_quantity": 100000000,
+ "give_remaining": 100000000,
+ "get_asset": "BTC",
+ "get_quantity": 1000000,
+ "get_remaining": 1000000,
+ "expiration": 2000,
+ "expire_index": 312010,
+ "fee_required": 900000,
+ "fee_required_remaining": 900000,
+ "fee_provided": 6800,
+ "fee_provided_remaining": 6800,
+ "status": "open",
+ }
+
+
+@pytest.mark.usefixtures("api_server_v2")
+def test_new_get_order_info():
+ tx_hash = "1899b2e6ec36ba4bc9d035e6640b0a62b08c3a147c77c89183a77d9ed9081b3a"
+ url = f"{API_ROOT}/orders/{tx_hash}"
+ result = requests.get(url).json() # noqa: S113
+ assert result[0] == {
+ "tx_index": 11,
+ "tx_hash": "1899b2e6ec36ba4bc9d035e6640b0a62b08c3a147c77c89183a77d9ed9081b3a",
+ "block_index": 310010,
+ "source": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "give_asset": "XCP",
+ "give_quantity": 100000000,
+ "give_remaining": 100000000,
+ "get_asset": "BTC",
+ "get_quantity": 1000000,
+ "get_remaining": 1000000,
+ "expiration": 2000,
+ "expire_index": 312010,
+ "fee_required": 900000,
+ "fee_required_remaining": 900000,
+ "fee_provided": 6800,
+ "fee_provided_remaining": 6800,
+ "status": "open",
+ }
+
+
+@pytest.mark.usefixtures("api_server_v2")
+def test_new_get_order_matches():
+ tx_hash = "74db175c4669a3d3a59e3fcddce9e97fcd7d12c35b58ef31845a1b20a1739498"
+ url = f"{API_ROOT}/orders/{tx_hash}/matches"
+ result = requests.get(url).json() # noqa: S113
+ assert result[0] == {
+ "id": "74db175c4669a3d3a59e3fcddce9e97fcd7d12c35b58ef31845a1b20a1739498_1b294dd8592e76899b1c106782e4c96e63114abd8e3fa09ab6d2d52496b5bf81",
+ "tx0_index": 492,
+ "tx0_hash": "74db175c4669a3d3a59e3fcddce9e97fcd7d12c35b58ef31845a1b20a1739498",
+ "tx0_address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
+ "tx1_index": 493,
+ "tx1_hash": "1b294dd8592e76899b1c106782e4c96e63114abd8e3fa09ab6d2d52496b5bf81",
+ "tx1_address": "mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns",
+ "forward_asset": "XCP",
+ "forward_quantity": 100000000,
+ "backward_asset": "BTC",
+ "backward_quantity": 800000,
+ "tx0_block_index": 310491,
+ "tx1_block_index": 310492,
+ "block_index": 310492,
+ "tx0_expiration": 2000,
+ "tx1_expiration": 2000,
+ "match_expire_index": 310512,
+ "fee_paid": 7200,
+ "status": "pending",
+ }
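For readers running these tests by hand, a minimal sketch of querying the v2 API with explicit HTTP basic auth rather than credentials embedded in the URL (host, port, and the api/api credentials match the test defaults above):

import requests

resp = requests.get(
    "http://localhost:10009/assets/XCP/balances",
    auth=("api", "api"),  # the --api-user / --api-password defaults used by the tests
    timeout=10,
)
balances = resp.json()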
diff --git a/counterparty-core/counterpartycore/test/bytespersigop_test.py b/counterparty-core/counterpartycore/test/bytespersigop_test.py
index 25e336538c..ee246d9471 100644
--- a/counterparty-core/counterpartycore/test/bytespersigop_test.py
+++ b/counterparty-core/counterpartycore/test/bytespersigop_test.py
@@ -1,9 +1,7 @@
import binascii
-import pprint # noqa: F401
import tempfile
import bitcoin as bitcoinlib
-import pytest # noqa: F401
from counterpartycore.lib import api, blocks, exceptions, ledger, transaction, util # noqa: F401
from counterpartycore.test import (
@@ -26,7 +24,7 @@ def test_bytespersigop(server_db):
transaction.initialise()
# ADDR[0], bytespersigop=False, desc 41 bytes, opreturn
- txhex = api.compose_transaction(
+ txhex = transaction.compose_transaction(
server_db,
"issuance",
{
@@ -46,7 +44,7 @@ def test_bytespersigop(server_db):
assert "OP_RETURN" in repr(tx.vout[0].scriptPubKey)
# ADDR[0], bytespersigop=False, desc 42 bytes, multisig
- txhex = api.compose_transaction(
+ txhex = transaction.compose_transaction(
server_db,
"issuance",
{
@@ -71,7 +69,7 @@ def test_bytespersigop(server_db):
assert ledger.enabled("bytespersigop") == True # noqa: E712
# ADDR[0], bytespersigop=True, desc 41 bytes, opreturn
- txhex = api.compose_transaction(
+ txhex = transaction.compose_transaction(
server_db,
"issuance",
{
@@ -91,7 +89,7 @@ def test_bytespersigop(server_db):
assert "OP_RETURN" in repr(tx.vout[0].scriptPubKey)
# ADDR[1], bytespersigop=True, desc 41 bytes, opreturn encoding
- txhex = api.compose_transaction(
+ txhex = transaction.compose_transaction(
server_db,
"issuance",
{
@@ -112,7 +110,7 @@ def test_bytespersigop(server_db):
# ADDR[1], bytespersigop=True, desc 20 bytes, FORCED encoding=multisig
# will use 2 UTXOs to make the bytes:sigop ratio in our favor
- txhex = api.compose_transaction(
+ txhex = transaction.compose_transaction(
server_db,
"issuance",
{
diff --git a/counterparty-core/counterpartycore/test/complex_unit_test.py b/counterparty-core/counterpartycore/test/complex_unit_test.py
index 62087416a3..7373d7ce3f 100644
--- a/counterparty-core/counterpartycore/test/complex_unit_test.py
+++ b/counterparty-core/counterpartycore/test/complex_unit_test.py
@@ -1,12 +1,11 @@
import json
-import pprint # noqa: F401
import tempfile
import pytest
-import requests
from apsw import ConstraintError
-from counterpartycore.lib import api, blocks, config, ledger, util
+from counterpartycore.lib import blocks, ledger, util
+from counterpartycore.lib.api import api_v1
# this is require near the top to do setup of the test suite
from counterpartycore.test import (
@@ -218,7 +217,7 @@ def test_update_lock(server_db):
@pytest.mark.usefixtures("api_server")
def test_updated_tables_endpoints():
- for table in api.API_TABLES:
+ for table in api_v1.API_TABLES:
if table in ["mempool"]:
continue
result = util.api("get_" + table, {})
@@ -331,237 +330,6 @@ def test_updated_tables_endpoints():
}
-@pytest.mark.usefixtures("api_server")
-def test_new_get_balances_by_address():
- alice = ADDR[0]
- url = f"{config.API_ROOT}/addresses/{alice}/balances"
- result = requests.get(url) # noqa: S113
- assert result.json() == [
- {
- "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "asset": "A95428956661682277",
- "quantity": 100000000,
- },
- {
- "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "asset": "CALLABLE",
- "quantity": 1000,
- },
- {
- "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "asset": "DIVISIBLE",
- "quantity": 98800000000,
- },
- {
- "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "asset": "LOCKED",
- "quantity": 1000,
- },
- {
- "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "asset": "MAXI",
- "quantity": 9223372036854775807,
- },
- {
- "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "asset": "NODIVISIBLE",
- "quantity": 985,
- },
- {
- "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "asset": "PARENT",
- "quantity": 100000000,
- },
- {
- "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "asset": "XCP",
- "quantity": 91875000000,
- },
- ]
-
-
-@pytest.mark.usefixtures("api_server")
-def test_new_get_balances_by_asset():
- asset = "XCP"
- url = f"{config.API_ROOT}/assets/{asset}/balances"
- result = requests.get(url) # noqa: S113
- assert result.json() == [
- {
- "address": "1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2",
- "asset": "XCP",
- "quantity": 300000000,
- },
- {
- "address": "2MyJHMUenMWonC35Yi6PHC7i2tkS7PuomCy",
- "asset": "XCP",
- "quantity": 46449548498,
- },
- {
- "address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "asset": "XCP",
- "quantity": 91875000000,
- },
- {
- "address": "mqPCfvqTfYctXMUfmniXeG2nyaN8w6tPmj",
- "asset": "XCP",
- "quantity": 92945878046,
- },
- {
- "address": "mrPk7hTeZWjjSCrMTC2ET4SAUThQt7C4uK",
- "asset": "XCP",
- "quantity": 14999857,
- },
- {
- "address": "mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns",
- "asset": "XCP",
- "quantity": 99999990,
- },
- {
- "address": "munimLLHjPhGeSU5rYB2HN79LJa8bRZr5b",
- "asset": "XCP",
- "quantity": 92999130360,
- },
- {
- "address": "mwtPsLQxW9xpm7gdLmwWvJK5ABdPUVJm42",
- "asset": "XCP",
- "quantity": 92949122099,
- },
- {
- "address": "myAtcJEHAsDLbTkai6ipWDZeeL7VkxXsiM",
- "asset": "XCP",
- "quantity": 92999138812,
- },
- {
- "address": "tb1qw508d6qejxtdg4y5r3zarvary0c5xw7kxpjzsx",
- "asset": "XCP",
- "quantity": 92999030129,
- },
- ]
-
-
-@pytest.mark.usefixtures("api_server")
-def test_new_get_balances_vs_old():
- asset = "XCP"
- url = f"{config.API_ROOT}/assets/{asset}/balances"
- new_balances = requests.get(url).json() # noqa: S113
- old_balance = util.api(
- "get_balances",
- {
- "filters": [
- {"field": "asset", "op": "==", "value": asset},
- {"field": "quantity", "op": "!=", "value": 0},
- ],
- },
- )
- new_balances = sorted(new_balances, key=lambda x: (x["address"], x["asset"], x["quantity"]))
- old_balance = sorted(old_balance, key=lambda x: (x["address"], x["asset"], x["quantity"]))
- assert len(new_balances) == len(old_balance)
- for new_balance, old_balance in zip(new_balances, old_balance): # noqa: B020
- assert new_balance["address"] == old_balance["address"]
- assert new_balance["asset"] == old_balance["asset"]
- assert new_balance["quantity"] == old_balance["quantity"]
-
-
-@pytest.mark.usefixtures("api_server")
-def test_new_get_asset_info():
- asset = "NODIVISIBLE"
- url = f"{config.API_ROOT}/assets/{asset}"
- result = requests.get(url) # noqa: S113
- assert result.json() == [
- {
- "asset": "NODIVISIBLE",
- "asset_longname": None,
- "description": "No divisible asset",
- "divisible": False,
- "issuer": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "locked": False,
- "owner": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "supply": 1000,
- }
- ]
-
-
-@pytest.mark.usefixtures("api_server")
-def test_new_get_asset_orders():
- asset = "XCP"
- url = f"{config.API_ROOT}/assets/{asset}/orders"
- result = requests.get(url).json() # noqa: S113
- assert len(result) == 6
- assert result[0] == {
- "tx_index": 11,
- "tx_hash": "1899b2e6ec36ba4bc9d035e6640b0a62b08c3a147c77c89183a77d9ed9081b3a",
- "block_index": 310010,
- "source": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "give_asset": "XCP",
- "give_quantity": 100000000,
- "give_remaining": 100000000,
- "get_asset": "BTC",
- "get_quantity": 1000000,
- "get_remaining": 1000000,
- "expiration": 2000,
- "expire_index": 312010,
- "fee_required": 900000,
- "fee_required_remaining": 900000,
- "fee_provided": 6800,
- "fee_provided_remaining": 6800,
- "status": "open",
- }
-
-
-@pytest.mark.usefixtures("api_server")
-def test_new_get_order_info():
- tx_hash = "1899b2e6ec36ba4bc9d035e6640b0a62b08c3a147c77c89183a77d9ed9081b3a"
- url = f"{config.API_ROOT}/orders/{tx_hash}"
- result = requests.get(url).json() # noqa: S113
- assert result[0] == {
- "tx_index": 11,
- "tx_hash": "1899b2e6ec36ba4bc9d035e6640b0a62b08c3a147c77c89183a77d9ed9081b3a",
- "block_index": 310010,
- "source": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "give_asset": "XCP",
- "give_quantity": 100000000,
- "give_remaining": 100000000,
- "get_asset": "BTC",
- "get_quantity": 1000000,
- "get_remaining": 1000000,
- "expiration": 2000,
- "expire_index": 312010,
- "fee_required": 900000,
- "fee_required_remaining": 900000,
- "fee_provided": 6800,
- "fee_provided_remaining": 6800,
- "status": "open",
- }
-
-
-@pytest.mark.usefixtures("api_server")
-def test_new_get_order_matches():
- tx_hash = "74db175c4669a3d3a59e3fcddce9e97fcd7d12c35b58ef31845a1b20a1739498"
- url = f"{config.API_ROOT}/orders/{tx_hash}/matches"
- result = requests.get(url).json() # noqa: S113
- assert result[0] == {
- "id": "74db175c4669a3d3a59e3fcddce9e97fcd7d12c35b58ef31845a1b20a1739498_1b294dd8592e76899b1c106782e4c96e63114abd8e3fa09ab6d2d52496b5bf81",
- "tx0_index": 492,
- "tx0_hash": "74db175c4669a3d3a59e3fcddce9e97fcd7d12c35b58ef31845a1b20a1739498",
- "tx0_address": "mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc",
- "tx1_index": 493,
- "tx1_hash": "1b294dd8592e76899b1c106782e4c96e63114abd8e3fa09ab6d2d52496b5bf81",
- "tx1_address": "mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns",
- "forward_asset": "XCP",
- "forward_quantity": 100000000,
- "backward_asset": "BTC",
- "backward_quantity": 800000,
- "tx0_block_index": 310491,
- "tx1_block_index": 310492,
- "block_index": 310492,
- "tx0_expiration": 2000,
- "tx1_expiration": 2000,
- "match_expire_index": 310512,
- "fee_paid": 7200,
- "status": "pending",
- }
-
-
@pytest.mark.usefixtures("server_db")
def test_messages_table(server_db):
cursor = server_db.cursor()
diff --git a/counterparty-core/counterpartycore/test/conftest.py b/counterparty-core/counterpartycore/test/conftest.py
index 1073c4e90c..613db5ca3f 100644
--- a/counterparty-core/counterpartycore/test/conftest.py
+++ b/counterparty-core/counterpartycore/test/conftest.py
@@ -2,11 +2,10 @@
Test suite configuration
"""
+import argparse
import binascii
import json
import logging
-import os # noqa: F401
-import pprint # noqa: F401
import time
from datetime import datetime
@@ -18,7 +17,9 @@
from pycoin.coins.bitcoin import Tx # noqa: F401
from counterpartycore import server
-from counterpartycore.lib import api, arc4, config, database, ledger, log, script, util
+from counterpartycore.lib import arc4, config, database, ledger, log, script, util
+from counterpartycore.lib.api import api_server as api_v2
+from counterpartycore.lib.api import api_v1 as api
from counterpartycore.test import util_test
from counterpartycore.test.fixtures.params import DEFAULT_PARAMS
from counterpartycore.test.fixtures.scenarios import INTEGRATION_SCENARIOS
@@ -234,12 +235,79 @@ def api_server(request, cp_server):
return api_server
+@pytest.fixture(scope="module")
+def api_server_v2(request, cp_server):
+ default_config = {
+ "testnet": False,
+ "testcoin": False,
+ "regtest": False,
+ "api_limit_rows": 1000,
+ "backend_connect": None,
+ "backend_port": None,
+ "backend_user": None,
+ "backend_password": None,
+ "indexd_connect": None,
+ "indexd_port": None,
+ "backend_ssl": False,
+ "backend_ssl_no_verify": False,
+ "backend_poll_interval": None,
+ "rpc_host": None,
+ "rpc_user": None,
+ "rpc_password": None,
+ "rpc_no_allow_cors": False,
+ "api_host": "localhost",
+ "api_user": "api",
+ "api_password": "api",
+ "api_no_allow_cors": False,
+ "api_not_ready_http_code": 503,
+ "force": False,
+ "requests_timeout": config.DEFAULT_REQUESTS_TIMEOUT,
+ "rpc_batch_size": config.DEFAULT_RPC_BATCH_SIZE,
+ "check_asset_conservation": False,
+ "backend_ssl_verify": None,
+ "rpc_allow_cors": None,
+ "p2sh_dust_return_pubkey": None,
+ "utxo_locks_max_addresses": config.DEFAULT_UTXO_LOCKS_MAX_ADDRESSES,
+ "utxo_locks_max_age": config.DEFAULT_UTXO_LOCKS_MAX_AGE,
+ "estimate_fee_per_kb": None,
+ "customnet": None,
+ "verbose": False,
+ "quiet": False,
+ "log_file": None,
+ "api_log_file": None,
+ "no_log_files": False,
+ "json_log": False,
+ "no_check_asset_conservation": True,
+ "action": "",
+ "no_refresh_backend_height": True,
+ "no_mempool": False,
+ "skip_db_check": False,
+ }
+ server_config = (
+ default_config
+ | util_test.COUNTERPARTYD_OPTIONS
+ | {
+ "database_file": request.module.FIXTURE_DB,
+ "api_port": TEST_RPC_PORT + 10,
+ }
+ )
+ args = argparse.Namespace(**server_config)
+ api_server = api_v2.APIServer()
+ api_server.start(args)
+ time.sleep(1)
+
+ request.addfinalizer(lambda: api_server.stop())
+
+ return api_server
+
+
@pytest.fixture(scope="module")
def cp_server(request):
dbfile = request.module.FIXTURE_DB
sqlfile = request.module.FIXTURE_SQL_FILE
options = getattr(request.module, "FIXTURE_OPTIONS", {})
+ print(f"cp_server: {dbfile} {sqlfile} {options}")
db = util_test.init_database(sqlfile, dbfile, options) # noqa: F841
# monkeypatch this here because init_mock_functions can run before cp_server
@@ -454,7 +522,7 @@ def init_arc4(seed):
monkeypatch.setattr("counterpartycore.lib.log.isodt", isodt)
monkeypatch.setattr("counterpartycore.lib.ledger.curr_time", curr_time)
monkeypatch.setattr("counterpartycore.lib.util.date_passed", date_passed)
- monkeypatch.setattr("counterpartycore.lib.api.init_api_access_log", init_api_access_log)
+ monkeypatch.setattr("counterpartycore.lib.api.util.init_api_access_log", init_api_access_log)
if hasattr(config, "PREFIX"):
monkeypatch.setattr("counterpartycore.lib.config.PREFIX", b"TESTXXXX")
monkeypatch.setattr("counterpartycore.lib.backend.getrawtransaction", mocked_getrawtransaction)
diff --git a/counterparty-core/counterpartycore/test/estimate_fee_per_kb_test.py b/counterparty-core/counterpartycore/test/estimate_fee_per_kb_test.py
index 5a05f50b7d..89e8582c4d 100644
--- a/counterparty-core/counterpartycore/test/estimate_fee_per_kb_test.py
+++ b/counterparty-core/counterpartycore/test/estimate_fee_per_kb_test.py
@@ -4,7 +4,7 @@
import bitcoin as bitcoinlib
-from counterpartycore.lib import api, backend, transaction
+from counterpartycore.lib import backend, transaction
from counterpartycore.test import (
util_test,
)
@@ -41,7 +41,7 @@ def _fee_per_kb(conf_target, mode):
with util_test.ConfigContext(ESTIMATE_FEE_PER_KB=True):
transaction.initialise()
- txhex = api.compose_transaction(
+ txhex = transaction.compose_transaction(
server_db,
"send",
{"source": ADDR[0], "destination": ADDR[1], "asset": "XCP", "quantity": 100},
diff --git a/counterparty-core/counterpartycore/test/fixtures/vectors.py b/counterparty-core/counterpartycore/test/fixtures/vectors.py
index 15a554dc60..43717b875b 100644
--- a/counterparty-core/counterpartycore/test/fixtures/vectors.py
+++ b/counterparty-core/counterpartycore/test/fixtures/vectors.py
@@ -9,13 +9,12 @@
"""
import binascii
-import json # noqa: F401
from fractions import Fraction
import bitcoin as bitcoinlib
from counterpartycore.lib import address, config, exceptions, script # noqa: F401
-from counterpartycore.lib.api import APIError
+from counterpartycore.lib.api.api_v1 import APIError
from counterpartycore.lib.kickstart.blocks_parser import BlockchainParser
from counterpartycore.lib.ledger import CreditError, DebitError
from counterpartycore.lib.messages import issuance
@@ -6479,28 +6478,24 @@
{
"in": (
b"o\x9c\x8d\x1fT\x05E\x1d\xe6\x07\x0b\xf1\xdb\x86\xabj\xcc\xb4\x95\xb6%\x01",
- DP["default_block_index"],
),
"out": {"destination": ADDR[5], "flags": 1, "memo": None},
},
{
"in": (
b"o\x9c\x8d\x1fT\x05E\x1d\xe6\x07\x0b\xf1\xdb\x86\xabj\xcc\xb4\x95\xb6%\x02",
- DP["default_block_index"],
),
"out": {"destination": ADDR[5], "flags": 2, "memo": None},
},
{
"in": (
b"o\x9c\x8d\x1fT\x05E\x1d\xe6\x07\x0b\xf1\xdb\x86\xabj\xcc\xb4\x95\xb6%\x03test",
- DP["default_block_index"],
),
"out": {"destination": ADDR[5], "flags": 3, "memo": "test"},
},
{
"in": (
b"o\x9c\x8d\x1fT\x05E\x1d\xe6\x07\x0b\xf1\xdb\x86\xabj\xcc\xb4\x95\xb6%\x07\xca\xfe\xba\xbe",
- DP["default_block_index"],
),
"out": {"destination": ADDR[5], "flags": 7, "memo": b"\xca\xfe\xba\xbe"},
},
@@ -7696,7 +7691,7 @@
},
],
},
- "api": {
+ "api_v1": {
"get_rows": [
{
"in": ("balances", None, "AND", None, None, None, None, None, 1000, 0, True),
diff --git a/counterparty-core/counterpartycore/test/p2sh_encoding_test.py b/counterparty-core/counterpartycore/test/p2sh_encoding_test.py
index 7cf1dc9fdd..06c1bf4088 100644
--- a/counterparty-core/counterpartycore/test/p2sh_encoding_test.py
+++ b/counterparty-core/counterpartycore/test/p2sh_encoding_test.py
@@ -1,8 +1,6 @@
import binascii
import hashlib
import logging
-import math # noqa: F401
-import pprint # noqa: F401
import tempfile
import time
@@ -20,14 +18,13 @@
logger = logging.getLogger(__name__)
from counterpartycore.lib import ( # noqa: E402
- api,
backend,
config,
exceptions,
gettxinfo,
ledger,
script,
- util, # noqa: F401
+ transaction,
)
from counterpartycore.lib.kickstart.blocks_parser import BlockchainParser # noqa: E402
from counterpartycore.lib.transaction_helper import p2sh_encoding, serializer # noqa: E402, F401
@@ -78,7 +75,7 @@ def test_p2sh_encoding(server_db):
fee = 20000
fee_per_kb = 50000
- result = api.compose_transaction(
+ result = transaction.compose_transaction(
server_db,
"send",
{"source": source, "destination": destination, "asset": "XCP", "quantity": 100},
@@ -150,7 +147,7 @@ def test_p2sh_encoding(server_db):
logger.debug(f"pretxid {pretxid}")
# check that when we do another, unrelated, send that it won't use our UTXO
- result = api.compose_transaction(
+ result = transaction.compose_transaction(
server_db,
"send",
{"source": source, "destination": destination, "asset": "XCP", "quantity": 100},
@@ -164,7 +161,7 @@ def test_p2sh_encoding(server_db):
)
# now compose the data transaction
- result = api.compose_transaction(
+ result = transaction.compose_transaction(
server_db,
"send",
{"source": source, "destination": destination, "asset": "XCP", "quantity": 100},
@@ -256,7 +253,7 @@ def test_p2sh_encoding_long_data(server_db):
# pprint.pprint(utxos)
fee_per_kb = 50000
- result = api.compose_transaction(
+ result = transaction.compose_transaction(
server_db,
"broadcast",
{
@@ -333,7 +330,7 @@ def test_p2sh_encoding_long_data(server_db):
logger.debug(f"pretxid {pretxid}")
# now compose the data transaction
- result = api.compose_transaction(
+ result = transaction.compose_transaction(
server_db,
"broadcast",
{
@@ -438,7 +435,7 @@ def test_p2sh_encoding_p2sh_source_not_supported(server_db):
fee_per_kb = 50000
with pytest.raises(exceptions.TransactionError):
- result = api.compose_transaction( # noqa: F841
+ result = transaction.compose_transaction( # noqa: F841
server_db,
"send",
{"source": source, "destination": destination, "asset": "XCP", "quantity": 100},
@@ -480,7 +477,7 @@ def test_p2sh_encoding_manual_multisig_transaction(server_db):
# setup transaction
fee = 20000
fee_per_kb = 50000
- pretxhex = api.compose_transaction(
+ pretxhex = transaction.compose_transaction(
server_db,
"send",
{
@@ -506,7 +503,7 @@ def test_p2sh_encoding_manual_multisig_transaction(server_db):
logger.debug(f"pretxid {pretxid}")
# now compose the data transaction
- result = api.compose_transaction(
+ result = transaction.compose_transaction(
server_db,
"send",
{"source": source, "destination": destination, "asset": "XCP", "quantity": 100},
diff --git a/counterparty-core/counterpartycore/test/util_test.py b/counterparty-core/counterpartycore/test/util_test.py
index 80bbb4493f..70bea57256 100644
--- a/counterparty-core/counterpartycore/test/util_test.py
+++ b/counterparty-core/counterpartycore/test/util_test.py
@@ -67,6 +67,7 @@
"testcoin": False,
"rpc_port": 9999,
"rpc_password": "pass",
+ "api_password": "api",
"backend_port": 18332,
"backend_password": "pass",
"backend_ssl_no_verify": True,
@@ -777,6 +778,9 @@ def exec_tested_method(tx_name, method, tested_method, inputs, server_db):
or tx_name == "backend"
or tx_name == "message_type"
or tx_name == "address"
+ or (tx_name == "versions.enhanced_send" and method == "unpack")
+ or (tx_name == "versions.mpma" and method == "unpack")
+ or (tx_name == "sweep" and method == "unpack")
):
return tested_method(*inputs)
else:
@@ -803,7 +807,10 @@ def check_outputs(
try:
tested_module = sys.modules[f"counterpartycore.lib.{tx_name}"]
except KeyError: # TODO: hack
- tested_module = sys.modules[f"counterpartycore.lib.messages.{tx_name}"]
+ if tx_name == "api_v1":
+ tested_module = sys.modules["counterpartycore.lib.api.api_v1"]
+ else:
+ tested_module = sys.modules[f"counterpartycore.lib.messages.{tx_name}"]
tested_method = getattr(tested_module, method)
with MockProtocolChangesContext(**(mock_protocol_changes or {})):
diff --git a/counterparty-core/counterpartycore/test/utxolocks_test.py b/counterparty-core/counterpartycore/test/utxolocks_test.py
index 733d1502c9..d6ab4cc678 100644
--- a/counterparty-core/counterpartycore/test/utxolocks_test.py
+++ b/counterparty-core/counterpartycore/test/utxolocks_test.py
@@ -4,7 +4,6 @@
from io import BytesIO
import bitcoin
-import pytest # noqa: F401
from counterpartycore.lib import transaction
from counterpartycore.lib.messages import send
diff --git a/counterparty-core/requirements.txt b/counterparty-core/requirements.txt
index 8bc89d009a..65e8050011 100644
--- a/counterparty-core/requirements.txt
+++ b/counterparty-core/requirements.txt
@@ -26,4 +26,6 @@ arc4==0.4.0
halo==0.0.31
termcolor==2.4.0
sentry-sdk==1.45.0
+Flask-Cors==4.0.0
+docstring_parser==0.16
counterparty-rs==10.1.0
diff --git a/counterparty-wallet/counterpartywallet/messages.py b/counterparty-wallet/counterpartywallet/messages.py
index f044a41771..84c2cd0291 100755
--- a/counterparty-wallet/counterpartywallet/messages.py
+++ b/counterparty-wallet/counterpartywallet/messages.py
@@ -350,6 +350,3 @@ def compose(message, args):
return compose_transaction(args, message, param_names)
else:
raise ArgumentError("Invalid message name")
-
-
-# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
diff --git a/counterparty-wallet/counterpartywallet/wallet/__init__.py b/counterparty-wallet/counterpartywallet/wallet/__init__.py
index ae2abc3c1f..06de7fda96 100644
--- a/counterparty-wallet/counterpartywallet/wallet/__init__.py
+++ b/counterparty-wallet/counterpartywallet/wallet/__init__.py
@@ -20,7 +20,7 @@ class LockedWalletError(WalletError):
def wallet():
- return sys.modules[f"counterpartycli.wallet.{config.WALLET_NAME}"]
+ return sys.modules[f"counterpartywallet.wallet.{config.WALLET_NAME}"]
def get_wallet_addresses():