From bc4ded266207df06963405510cb562e5a2d2df71 Mon Sep 17 00:00:00 2001 From: Guillermo Rodriguez Date: Mon, 21 Jun 2021 18:27:53 +0000 Subject: [PATCH 01/73] binance: start drafting live order ctl endpoints First draft originally by @guilledk but update by myself 2 years later xD. Will crash at runtime but at least has the machinery to setup signed requests for auth-ed endpoints B) Also adds a generic `NoSignature` error for when credentials are not present in `brokers.toml` but user is trying to access auth-ed eps with the client. --- piker/brokers/binance.py | 211 ++++++++++++++++++++++++++++++++++++--- piker/config.py | 4 + 2 files changed, 203 insertions(+), 12 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index a8791ae9f..366054e13 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -1,6 +1,6 @@ # piker: trading gear for hackers # Copyright (C) -# Guillermo Rodriguez +# Guillermo Rodriguez (aka ze jefe) # Tyler Goodlet # (in stewardship for pikers) @@ -21,6 +21,7 @@ Binance backend """ +from collections import OrderedDict from contextlib import ( asynccontextmanager as acm, aclosing, @@ -29,11 +30,16 @@ from decimal import Decimal import itertools from typing import ( - Any, Union, Optional, - AsyncGenerator, Callable, + Any, + Union, + AsyncIterator, + AsyncGenerator, + Callable, ) +import hmac import time - +import decimal +import hashlib import trio from trio_typing import TaskStatus import pendulum @@ -42,6 +48,7 @@ import numpy as np import tractor +from .. import config from .._cacheables import async_lifo_cache from ..accounting._mktinfo import ( Asset, @@ -66,6 +73,30 @@ NoBsWs, ) +from ..clearing._messages import ( + BrokerdOrder, + BrokerdOrderAck, + # BrokerdCancel, + #BrokerdStatus, + #BrokerdPosition, + #BrokerdFill, + # BrokerdError, +) + +log = get_logger('piker.brokers.binance') + + +def get_config() -> dict: + conf, path = config.load() + + section = conf.get('binance') + + if not section: + log.warning(f'No config section found for binance in {path}') + return dict() + + return section + log = get_logger(__name__) @@ -197,16 +228,55 @@ def __init__(self) -> None: self._sesh.base_location = _url self._pairs: dict[str, Pair] = {} + conf = get_config() + self.api_key = conf.get('api', {}).get('key') + self.api_secret = conf.get('api', {}).get('secret') + + if self.api_key: + self._sesh.headers.update({'X-MBX-APIKEY': self.api_key}) + + def _get_signature(self, data: OrderedDict) -> str: + if not self.api_secret: + raise config.NoSignature( + "Can't generate a signature without setting up credentials" + ) + + query_str = '&'.join([ + f'{_key}={value}' + for _key, value in data.items()]) + log.info(query_str) + msg_auth = hmac.new( + self.api_secret.encode('utf-8'), + query_str.encode('utf-8'), + hashlib.sha256 + ) + return msg_auth.hexdigest() + async def _api( self, method: str, - params: dict, + params: Union[dict, OrderedDict], + signed: bool = False, + action: str = 'get' ) -> dict[str, Any]: - resp = await self._sesh.get( - path=f'/api/v3/{method}', - params=params, - timeout=float('inf') - ) + + if signed: + params['signature'] = self._get_signature(params) + + if action == 'get': + resp = await self._sesh.get( + path=f'/api/v3/{method}', + params=params, + timeout=float('inf') + ) + + elif action == 'post': + resp = await self._sesh.post( + path=f'/api/v3/{method}', + params=params, + timeout=float('inf') + ) + return resproc(resp, log) async def exch_info( @@ -284,8 +354,8 @@ async def search_symbols( async def 
bars( self, symbol: str, - start_dt: Optional[datetime] = None, - end_dt: Optional[datetime] = None, + start_dt: datetime | None = None, + end_dt: datetime | None = None, limit: int = 1000, # <- max allowed per query as_np: bool = True, @@ -344,6 +414,60 @@ async def bars( ) if as_np else bars return array + async def submit_limit( + self, + symbol: str, + side: str, # SELL / BUY + quantity: float, + price: float, + # time_in_force: str = 'GTC', + oid: int | None = None, + # iceberg_quantity: float | None = None, + # order_resp_type: str | None = None, + recv_window: int = 60000 + + ) -> int: + symbol = symbol.upper() + + await self.cache_symbols() + + asset_precision = self._pairs[symbol]['baseAssetPrecision'] + quote_precision = self._pairs[symbol]['quoteAssetPrecision'] + + quantity = Decimal(quantity).quantize( + Decimal(1 ** -asset_precision), + rounding=decimal.ROUND_HALF_EVEN + ) + + price = Decimal(price).quantize( + Decimal(1 ** -quote_precision), + rounding=decimal.ROUND_HALF_EVEN + ) + + params = OrderedDict([ + ('symbol', symbol), + ('side', side.upper()), + ('type', 'LIMIT'), + ('timeInForce', 'GTC'), + ('quantity', quantity), + ('price', price), + ('recvWindow', recv_window), + ('newOrderRespType', 'ACK'), + ('timestamp', binance_timestamp(pendulum.now())) + ]) + + if oid: + params['newClientOrderId'] = oid + + resp = await self._api( + 'order/test', # TODO: switch to real `order` endpoint + params=params, + signed=True, + action='post' + ) + + assert resp['orderId'] == oid + return oid @acm async def get_client() -> Client: @@ -660,6 +784,69 @@ async def subscribe(ws: NoBsWs): # last = time.time() +async def handle_order_requests( + ems_order_stream: tractor.MsgStream +) -> None: + async with open_cached_client('binance') as client: + async for request_msg in ems_order_stream: + log.info(f'Received order request {request_msg}') + + action = request_msg['action'] + + if action in {'buy', 'sell'}: + # validate + order = BrokerdOrder(**request_msg) + + # call our client api to submit the order + reqid = await client.submit_limit( + order.symbol, + order.action, + order.size, + order.price, + oid=order.oid + ) + + # deliver ack that order has been submitted to broker routing + await ems_order_stream.send( + BrokerdOrderAck( + # ems order request id + oid=order.oid, + # broker specific request id + reqid=reqid, + time_ns=time.time_ns(), + ).dict() + ) + + elif action == 'cancel': + # msg = BrokerdCancel(**request_msg) + # await run_client_method + ... + + else: + log.error(f'Unknown order command: {request_msg}') + + +@tractor.context +async def trades_dialogue( + ctx: tractor.Context, + loglevel: str = None +) -> AsyncIterator[dict[str, Any]]: + + # XXX: required to propagate ``tractor`` loglevel to piker logging + get_console_log(loglevel or tractor.current_actor().loglevel) + + positions = {} # TODO: get already open pos + + await ctx.started(positions, {}) + + async with ( + ctx.open_stream() as ems_stream, + trio.open_nursery() as n + ): + n.start_soon(handle_order_requests, ems_stream) + await trio.sleep_forever() + + @tractor.context async def open_symbol_search( ctx: tractor.Context, diff --git a/piker/config.py b/piker/config.py index 0220f3e6d..120852617 100644 --- a/piker/config.py +++ b/piker/config.py @@ -173,6 +173,10 @@ def _posixify(name): ) +class NoSignature(Exception): + 'No credentials setup for broker backend!' 
+ + def _override_config_dir( path: str ) -> None: From f99e8fe7eb1abc182ad3b3dd71dfb5665eee7ef9 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 14:35:08 -0400 Subject: [PATCH 02/73] binance: dynamically choose the rest method Instead of having a buncha logic branches for 'get', 'post', etc. just pass the `method: str` and do a attr lookup on the `asks` sesh. Also, adjust the `trades_dialogue()` ep to switch to paper mode when no client API key is detected/loaded. --- piker/brokers/binance.py | 71 +++++++++++++++++++++------------------- 1 file changed, 38 insertions(+), 33 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index 366054e13..24a781fd5 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -21,6 +21,7 @@ Binance backend """ +from __future__ import annotations from collections import OrderedDict from contextlib import ( asynccontextmanager as acm, @@ -40,6 +41,8 @@ import time import decimal import hashlib +from pathlib import Path + import trio from trio_typing import TaskStatus import pendulum @@ -87,13 +90,16 @@ def get_config() -> dict: + + conf: dict + path: Path conf, path = config.load() - section = conf.get('binance') + section = conf.get('binance') if not section: log.warning(f'No config section found for binance in {path}') - return dict() + return {} return section @@ -225,12 +231,12 @@ class Client: def __init__(self) -> None: self._sesh = asks.Session(connections=4) - self._sesh.base_location = _url - self._pairs: dict[str, Pair] = {} + self._sesh.base_location: str = _url + self._pairs: dict[str, Pair] = {} # mkt info table conf = get_config() - self.api_key = conf.get('api', {}).get('key') - self.api_secret = conf.get('api', {}).get('secret') + self.api_key: str = conf.get('api_key', '') + self.api_secret: str = conf.get('api_secret', '') if self.api_key: self._sesh.headers.update({'X-MBX-APIKEY': self.api_key}) @@ -255,7 +261,7 @@ def _get_signature(self, data: OrderedDict) -> str: async def _api( self, method: str, - params: Union[dict, OrderedDict], + params: dict | OrderedDict, signed: bool = False, action: str = 'get' ) -> dict[str, Any]: @@ -263,19 +269,11 @@ async def _api( if signed: params['signature'] = self._get_signature(params) - if action == 'get': - resp = await self._sesh.get( - path=f'/api/v3/{method}', - params=params, - timeout=float('inf') - ) - - elif action == 'post': - resp = await self._sesh.post( - path=f'/api/v3/{method}', - params=params, - timeout=float('inf') - ) + resp = await getattr(self._sesh, action)( + path=f'/api/v3/{method}', + params=params, + timeout=float('inf') + ) return resproc(resp, log) @@ -830,21 +828,28 @@ async def handle_order_requests( async def trades_dialogue( ctx: tractor.Context, loglevel: str = None -) -> AsyncIterator[dict[str, Any]]: - # XXX: required to propagate ``tractor`` loglevel to piker logging - get_console_log(loglevel or tractor.current_actor().loglevel) - - positions = {} # TODO: get already open pos - - await ctx.started(positions, {}) +) -> AsyncIterator[dict[str, Any]]: - async with ( - ctx.open_stream() as ems_stream, - trio.open_nursery() as n - ): - n.start_soon(handle_order_requests, ems_stream) - await trio.sleep_forever() + async with open_cached_client('binance') as client: + if not client.api_key: + await ctx.started('paper') + return + + # table: PpTable + # ledger: TransactionLedger + + # TODO: load pps and accounts using accounting apis! 
+ # positions: dict = {} + # accounts: set[str] = set() + # await ctx.started((positions, {})) + + # async with ( + # ctx.open_stream() as ems_stream, + # trio.open_nursery() as n + # ): + # n.start_soon(handle_order_requests, ems_stream) + # await trio.sleep_forever() @tractor.context From d3a504864afae8b101420677dee132e27e5c76d5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 14:39:33 -0400 Subject: [PATCH 03/73] Add draft `brokercnf` CLI cmd from @guilledk --- piker/brokers/cli.py | 65 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/piker/brokers/cli.py b/piker/brokers/cli.py index 5ebca3e71..77c69f8ea 100644 --- a/piker/brokers/cli.py +++ b/piker/brokers/cli.py @@ -485,3 +485,68 @@ async def main(func): return click.echo(colorize_json(quotes)) + + +# @cli.command() +# @click.argument('section', required=True) +# @click.argument('value', required=False) +# @click.option('--delete', '-d', flag_value=True, help='Delete section') +# @click.pass_obj +# def brokercfg(config, section, value, delete): +# from .. import config +# conf, path = config.load() + +# # XXX: Recursive getting & setting + +# def get_value(_dict, _section): +# subs = _section.split('.') +# if len(subs) > 1: +# return get_value( +# _dict[subs[0]], +# '.'.join(subs[1:]), +# ) + +# else: +# return _dict[_section] + +# def set_value(_dict, _section, val): +# subs = _section.split('.') +# if len(subs) > 1: +# if subs[0] not in _dict: +# _dict[subs[0]] = {} + +# return set_value( +# _dict[subs[0]], +# '.'.join(subs[1:]), +# val +# ) + +# else: +# _dict[_section] = val + +# def del_value(_dict, _section): +# subs = _section.split('.') +# if len(subs) > 1: +# if subs[0] not in _dict: +# return + +# return del_value( +# _dict[subs[0]], +# '.'.join(subs[1:]) +# ) + +# else: +# if _section not in _dict: +# return + +# del _dict[_section] + +# if not delete: +# if value: +# set_value(conf, section, value) + +# click.echo(colorize_json(get_value(conf, section))) +# else: +# del_value(conf, section) + +# broker_conf.write(conf) From 1d9c195506dfb3200f368309f737c45002030e10 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 14:41:13 -0400 Subject: [PATCH 04/73] kraken: tidy up paper mode activation comments --- piker/brokers/kraken/broker.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index fc2eff62d..8025f0d27 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -427,16 +427,12 @@ async def trades_dialogue( async with get_client() as client: + # make ems flip to paper mode when no creds setup in + # `brokers.toml` B0 if not client._api_key: await ctx.started('paper') return - # TODO: make ems flip to paper mode via - # some returned signal if the user only wants to use - # the data feed or we return this? 
- # else: - # await ctx.started(({}, ['paper'])) - # NOTE: currently we expect the user to define a "source fiat" # (much like the web UI let's you set an "account currency") # such that all positions (nested or flat) will be translated to From f8af13d0101ad3bd766296bcce1aa5ed19c0201d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 14:51:51 -0400 Subject: [PATCH 05/73] binance: add `submit_cancel()` & listen key mgmt Patch again originally from @guilledk and adds a sesh for futures testnet as well as a order canceller method B) --- piker/brokers/binance.py | 130 +++++++++++++++++++++++++++++++++++---- 1 file changed, 117 insertions(+), 13 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index 24a781fd5..8e6cf1e8a 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -108,6 +108,7 @@ def get_config() -> dict: _url = 'https://api.binance.com' +_fapi_url = 'https://testnet.binancefuture.com' # Broker specific ohlc schema (rest) @@ -230,18 +231,31 @@ def binance_timestamp( class Client: def __init__(self) -> None: + + self._pairs: dict[str, Pair] = {} # mkt info table + + # live EP sesh self._sesh = asks.Session(connections=4) self._sesh.base_location: str = _url - self._pairs: dict[str, Pair] = {} # mkt info table - conf = get_config() + # testnet EP sesh + self._fapi_sesh = asks.Session(connections=4) + self._fapi_sesh.base_location = _fapi_url + + conf: dict = get_config() self.api_key: str = conf.get('api_key', '') self.api_secret: str = conf.get('api_secret', '') if self.api_key: - self._sesh.headers.update({'X-MBX-APIKEY': self.api_key}) + api_key_header = {'X-MBX-APIKEY': self.api_key} + self._sesh.headers.update(api_key_header) + self._fapi_sesh.headers.update(api_key_header) def _get_signature(self, data: OrderedDict) -> str: + + # XXX: Info on security and authentification + # https://binance-docs.github.io/apidocs/#endpoint-security-type + if not self.api_secret: raise config.NoSignature( "Can't generate a signature without setting up credentials" @@ -277,8 +291,26 @@ async def _api( return resproc(resp, log) - async def exch_info( + async def _fapi( + self, + method: str, + params: Union[dict, OrderedDict], + signed: bool = False, + action: str = 'get' + ) -> dict[str, Any]: + if signed: + params['signature'] = self._get_signature(params) + + resp = await getattr(self._fapi_sesh, action)( + path=f'/fapi/v1/{method}', + params=params, + timeout=float('inf') + ) + + return resproc(resp, log) + + async def exch_info( self, sym: str | None = None, @@ -467,6 +499,70 @@ async def submit_limit( assert resp['orderId'] == oid return oid + async def submit_cancel( + self, + symbol: str, + oid: str, + recv_window: int = 60000 + ) -> None: + symbol = symbol.upper() + + params = OrderedDict([ + ('symbol', symbol), + ('orderId', oid), + ('recvWindow', recv_window), + ('timestamp', binance_timestamp(pendulum.now())) + ]) + + await self._api( + 'order', + params=params, + signed=True, + action='delete' + ) + + async def get_listen_key(self) -> str: + return await self._api( + 'userDataStream', + params={}, + action='post' + )['listenKey'] + + async def keep_alive_key(self, listen_key: str) -> None: + await self._fapi( + 'userDataStream', + params={'listenKey': listen_key}, + action='put' + ) + + async def close_listen_key(self, listen_key: str) -> None: + await self._fapi( + 'userDataStream', + params={'listenKey': listen_key}, + action='delete' + ) + + @acm + async def manage_listen_key(self): + + async def periodic_keep_alive( + 
self, + listen_key: str, + timeout=60 * 29 # 29 minutes + ): + while True: + await trio.sleep(timeout) + await self.keep_alive_key(listen_key) + + key = await self.get_listen_key() + + async with trio.open_nursery() as n: + n.start_soon(periodic_keep_alive, key) + yield key + + await self.close_listen_key(key) + + @acm async def get_client() -> Client: client = Client() @@ -484,7 +580,7 @@ class AggTrade(Struct, frozen=True): p: float # Price q: float # Quantity f: int # First trade ID - l: int # Last trade ID + l: int # noqa Last trade ID T: int # Trade time m: bool # Is the buyer the market maker? M: bool # Ignore @@ -783,7 +879,8 @@ async def subscribe(ws: NoBsWs): async def handle_order_requests( - ems_order_stream: tractor.MsgStream + ems_order_stream: tractor.MsgStream, + symbol: str ) -> None: async with open_cached_client('binance') as client: async for request_msg in ems_order_stream: @@ -817,7 +914,8 @@ async def handle_order_requests( elif action == 'cancel': # msg = BrokerdCancel(**request_msg) - # await run_client_method + # + # await client.submit_cancel(symbol, msg.reqid) ... else: @@ -844,12 +942,18 @@ async def trades_dialogue( # accounts: set[str] = set() # await ctx.started((positions, {})) - # async with ( - # ctx.open_stream() as ems_stream, - # trio.open_nursery() as n - # ): - # n.start_soon(handle_order_requests, ems_stream) - # await trio.sleep_forever() + async with ( + ctx.open_stream() as ems_stream, + trio.open_nursery() as n, + open_cached_client('binance') as client, + # client.manage_listen_key() as listen_key, + ): + n.start_soon(handle_order_requests, ems_stream) + await trio.sleep_forever() + # async with open_autorecon_ws( + # f'wss://stream.binance.com:9443/ws/{listen_key}', + # ) as ws: + # ... @tractor.context From e03da40867dd8a38fe76e1b62d7ea0404cb6f7f8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 14:56:51 -0400 Subject: [PATCH 06/73] Add a config get/set API (from @guilledk) ? --- piker/config.py | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/piker/config.py b/piker/config.py index 120852617..a0d403d54 100644 --- a/piker/config.py +++ b/piker/config.py @@ -467,3 +467,51 @@ def load_accounts( accounts['paper'] = None return accounts + + +# XXX: Recursive getting & setting + +def get_value(_dict, _section): + subs = _section.split('.') + if len(subs) > 1: + return get_value( + _dict[subs[0]], + '.'.join(subs[1:]), + ) + + else: + return _dict[_section] + + +def set_value(_dict, _section, val): + subs = _section.split('.') + if len(subs) > 1: + if subs[0] not in _dict: + _dict[subs[0]] = {} + + return set_value( + _dict[subs[0]], + '.'.join(subs[1:]), + val + ) + + else: + _dict[_section] = val + + +def del_value(_dict, _section): + subs = _section.split('.') + if len(subs) > 1: + if subs[0] not in _dict: + return + + return del_value( + _dict[subs[0]], + '.'.join(subs[1:]) + ) + + else: + if _section not in _dict: + return + + del _dict[_section] From e85e031df706c52569a4d49ad962411f7cfc37f5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 14:57:21 -0400 Subject: [PATCH 07/73] Use new config get/set API in `brokercnf` cmd? 
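For reference, a minimal usage sketch of the dotted-section helpers added to
`piker/config.py` in the previous patch; the sample `conf` dict and its values
here are made up purely for illustration:

    from piker import config

    conf: dict = {'binance': {'api_key': 'abc123'}}

    # nested get: each '.' in the section name recurses one sub-dict deeper
    assert config.get_value(conf, 'binance.api_key') == 'abc123'

    # nested set: missing sub-sections are created on the way down
    config.set_value(conf, 'kraken.api_key', 'xyz789')
    assert conf['kraken'] == {'api_key': 'xyz789'}

    # delete a leaf; silently no-ops if the path doesn't exist
    config.del_value(conf, 'kraken.api_key')

The `brokercfg` cmd below passes its `section` argument straight through to
these helpers, so a dotted section like `binance.api_key` should resolve the
same way from the command line.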
--- piker/brokers/cli.py | 97 +++++++++++++++----------------------------- 1 file changed, 33 insertions(+), 64 deletions(-) diff --git a/piker/brokers/cli.py b/piker/brokers/cli.py index 77c69f8ea..937e936e1 100644 --- a/piker/brokers/cli.py +++ b/piker/brokers/cli.py @@ -195,7 +195,7 @@ async def bcheck_main(): @cli.command() @click.option('--keys', '-k', multiple=True, - help='Return results only for these keys') + help='Return results only for these keys') @click.argument('meth', nargs=1) @click.argument('kwargs', nargs=-1) @click.pass_obj @@ -487,66 +487,35 @@ async def main(func): click.echo(colorize_json(quotes)) -# @cli.command() -# @click.argument('section', required=True) -# @click.argument('value', required=False) -# @click.option('--delete', '-d', flag_value=True, help='Delete section') -# @click.pass_obj -# def brokercfg(config, section, value, delete): -# from .. import config -# conf, path = config.load() - -# # XXX: Recursive getting & setting - -# def get_value(_dict, _section): -# subs = _section.split('.') -# if len(subs) > 1: -# return get_value( -# _dict[subs[0]], -# '.'.join(subs[1:]), -# ) - -# else: -# return _dict[_section] - -# def set_value(_dict, _section, val): -# subs = _section.split('.') -# if len(subs) > 1: -# if subs[0] not in _dict: -# _dict[subs[0]] = {} - -# return set_value( -# _dict[subs[0]], -# '.'.join(subs[1:]), -# val -# ) - -# else: -# _dict[_section] = val - -# def del_value(_dict, _section): -# subs = _section.split('.') -# if len(subs) > 1: -# if subs[0] not in _dict: -# return - -# return del_value( -# _dict[subs[0]], -# '.'.join(subs[1:]) -# ) - -# else: -# if _section not in _dict: -# return - -# del _dict[_section] - -# if not delete: -# if value: -# set_value(conf, section, value) - -# click.echo(colorize_json(get_value(conf, section))) -# else: -# del_value(conf, section) - -# broker_conf.write(conf) +@cli.command() +@click.argument('section', required=False) +@click.argument('value', required=False) +@click.option('--delete', '-d', flag_value=True, help='Delete section') +@click.pass_obj +def brokercfg(config, section, value, delete): + """If invoked with no arguments, open an editor to edit broker configs file + or get / update an individual section. + """ + from .. import config + + if section: + conf, path = config.load() + + if not delete: + if value: + config.set_value(conf, section, value) + + click.echo( + colorize_json( + config.get_value(conf, section)) + ) + else: + config.del_value(conf, section) + + config.write(config=conf) + + else: + conf, path = config.load(raw=True) + config.write( + raw=click.edit(text=conf) + ) From ef544ba55aa64381f25881b0d4a77331c62fa7c9 Mon Sep 17 00:00:00 2001 From: Guillermo Rodriguez Date: Thu, 1 Jul 2021 01:01:32 +0000 Subject: [PATCH 08/73] Add order status tracking --- piker/brokers/binance.py | 75 +++++++++++++++++++++++++++++++++++----- 1 file changed, 67 insertions(+), 8 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index 8e6cf1e8a..cc85307da 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -559,6 +559,7 @@ async def periodic_keep_alive( async with trio.open_nursery() as n: n.start_soon(periodic_keep_alive, key) yield key + n.cancel_scope.cancel() await self.close_listen_key(key) @@ -913,10 +914,9 @@ async def handle_order_requests( ) elif action == 'cancel': - # msg = BrokerdCancel(**request_msg) - # - # await client.submit_cancel(symbol, msg.reqid) - ... 
+ msg = BrokerdCancel(**request_msg) + + await client.submit_cancel(msg.symbol, msg.reqid) else: log.error(f'Unknown order command: {request_msg}') @@ -950,10 +950,69 @@ async def trades_dialogue( ): n.start_soon(handle_order_requests, ems_stream) await trio.sleep_forever() - # async with open_autorecon_ws( - # f'wss://stream.binance.com:9443/ws/{listen_key}', - # ) as ws: - # ... + + async with open_autorecon_ws( + f'wss://stream.binance.com:9443/ws/{listen_key}', + ) as ws: + event = await ws.recv_msg() + + if event.get('e') == 'executionReport': + """ + https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update + """ + + oid = event.get('c') + side = event.get('S').lower() + status = event.get('X') + order_qty = float(event.get('q')) + filled_qty = float(event.get('z')) + cumm_transacted_qty = float(event.get('Z')) + price_avg = cum_transacted_qty / filled_qty + + broker_time = float(event.get('T')) + + commission_amount = float(event.get('n')) + commission_asset = event.get('N') + + if status == 'TRADE': + if order_qty == filled_qty: + msg = BrokerdFill( + reqid=oid, + time_ns=time.time_ns(), + action=side, + price=price_avg, + broker_details={ + 'name': 'binance', + 'commissions': { + 'amount': commission_amount, + 'asset': commission_asset + }, + 'broker_time': broker_time + }, + broker_time=broker_time + ) + + else: + if status == 'NEW': + status = 'submitted' + + elif status == 'CANCELED': + status = 'cancelled' + + msg = BrokerdStatus( + reqid=oid, + time_ns=time.time_ns(), + status=status, + filled=filled_qty, + remaining=order_qty - filled_qty, + broker_details={'name': 'binance'} + ) + + else: + # XXX: temporary, to catch unhandled msgs + breakpoint() + + await ems_stream.send(msg.dict()) @tractor.context From eaaf6e4cc1370819b2bc7c7f1617725d503c4a7c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 15:09:49 -0400 Subject: [PATCH 09/73] kraken: fix `trades2pps()` type sig --- piker/brokers/kraken/broker.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 8025f0d27..8fa321b0b 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -372,10 +372,7 @@ def trades2pps( write_storage: bool = True, -) -> tuple[ - list[BrokerdPosition], - list[Transaction], -]: +) -> list[BrokerdPosition]: if new_trans: updated = table.update_from_trans( new_trans, @@ -644,7 +641,7 @@ def has_pp( # stage a first reqid of `0` reqids2txids[0] = last_trade_dict['ordertxid'] - ppmsgs = trades2pps( + ppmsgs: list[BrokerdPosition] = trades2pps( table, acctid, ) From 7c00ca0254f293054b49955a17a8aab02a1a081b Mon Sep 17 00:00:00 2001 From: Guillermo Rodriguez Date: Sat, 19 Feb 2022 18:03:45 -0300 Subject: [PATCH 10/73] binance: add deposits/withdrawals API support From @guilledk, - Drop Decimal quantize for now - Minor tweaks to trades_dialogue proto --- piker/brokers/binance.py | 189 ++++++++++++++++++++++++++------------- 1 file changed, 128 insertions(+), 61 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index cc85307da..b2150e867 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -39,13 +39,15 @@ ) import hmac import time -import decimal import hashlib from pathlib import Path import trio from trio_typing import TaskStatus -import pendulum +from pendulum import ( + now, + from_timestamp, +) import asks from fuzzywuzzy import process as fuzzy import numpy as np @@ -79,10 +81,10 @@ from ..clearing._messages 
import ( BrokerdOrder, BrokerdOrderAck, - # BrokerdCancel, - #BrokerdStatus, - #BrokerdPosition, - #BrokerdFill, + BrokerdStatus, + BrokerdPosition, + BrokerdFill, + BrokerdCancel, # BrokerdError, ) @@ -108,6 +110,7 @@ def get_config() -> dict: _url = 'https://api.binance.com' +_sapi_url = 'https://api.binance.com' _fapi_url = 'https://testnet.binancefuture.com' @@ -238,18 +241,25 @@ def __init__(self) -> None: self._sesh = asks.Session(connections=4) self._sesh.base_location: str = _url - # testnet EP sesh + # futes testnet rest EPs self._fapi_sesh = asks.Session(connections=4) self._fapi_sesh.base_location = _fapi_url + # sync rest API + self._sapi_sesh = asks.Session(connections=4) + self._sapi_sesh.base_location = _sapi_url + conf: dict = get_config() self.api_key: str = conf.get('api_key', '') self.api_secret: str = conf.get('api_secret', '') + self.watchlist = conf.get('watchlist', []) + if self.api_key: api_key_header = {'X-MBX-APIKEY': self.api_key} self._sesh.headers.update(api_key_header) self._fapi_sesh.headers.update(api_key_header) + self._sapi_sesh.headers.update(api_key_header) def _get_signature(self, data: OrderedDict) -> str: @@ -310,6 +320,25 @@ async def _fapi( return resproc(resp, log) + async def _sapi( + self, + method: str, + params: Union[dict, OrderedDict], + signed: bool = False, + action: str = 'get' + ) -> dict[str, Any]: + + if signed: + params['signature'] = self._get_signature(params) + + resp = await getattr(self._sapi_sesh, action)( + path=f'/sapi/v1/{method}', + params=params, + timeout=float('inf') + ) + + return resproc(resp, log) + async def exch_info( self, sym: str | None = None, @@ -392,7 +421,7 @@ async def bars( ) -> dict: if end_dt is None: - end_dt = pendulum.now('UTC').add(minutes=1) + end_dt = now('UTC').add(minutes=1) if start_dt is None: start_dt = end_dt.start_of( @@ -444,6 +473,58 @@ async def bars( ) if as_np else bars return array + async def get_positions( + self, + recv_window: int = 60000 + ) -> tuple: + positions = {} + volumes = {} + + for sym in self.watchlist: + log.info(f'doing {sym}...') + params = OrderedDict([ + ('symbol', sym), + ('recvWindow', recv_window), + ('timestamp', binance_timestamp(now())) + ]) + resp = await self._api( + 'allOrders', + params=params, + signed=True + ) + log.info(f'done. 
len {len(resp)}') + await trio.sleep(3) + + return positions, volumes + + async def get_deposits( + self, + recv_window: int = 60000 + ) -> list: + + params = OrderedDict([ + ('recvWindow', recv_window), + ('timestamp', binance_timestamp(now())) + ]) + return await self._sapi( + 'capital/deposit/hisrec', + params=params, + signed=True) + + async def get_withdrawls( + self, + recv_window: int = 60000 + ) -> list: + + params = OrderedDict([ + ('recvWindow', recv_window), + ('timestamp', binance_timestamp(now())) + ]) + return await self._sapi( + 'capital/withdraw/history', + params=params, + signed=True) + async def submit_limit( self, symbol: str, @@ -461,18 +542,8 @@ async def submit_limit( await self.cache_symbols() - asset_precision = self._pairs[symbol]['baseAssetPrecision'] - quote_precision = self._pairs[symbol]['quoteAssetPrecision'] - - quantity = Decimal(quantity).quantize( - Decimal(1 ** -asset_precision), - rounding=decimal.ROUND_HALF_EVEN - ) - - price = Decimal(price).quantize( - Decimal(1 ** -quote_precision), - rounding=decimal.ROUND_HALF_EVEN - ) + # asset_precision = self._pairs[symbol]['baseAssetPrecision'] + # quote_precision = self._pairs[symbol]['quoteAssetPrecision'] params = OrderedDict([ ('symbol', symbol), @@ -483,21 +554,21 @@ async def submit_limit( ('price', price), ('recvWindow', recv_window), ('newOrderRespType', 'ACK'), - ('timestamp', binance_timestamp(pendulum.now())) + ('timestamp', binance_timestamp(now())) ]) if oid: params['newClientOrderId'] = oid resp = await self._api( - 'order/test', # TODO: switch to real `order` endpoint + 'order', params=params, signed=True, action='post' ) - - assert resp['orderId'] == oid - return oid + log.info(resp) + # return resp['orderId'] + return resp['orderId'] async def submit_cancel( self, @@ -511,10 +582,10 @@ async def submit_cancel( ('symbol', symbol), ('orderId', oid), ('recvWindow', recv_window), - ('timestamp', binance_timestamp(pendulum.now())) + ('timestamp', binance_timestamp(now())) ]) - await self._api( + return await self._api( 'order', params=params, signed=True, @@ -522,11 +593,11 @@ async def submit_cancel( ) async def get_listen_key(self) -> str: - return await self._api( + return (await self._api( 'userDataStream', params={}, action='post' - )['listenKey'] + ))['listenKey'] async def keep_alive_key(self, listen_key: str) -> None: await self._fapi( @@ -557,7 +628,7 @@ async def periodic_keep_alive( key = await self.get_listen_key() async with trio.open_nursery() as n: - n.start_soon(periodic_keep_alive, key) + n.start_soon(periodic_keep_alive, self, key) yield key n.cancel_scope.cancel() @@ -733,8 +804,8 @@ async def get_ohlc( if (inow - times[-1]) > 60: await tractor.breakpoint() - start_dt = pendulum.from_timestamp(times[0]) - end_dt = pendulum.from_timestamp(times[-1]) + start_dt = from_timestamp(times[0]) + end_dt = from_timestamp(times[-1]) return array, start_dt, end_dt @@ -873,15 +944,15 @@ async def subscribe(ws: NoBsWs): # hz = 1/period if period else float('inf') # if hz > 60: # log.info(f'Binance quotez : {hz}') - - topic = msg['symbol'].lower() - await send_chan.send({topic: msg}) + + if typ == 'l1': + topic = msg['symbol'].lower() + await send_chan.send({topic: msg}) # last = time.time() async def handle_order_requests( - ems_order_stream: tractor.MsgStream, - symbol: str + ems_order_stream: tractor.MsgStream ) -> None: async with open_cached_client('binance') as client: async for request_msg in ems_order_stream: @@ -938,43 +1009,39 @@ async def trades_dialogue( # ledger: TransactionLedger 
# TODO: load pps and accounts using accounting apis! - # positions: dict = {} - # accounts: set[str] = set() - # await ctx.started((positions, {})) + positions: list[BrokerdPosition] = [] + accounts: list[str] = ['binance.default'] + await ctx.started((positions, accounts)) async with ( ctx.open_stream() as ems_stream, trio.open_nursery() as n, open_cached_client('binance') as client, - # client.manage_listen_key() as listen_key, + client.manage_listen_key() as listen_key, ): n.start_soon(handle_order_requests, ems_stream) - await trio.sleep_forever() - + # await trio.sleep_forever() + async with open_autorecon_ws( f'wss://stream.binance.com:9443/ws/{listen_key}', ) as ws: event = await ws.recv_msg() + # https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update if event.get('e') == 'executionReport': - """ - https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update - """ - - oid = event.get('c') - side = event.get('S').lower() - status = event.get('X') - order_qty = float(event.get('q')) - filled_qty = float(event.get('z')) - cumm_transacted_qty = float(event.get('Z')) - price_avg = cum_transacted_qty / filled_qty - - broker_time = float(event.get('T')) - - commission_amount = float(event.get('n')) - commission_asset = event.get('N') - if status == 'TRADE': + oid: str = event.get('c') + side: str = event.get('S').lower() + status: str = event.get('X') + order_qty: float = float(event.get('q')) + filled_qty: float = float(event.get('z')) + cum_transacted_qty: float = float(event.get('Z')) + price_avg: float = cum_transacted_qty / filled_qty + broker_time: float = float(event.get('T')) + commission_amount: float = float(event.get('n')) + commission_asset: float = event.get('N') + + if status == 'TRADE': if order_qty == filled_qty: msg = BrokerdFill( reqid=oid, @@ -993,7 +1060,7 @@ async def trades_dialogue( ) else: - if status == 'NEW': + if status == 'NEW': status = 'submitted' elif status == 'CANCELED': From 06026ec661b0301b3d302cade9ec71e210f1cc3b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 15:55:47 -0400 Subject: [PATCH 11/73] Add `binance` section to broker conf template --- config/brokers.toml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/config/brokers.toml b/config/brokers.toml index 7205d82ce..c9384461e 100644 --- a/config/brokers.toml +++ b/config/brokers.toml @@ -7,6 +7,11 @@ token_type = 'Bearer' expires_at = 1616095326.355846 +[binance] +api_key = '' +api_secret = '' + + [deribit] key_id = '' key_secret = '' From 2dc8ee2b4eac3d8ea6a37384a1cc5fa16b73e7d5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 16:05:31 -0400 Subject: [PATCH 12/73] Don't bother casting `AggTrade` values for now, just floatify the price/quantity --- piker/brokers/binance.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index b2150e867..3bbea3285 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -60,11 +60,11 @@ MktPair, digits_to_dec, ) -from . 
import ( +from .._cacheables import open_cached_client +from ._util import ( resproc, SymbolNotFound, DataUnavailable, - open_cached_client, ) from ._util import ( get_logger, @@ -288,6 +288,7 @@ async def _api( params: dict | OrderedDict, signed: bool = False, action: str = 'get' + ) -> dict[str, Any]: if signed: @@ -296,7 +297,7 @@ async def _api( resp = await getattr(self._sesh, action)( path=f'/api/v3/{method}', params=params, - timeout=float('inf') + timeout=float('inf'), ) return resproc(resp, log) @@ -476,6 +477,7 @@ async def bars( async def get_positions( self, recv_window: int = 60000 + ) -> tuple: positions = {} volumes = {} @@ -509,7 +511,8 @@ async def get_deposits( return await self._sapi( 'capital/deposit/hisrec', params=params, - signed=True) + signed=True, + ) async def get_withdrawls( self, @@ -523,7 +526,8 @@ async def get_withdrawls( return await self._sapi( 'capital/withdraw/history', params=params, - signed=True) + signed=True, + ) async def submit_limit( self, @@ -732,7 +736,7 @@ async def stream_messages( # ``msgspec.Struct`` does not runtime-validate until you # decode/encode, see: # https://jcristharif.com/msgspec/structs.html#type-validation - msg = AggTrade(**msg) + msg = AggTrade(**msg) # TODO: should we .copy() ? yield 'trade', { 'symbol': msg.s, 'last': msg.p, From e035af2f42516fd238d54cfed9ccfbbd956c8331 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 16:14:42 -0400 Subject: [PATCH 13/73] Don't filter out clearing ticks XD --- piker/brokers/binance.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index 3bbea3285..9eb6732fe 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -737,9 +737,9 @@ async def stream_messages( # decode/encode, see: # https://jcristharif.com/msgspec/structs.html#type-validation msg = AggTrade(**msg) # TODO: should we .copy() ? 
- yield 'trade', { + piker_quote: dict = { 'symbol': msg.s, - 'last': msg.p, + 'last': float(msg.p), 'brokerd_ts': time.time(), 'ticks': [{ 'type': 'trade', @@ -748,6 +748,7 @@ async def stream_messages( 'broker_ts': msg.T, }], } + yield 'trade', piker_quote def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]: @@ -948,10 +949,8 @@ async def subscribe(ws: NoBsWs): # hz = 1/period if period else float('inf') # if hz > 60: # log.info(f'Binance quotez : {hz}') - - if typ == 'l1': - topic = msg['symbol'].lower() - await send_chan.send({topic: msg}) + topic = msg['symbol'].lower() + await send_chan.send({topic: msg}) # last = time.time() From 26a8638836f377aa718110f2f8313a6a2d259138 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 16:29:05 -0400 Subject: [PATCH 14/73] binance: convert to subpkg module --- piker/brokers/{binance.py => binance/__init__.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename piker/brokers/{binance.py => binance/__init__.py} (100%) diff --git a/piker/brokers/binance.py b/piker/brokers/binance/__init__.py similarity index 100% rename from piker/brokers/binance.py rename to piker/brokers/binance/__init__.py From ed0c2555fc1ee4469745a207dd553632cfa0bfd8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 16:35:50 -0400 Subject: [PATCH 15/73] binance: make pkgmod expose endpoints from coming submods --- piker/brokers/binance/__init__.py | 1123 +---------------------------- piker/brokers/binance/api.py | 1111 ++++++++++++++++++++++++++++ 2 files changed, 1144 insertions(+), 1090 deletions(-) create mode 100644 piker/brokers/binance/api.py diff --git a/piker/brokers/binance/__init__.py b/piker/brokers/binance/__init__.py index 9eb6732fe..c840f0710 100644 --- a/piker/brokers/binance/__init__.py +++ b/piker/brokers/binance/__init__.py @@ -18,1095 +18,38 @@ # along with this program. If not, see . """ -Binance backend +binancial secs on the floor, in the office, behind the dumpster. """ -from __future__ import annotations -from collections import OrderedDict -from contextlib import ( - asynccontextmanager as acm, - aclosing, -) -from datetime import datetime -from decimal import Decimal -import itertools -from typing import ( - Any, - Union, - AsyncIterator, - AsyncGenerator, - Callable, -) -import hmac -import time -import hashlib -from pathlib import Path - -import trio -from trio_typing import TaskStatus -from pendulum import ( - now, - from_timestamp, -) -import asks -from fuzzywuzzy import process as fuzzy -import numpy as np -import tractor - -from .. 
import config -from .._cacheables import async_lifo_cache -from ..accounting._mktinfo import ( - Asset, - MktPair, - digits_to_dec, -) -from .._cacheables import open_cached_client -from ._util import ( - resproc, - SymbolNotFound, - DataUnavailable, -) -from ._util import ( - get_logger, - get_console_log, -) -from piker.data.types import Struct -from piker.data.validate import FeedInit -from piker.data import def_iohlcv_fields -from piker.data._web_bs import ( - open_autorecon_ws, - NoBsWs, -) - -from ..clearing._messages import ( - BrokerdOrder, - BrokerdOrderAck, - BrokerdStatus, - BrokerdPosition, - BrokerdFill, - BrokerdCancel, - # BrokerdError, -) - -log = get_logger('piker.brokers.binance') - - -def get_config() -> dict: - - conf: dict - path: Path - conf, path = config.load() - - section = conf.get('binance') - - if not section: - log.warning(f'No config section found for binance in {path}') - return {} - - return section - - -log = get_logger(__name__) - - -_url = 'https://api.binance.com' -_sapi_url = 'https://api.binance.com' -_fapi_url = 'https://testnet.binancefuture.com' - - -# Broker specific ohlc schema (rest) -# XXX TODO? some additional fields are defined in the docs: -# https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data - -# _ohlc_dtype = [ - # ('close_time', int), - # ('quote_vol', float), - # ('num_trades', int), - # ('buy_base_vol', float), - # ('buy_quote_vol', float), - # ('ignore', float), -# ] - -# UI components allow this to be declared such that additional -# (historical) fields can be exposed. -# ohlc_dtype = np.dtype(_ohlc_dtype) - -_show_wap_in_history = False - - -# https://binance-docs.github.io/apidocs/spot/en/#exchange-information - -# TODO: make this frozen again by pre-processing the -# filters list to a dict at init time? -class Pair(Struct, frozen=True): - symbol: str - status: str - - baseAsset: str - baseAssetPrecision: int - cancelReplaceAllowed: bool - allowTrailingStop: bool - quoteAsset: str - quotePrecision: int - quoteAssetPrecision: int - - baseCommissionPrecision: int - quoteCommissionPrecision: int - - orderTypes: list[str] - - icebergAllowed: bool - ocoAllowed: bool - quoteOrderQtyMarketAllowed: bool - isSpotTradingAllowed: bool - isMarginTradingAllowed: bool - - defaultSelfTradePreventionMode: str - allowedSelfTradePreventionModes: list[str] - - filters: dict[ - str, - Union[str, int, float] - ] - permissions: list[str] - - @property - def price_tick(self) -> Decimal: - # XXX: lul, after manually inspecting the response format we - # just directly pick out the info we need - step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0') - return Decimal(step_size) - - @property - def size_tick(self) -> Decimal: - step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0') - return Decimal(step_size) - - -class OHLC(Struct): - ''' - Description of the flattened OHLC quote format. - - For schema details see: - https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams - - ''' - time: int - - open: float - high: float - low: float - close: float - volume: float - - close_time: int - - quote_vol: float - num_trades: int - buy_base_vol: float - buy_quote_vol: float - ignore: int - - # null the place holder for `bar_wap` until we - # figure out what to extract for this. 
- bar_wap: float = 0.0 - - -class L1(Struct): - # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams - - update_id: int - sym: str - - bid: float - bsize: float - ask: float - asize: float - - -# convert datetime obj timestamp to unixtime in milliseconds -def binance_timestamp( - when: datetime -) -> int: - return int((when.timestamp() * 1000) + (when.microsecond / 1000)) - - -class Client: - - def __init__(self) -> None: - - self._pairs: dict[str, Pair] = {} # mkt info table - - # live EP sesh - self._sesh = asks.Session(connections=4) - self._sesh.base_location: str = _url - - # futes testnet rest EPs - self._fapi_sesh = asks.Session(connections=4) - self._fapi_sesh.base_location = _fapi_url - - # sync rest API - self._sapi_sesh = asks.Session(connections=4) - self._sapi_sesh.base_location = _sapi_url - - conf: dict = get_config() - self.api_key: str = conf.get('api_key', '') - self.api_secret: str = conf.get('api_secret', '') - - self.watchlist = conf.get('watchlist', []) - - if self.api_key: - api_key_header = {'X-MBX-APIKEY': self.api_key} - self._sesh.headers.update(api_key_header) - self._fapi_sesh.headers.update(api_key_header) - self._sapi_sesh.headers.update(api_key_header) - - def _get_signature(self, data: OrderedDict) -> str: - - # XXX: Info on security and authentification - # https://binance-docs.github.io/apidocs/#endpoint-security-type - - if not self.api_secret: - raise config.NoSignature( - "Can't generate a signature without setting up credentials" - ) - - query_str = '&'.join([ - f'{_key}={value}' - for _key, value in data.items()]) - log.info(query_str) - msg_auth = hmac.new( - self.api_secret.encode('utf-8'), - query_str.encode('utf-8'), - hashlib.sha256 - ) - return msg_auth.hexdigest() - - async def _api( - self, - method: str, - params: dict | OrderedDict, - signed: bool = False, - action: str = 'get' - - ) -> dict[str, Any]: - - if signed: - params['signature'] = self._get_signature(params) - - resp = await getattr(self._sesh, action)( - path=f'/api/v3/{method}', - params=params, - timeout=float('inf'), - ) - - return resproc(resp, log) - - async def _fapi( - self, - method: str, - params: Union[dict, OrderedDict], - signed: bool = False, - action: str = 'get' - ) -> dict[str, Any]: - - if signed: - params['signature'] = self._get_signature(params) - - resp = await getattr(self._fapi_sesh, action)( - path=f'/fapi/v1/{method}', - params=params, - timeout=float('inf') - ) - - return resproc(resp, log) - - async def _sapi( - self, - method: str, - params: Union[dict, OrderedDict], - signed: bool = False, - action: str = 'get' - ) -> dict[str, Any]: - - if signed: - params['signature'] = self._get_signature(params) - - resp = await getattr(self._sapi_sesh, action)( - path=f'/sapi/v1/{method}', - params=params, - timeout=float('inf') - ) - - return resproc(resp, log) - - async def exch_info( - self, - sym: str | None = None, - - ) -> dict[str, Pair] | Pair: - ''' - Fresh exchange-pairs info query for symbol ``sym: str``: - https://binance-docs.github.io/apidocs/spot/en/#exchange-information - - ''' - cached_pair = self._pairs.get(sym) - if cached_pair: - return cached_pair - - # retrieve all symbols by default - params = {} - if sym is not None: - sym = sym.lower() - params = {'symbol': sym} - - resp = await self._api('exchangeInfo', params=params) - entries = resp['symbols'] - if not entries: - raise SymbolNotFound(f'{sym} not found:\n{resp}') - - # pre-process .filters field into a table - pairs = {} - for item in entries: - symbol 
= item['symbol'] - filters = {} - filters_ls: list = item.pop('filters') - for entry in filters_ls: - ftype = entry['filterType'] - filters[ftype] = entry - - pairs[symbol] = Pair( - filters=filters, - **item, - ) - - # pairs = { - # item['symbol']: Pair(**item) for item in entries - # } - self._pairs.update(pairs) - - if sym is not None: - return pairs[sym] - else: - return self._pairs - - symbol_info = exch_info - - async def search_symbols( - self, - pattern: str, - limit: int = None, - ) -> dict[str, Any]: - if self._pairs is not None: - data = self._pairs - else: - data = await self.exch_info() - - matches = fuzzy.extractBests( - pattern, - data, - score_cutoff=50, - ) - # repack in dict form - return {item[0]['symbol']: item[0] - for item in matches} - - async def bars( - self, - symbol: str, - start_dt: datetime | None = None, - end_dt: datetime | None = None, - limit: int = 1000, # <- max allowed per query - as_np: bool = True, - - ) -> dict: - - if end_dt is None: - end_dt = now('UTC').add(minutes=1) - - if start_dt is None: - start_dt = end_dt.start_of( - 'minute').subtract(minutes=limit) - - start_time = binance_timestamp(start_dt) - end_time = binance_timestamp(end_dt) - - # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data - bars = await self._api( - 'klines', - params={ - 'symbol': symbol.upper(), - 'interval': '1m', - 'startTime': start_time, - 'endTime': end_time, - 'limit': limit - } - ) - - # TODO: pack this bars scheme into a ``pydantic`` validator type: - # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data - - # TODO: we should port this to ``pydantic`` to avoid doing - # manual validation ourselves.. - new_bars = [] - for i, bar in enumerate(bars): - - bar = OHLC(*bar) - bar.typecast() - - row = [] - for j, (name, ftype) in enumerate(def_iohlcv_fields[1:]): - - # TODO: maybe we should go nanoseconds on all - # history time stamps? - if name == 'time': - # convert to epoch seconds: float - row.append(bar.time / 1000.0) - - else: - row.append(getattr(bar, name)) - - new_bars.append((i,) + tuple(row)) - - array = np.array( - new_bars, - dtype=def_iohlcv_fields, - ) if as_np else bars - return array - - async def get_positions( - self, - recv_window: int = 60000 - - ) -> tuple: - positions = {} - volumes = {} - - for sym in self.watchlist: - log.info(f'doing {sym}...') - params = OrderedDict([ - ('symbol', sym), - ('recvWindow', recv_window), - ('timestamp', binance_timestamp(now())) - ]) - resp = await self._api( - 'allOrders', - params=params, - signed=True - ) - log.info(f'done. 
len {len(resp)}') - await trio.sleep(3) - - return positions, volumes - - async def get_deposits( - self, - recv_window: int = 60000 - ) -> list: - - params = OrderedDict([ - ('recvWindow', recv_window), - ('timestamp', binance_timestamp(now())) - ]) - return await self._sapi( - 'capital/deposit/hisrec', - params=params, - signed=True, - ) - - async def get_withdrawls( - self, - recv_window: int = 60000 - ) -> list: - - params = OrderedDict([ - ('recvWindow', recv_window), - ('timestamp', binance_timestamp(now())) - ]) - return await self._sapi( - 'capital/withdraw/history', - params=params, - signed=True, - ) - - async def submit_limit( - self, - symbol: str, - side: str, # SELL / BUY - quantity: float, - price: float, - # time_in_force: str = 'GTC', - oid: int | None = None, - # iceberg_quantity: float | None = None, - # order_resp_type: str | None = None, - recv_window: int = 60000 - - ) -> int: - symbol = symbol.upper() - - await self.cache_symbols() - - # asset_precision = self._pairs[symbol]['baseAssetPrecision'] - # quote_precision = self._pairs[symbol]['quoteAssetPrecision'] - - params = OrderedDict([ - ('symbol', symbol), - ('side', side.upper()), - ('type', 'LIMIT'), - ('timeInForce', 'GTC'), - ('quantity', quantity), - ('price', price), - ('recvWindow', recv_window), - ('newOrderRespType', 'ACK'), - ('timestamp', binance_timestamp(now())) - ]) - - if oid: - params['newClientOrderId'] = oid - - resp = await self._api( - 'order', - params=params, - signed=True, - action='post' - ) - log.info(resp) - # return resp['orderId'] - return resp['orderId'] - - async def submit_cancel( - self, - symbol: str, - oid: str, - recv_window: int = 60000 - ) -> None: - symbol = symbol.upper() - - params = OrderedDict([ - ('symbol', symbol), - ('orderId', oid), - ('recvWindow', recv_window), - ('timestamp', binance_timestamp(now())) - ]) - - return await self._api( - 'order', - params=params, - signed=True, - action='delete' - ) - - async def get_listen_key(self) -> str: - return (await self._api( - 'userDataStream', - params={}, - action='post' - ))['listenKey'] - - async def keep_alive_key(self, listen_key: str) -> None: - await self._fapi( - 'userDataStream', - params={'listenKey': listen_key}, - action='put' - ) - - async def close_listen_key(self, listen_key: str) -> None: - await self._fapi( - 'userDataStream', - params={'listenKey': listen_key}, - action='delete' - ) - - @acm - async def manage_listen_key(self): - - async def periodic_keep_alive( - self, - listen_key: str, - timeout=60 * 29 # 29 minutes - ): - while True: - await trio.sleep(timeout) - await self.keep_alive_key(listen_key) - - key = await self.get_listen_key() - - async with trio.open_nursery() as n: - n.start_soon(periodic_keep_alive, self, key) - yield key - n.cancel_scope.cancel() - - await self.close_listen_key(key) - - -@acm -async def get_client() -> Client: - client = Client() - log.info('Caching exchange infos..') - await client.exch_info() - yield client - - -# validation type -class AggTrade(Struct, frozen=True): - e: str # Event type - E: int # Event time - s: str # Symbol - a: int # Aggregate trade ID - p: float # Price - q: float # Quantity - f: int # First trade ID - l: int # noqa Last trade ID - T: int # Trade time - m: bool # Is the buyer the market maker? - M: bool # Ignore - - -async def stream_messages( - ws: NoBsWs, -) -> AsyncGenerator[NoBsWs, dict]: - - # TODO: match syntax here! 
- msg: dict[str, Any] - async for msg in ws: - match msg: - # for l1 streams binance doesn't add an event type field so - # identify those messages by matching keys - # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams - case { - # NOTE: this is never an old value it seems, so - # they are always sending real L1 spread updates. - 'u': upid, # update id - 's': sym, - 'b': bid, - 'B': bsize, - 'a': ask, - 'A': asize, - }: - # TODO: it would be super nice to have a `L1` piker type - # which "renders" incremental tick updates from a packed - # msg-struct: - # - backend msgs after packed into the type such that we - # can reduce IPC usage but without each backend having - # to do that incremental update logic manually B) - # - would it maybe be more efficient to use this instead? - # https://binance-docs.github.io/apidocs/spot/en/#diff-depth-stream - l1 = L1( - update_id=upid, - sym=sym, - bid=bid, - bsize=bsize, - ask=ask, - asize=asize, - ) - l1.typecast() - - # repack into piker's tick-quote format - yield 'l1', { - 'symbol': l1.sym, - 'ticks': [ - { - 'type': 'bid', - 'price': l1.bid, - 'size': l1.bsize, - }, - { - 'type': 'bsize', - 'price': l1.bid, - 'size': l1.bsize, - }, - { - 'type': 'ask', - 'price': l1.ask, - 'size': l1.asize, - }, - { - 'type': 'asize', - 'price': l1.ask, - 'size': l1.asize, - } - ] - } - - # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams - case { - 'e': 'aggTrade', - }: - # NOTE: this is purely for a definition, - # ``msgspec.Struct`` does not runtime-validate until you - # decode/encode, see: - # https://jcristharif.com/msgspec/structs.html#type-validation - msg = AggTrade(**msg) # TODO: should we .copy() ? - piker_quote: dict = { - 'symbol': msg.s, - 'last': float(msg.p), - 'brokerd_ts': time.time(), - 'ticks': [{ - 'type': 'trade', - 'price': float(msg.p), - 'size': float(msg.q), - 'broker_ts': msg.T, - }], - } - yield 'trade', piker_quote - - -def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]: - ''' - Create a request subscription packet dict. - - - spot: - https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams - - - futes: - https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams - - ''' - return { - 'method': 'SUBSCRIBE', - 'params': [ - f'{pair.lower()}@{sub_name}' - for pair in pairs - ], - 'id': uid - } - - -@acm -async def open_history_client( - mkt: MktPair, - -) -> tuple[Callable, int]: - - symbol: str = mkt.bs_fqme - - # TODO implement history getter for the new storage layer. 
- async with open_cached_client('binance') as client: - - async def get_ohlc( - timeframe: float, - end_dt: datetime | None = None, - start_dt: datetime | None = None, - - ) -> tuple[ - np.ndarray, - datetime, # start - datetime, # end - ]: - if timeframe != 60: - raise DataUnavailable('Only 1m bars are supported') - - array = await client.bars( - symbol, - start_dt=start_dt, - end_dt=end_dt, - ) - times = array['time'] - if ( - end_dt is None - ): - inow = round(time.time()) - if (inow - times[-1]) > 60: - await tractor.breakpoint() - - start_dt = from_timestamp(times[0]) - end_dt = from_timestamp(times[-1]) - - return array, start_dt, end_dt - - yield get_ohlc, {'erlangs': 3, 'rate': 3} - - -@async_lifo_cache() -async def get_mkt_info( - fqme: str, - -) -> tuple[MktPair, Pair]: - - async with open_cached_client('binance') as client: - - pair: Pair = await client.exch_info(fqme.upper()) - mkt = MktPair( - dst=Asset( - name=pair.baseAsset, - atype='crypto', - tx_tick=digits_to_dec(pair.baseAssetPrecision), - ), - src=Asset( - name=pair.quoteAsset, - atype='crypto', - tx_tick=digits_to_dec(pair.quoteAssetPrecision), - ), - price_tick=pair.price_tick, - size_tick=pair.size_tick, - bs_mktid=pair.symbol, - broker='binance', - ) - both = mkt, pair - return both - - -async def stream_quotes( - - send_chan: trio.abc.SendChannel, - symbols: list[str], - feed_is_live: trio.Event, - loglevel: str = None, - - # startup sync - task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, - -) -> None: - # XXX: required to propagate ``tractor`` loglevel to piker logging - get_console_log(loglevel or tractor.current_actor().loglevel) - - async with ( - send_chan as send_chan, - ): - init_msgs: list[FeedInit] = [] - for sym in symbols: - mkt, pair = await get_mkt_info(sym) - - # build out init msgs according to latest spec - init_msgs.append( - FeedInit(mkt_info=mkt) - ) - - iter_subids = itertools.count() - - @acm - async def subscribe(ws: NoBsWs): - # setup subs - - subid: int = next(iter_subids) - - # trade data (aka L1) - # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker - l1_sub = make_sub(symbols, 'bookTicker', subid) - await ws.send_msg(l1_sub) - - # aggregate (each order clear by taker **not** by maker) - # trades data: - # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams - agg_trades_sub = make_sub(symbols, 'aggTrade', subid) - await ws.send_msg(agg_trades_sub) - - # might get ack from ws server, or maybe some - # other msg still in transit.. - res = await ws.recv_msg() - subid: str | None = res.get('id') - if subid: - assert res['id'] == subid - - yield - - subs = [] - for sym in symbols: - subs.append("{sym}@aggTrade") - subs.append("{sym}@bookTicker") - - # unsub from all pairs on teardown - if ws.connected(): - await ws.send_msg({ - "method": "UNSUBSCRIBE", - "params": subs, - "id": subid, - }) - - # XXX: do we need to ack the unsub? - # await ws.recv_msg() - - async with ( - open_autorecon_ws( - # XXX: see api docs which show diff addr? - # https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information - # 'wss://ws-api.binance.com:443/ws-api/v3', - 'wss://stream.binance.com/ws', - fixture=subscribe, - ) as ws, - - # avoid stream-gen closure from breaking trio.. 
- aclosing(stream_messages(ws)) as msg_gen, - ): - typ, quote = await anext(msg_gen) - - # pull a first quote and deliver - while typ != 'trade': - typ, quote = await anext(msg_gen) - - task_status.started((init_msgs, quote)) - - # signal to caller feed is ready for consumption - feed_is_live.set() - - # import time - # last = time.time() - - # start streaming - async for typ, msg in msg_gen: - - # period = time.time() - last - # hz = 1/period if period else float('inf') - # if hz > 60: - # log.info(f'Binance quotez : {hz}') - topic = msg['symbol'].lower() - await send_chan.send({topic: msg}) - # last = time.time() - - -async def handle_order_requests( - ems_order_stream: tractor.MsgStream -) -> None: - async with open_cached_client('binance') as client: - async for request_msg in ems_order_stream: - log.info(f'Received order request {request_msg}') - - action = request_msg['action'] - - if action in {'buy', 'sell'}: - # validate - order = BrokerdOrder(**request_msg) - - # call our client api to submit the order - reqid = await client.submit_limit( - order.symbol, - order.action, - order.size, - order.price, - oid=order.oid - ) - - # deliver ack that order has been submitted to broker routing - await ems_order_stream.send( - BrokerdOrderAck( - # ems order request id - oid=order.oid, - # broker specific request id - reqid=reqid, - time_ns=time.time_ns(), - ).dict() - ) - - elif action == 'cancel': - msg = BrokerdCancel(**request_msg) - - await client.submit_cancel(msg.symbol, msg.reqid) - - else: - log.error(f'Unknown order command: {request_msg}') - - -@tractor.context -async def trades_dialogue( - ctx: tractor.Context, - loglevel: str = None - -) -> AsyncIterator[dict[str, Any]]: - - async with open_cached_client('binance') as client: - if not client.api_key: - await ctx.started('paper') - return - - # table: PpTable - # ledger: TransactionLedger - - # TODO: load pps and accounts using accounting apis! 
- positions: list[BrokerdPosition] = [] - accounts: list[str] = ['binance.default'] - await ctx.started((positions, accounts)) - - async with ( - ctx.open_stream() as ems_stream, - trio.open_nursery() as n, - open_cached_client('binance') as client, - client.manage_listen_key() as listen_key, - ): - n.start_soon(handle_order_requests, ems_stream) - # await trio.sleep_forever() - - async with open_autorecon_ws( - f'wss://stream.binance.com:9443/ws/{listen_key}', - ) as ws: - event = await ws.recv_msg() - - # https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update - if event.get('e') == 'executionReport': - - oid: str = event.get('c') - side: str = event.get('S').lower() - status: str = event.get('X') - order_qty: float = float(event.get('q')) - filled_qty: float = float(event.get('z')) - cum_transacted_qty: float = float(event.get('Z')) - price_avg: float = cum_transacted_qty / filled_qty - broker_time: float = float(event.get('T')) - commission_amount: float = float(event.get('n')) - commission_asset: float = event.get('N') - - if status == 'TRADE': - if order_qty == filled_qty: - msg = BrokerdFill( - reqid=oid, - time_ns=time.time_ns(), - action=side, - price=price_avg, - broker_details={ - 'name': 'binance', - 'commissions': { - 'amount': commission_amount, - 'asset': commission_asset - }, - 'broker_time': broker_time - }, - broker_time=broker_time - ) - - else: - if status == 'NEW': - status = 'submitted' - - elif status == 'CANCELED': - status = 'cancelled' - - msg = BrokerdStatus( - reqid=oid, - time_ns=time.time_ns(), - status=status, - filled=filled_qty, - remaining=order_qty - filled_qty, - broker_details={'name': 'binance'} - ) - - else: - # XXX: temporary, to catch unhandled msgs - breakpoint() - - await ems_stream.send(msg.dict()) - - -@tractor.context -async def open_symbol_search( - ctx: tractor.Context, -) -> Client: - async with open_cached_client('binance') as client: - - # load all symbols locally for fast search - cache = await client.exch_info() - await ctx.started() - - async with ctx.open_stream() as stream: - - async for pattern in stream: - # results = await client.exch_info(sym=pattern.upper()) - - matches = fuzzy.extractBests( - pattern, - cache, - score_cutoff=50, - ) - # repack in dict form - await stream.send({ - item[0].symbol: item[0] - for item in matches - }) +from .api import ( + get_client, +# ) +# from .feed import ( + get_mkt_info, + open_history_client, + open_symbol_search, + stream_quotes, +# ) +# from .broker import ( + trades_dialogue, + # norm_trade_records, +) + + +__all__ = [ + 'get_client', + 'get_mkt_info', + 'trades_dialogue', + 'open_history_client', + 'open_symbol_search', + 'stream_quotes', + # 'norm_trade_records', +] + + +# tractor RPC enable arg +__enable_modules__: list[str] = [ + 'api', + # 'feed', + # 'broker', +] diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py new file mode 100644 index 000000000..7b847bf83 --- /dev/null +++ b/piker/brokers/binance/api.py @@ -0,0 +1,1111 @@ +# piker: trading gear for hackers +# Copyright (C) +# Guillermo Rodriguez (aka ze jefe) +# Tyler Goodlet +# (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+ +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +""" +Binance clients for http and ws APIs. + +""" +from __future__ import annotations +from collections import OrderedDict +from contextlib import ( + asynccontextmanager as acm, + aclosing, +) +from datetime import datetime +from decimal import Decimal +import itertools +from typing import ( + Any, + Union, + AsyncIterator, + AsyncGenerator, + Callable, +) +import hmac +import time +import hashlib +from pathlib import Path + +import trio +from trio_typing import TaskStatus +from pendulum import ( + now, + from_timestamp, +) +import asks +from fuzzywuzzy import process as fuzzy +import numpy as np +import tractor + +from piker import config +from piker._cacheables import ( + async_lifo_cache, + open_cached_client, +) +from piker.accounting._mktinfo import ( + Asset, + MktPair, + digits_to_dec, +) +from piker.data.types import Struct +from piker.data.validate import FeedInit +from piker.data import def_iohlcv_fields +from piker.data._web_bs import ( + open_autorecon_ws, + NoBsWs, +) +from piker.clearing._messages import ( + BrokerdOrder, + BrokerdOrderAck, + BrokerdStatus, + BrokerdPosition, + BrokerdFill, + BrokerdCancel, + # BrokerdError, +) +from piker.brokers._util import ( + resproc, + SymbolNotFound, + DataUnavailable, + get_logger, + get_console_log, +) + +log = get_logger('piker.brokers.binance') + + +def get_config() -> dict: + + conf: dict + path: Path + conf, path = config.load() + + section = conf.get('binance') + + if not section: + log.warning(f'No config section found for binance in {path}') + return {} + + return section + + +log = get_logger(__name__) + + +_url = 'https://api.binance.com' +_sapi_url = 'https://api.binance.com' +_fapi_url = 'https://testnet.binancefuture.com' + + +# Broker specific ohlc schema (rest) +# XXX TODO? some additional fields are defined in the docs: +# https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data + +# _ohlc_dtype = [ + # ('close_time', int), + # ('quote_vol', float), + # ('num_trades', int), + # ('buy_base_vol', float), + # ('buy_quote_vol', float), + # ('ignore', float), +# ] + +# UI components allow this to be declared such that additional +# (historical) fields can be exposed. +# ohlc_dtype = np.dtype(_ohlc_dtype) + +_show_wap_in_history = False + + +# https://binance-docs.github.io/apidocs/spot/en/#exchange-information + +# TODO: make this frozen again by pre-processing the +# filters list to a dict at init time? 
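# a minimal sketch of that pre-processing (not from the patch itself):
# collapse the raw exchangeInfo ``filters`` list into a dict keyed by
# ``filterType``, which is what ``Client.exch_info()`` further below
# does imperatively and what the ``price_tick``/``size_tick`` props
# rely on.
def flatten_filters(filters_ls: list[dict]) -> dict[str, dict]:
    return {
        entry['filterType']: entry
        for entry in filters_ls
    }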
+class Pair(Struct, frozen=True): + symbol: str + status: str + + baseAsset: str + baseAssetPrecision: int + cancelReplaceAllowed: bool + allowTrailingStop: bool + quoteAsset: str + quotePrecision: int + quoteAssetPrecision: int + + baseCommissionPrecision: int + quoteCommissionPrecision: int + + orderTypes: list[str] + + icebergAllowed: bool + ocoAllowed: bool + quoteOrderQtyMarketAllowed: bool + isSpotTradingAllowed: bool + isMarginTradingAllowed: bool + + defaultSelfTradePreventionMode: str + allowedSelfTradePreventionModes: list[str] + + filters: dict[ + str, + Union[str, int, float] + ] + permissions: list[str] + + @property + def price_tick(self) -> Decimal: + # XXX: lul, after manually inspecting the response format we + # just directly pick out the info we need + step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0') + return Decimal(step_size) + + @property + def size_tick(self) -> Decimal: + step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0') + return Decimal(step_size) + + +class OHLC(Struct): + ''' + Description of the flattened OHLC quote format. + + For schema details see: + https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams + + ''' + time: int + + open: float + high: float + low: float + close: float + volume: float + + close_time: int + + quote_vol: float + num_trades: int + buy_base_vol: float + buy_quote_vol: float + ignore: int + + # null the place holder for `bar_wap` until we + # figure out what to extract for this. + bar_wap: float = 0.0 + + +class L1(Struct): + # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams + + update_id: int + sym: str + + bid: float + bsize: float + ask: float + asize: float + + +# convert datetime obj timestamp to unixtime in milliseconds +def binance_timestamp( + when: datetime +) -> int: + return int((when.timestamp() * 1000) + (when.microsecond / 1000)) + + +class Client: + + def __init__(self) -> None: + + self._pairs: dict[str, Pair] = {} # mkt info table + + # live EP sesh + self._sesh = asks.Session(connections=4) + self._sesh.base_location: str = _url + + # futes testnet rest EPs + self._fapi_sesh = asks.Session(connections=4) + self._fapi_sesh.base_location = _fapi_url + + # sync rest API + self._sapi_sesh = asks.Session(connections=4) + self._sapi_sesh.base_location = _sapi_url + + conf: dict = get_config() + self.api_key: str = conf.get('api_key', '') + self.api_secret: str = conf.get('api_secret', '') + + self.watchlist = conf.get('watchlist', []) + + if self.api_key: + api_key_header = {'X-MBX-APIKEY': self.api_key} + self._sesh.headers.update(api_key_header) + self._fapi_sesh.headers.update(api_key_header) + self._sapi_sesh.headers.update(api_key_header) + + def _get_signature(self, data: OrderedDict) -> str: + + # XXX: Info on security and authentification + # https://binance-docs.github.io/apidocs/#endpoint-security-type + + if not self.api_secret: + raise config.NoSignature( + "Can't generate a signature without setting up credentials" + ) + + query_str = '&'.join([ + f'{_key}={value}' + for _key, value in data.items()]) + log.info(query_str) + msg_auth = hmac.new( + self.api_secret.encode('utf-8'), + query_str.encode('utf-8'), + hashlib.sha256 + ) + return msg_auth.hexdigest() + + async def _api( + self, + method: str, + params: dict | OrderedDict, + signed: bool = False, + action: str = 'get' + + ) -> dict[str, Any]: + + if signed: + params['signature'] = self._get_signature(params) + + resp = await getattr(self._sesh, action)( + 
path=f'/api/v3/{method}', + params=params, + timeout=float('inf'), + ) + + return resproc(resp, log) + + async def _fapi( + self, + method: str, + params: Union[dict, OrderedDict], + signed: bool = False, + action: str = 'get' + ) -> dict[str, Any]: + + if signed: + params['signature'] = self._get_signature(params) + + resp = await getattr(self._fapi_sesh, action)( + path=f'/fapi/v1/{method}', + params=params, + timeout=float('inf') + ) + + return resproc(resp, log) + + async def _sapi( + self, + method: str, + params: Union[dict, OrderedDict], + signed: bool = False, + action: str = 'get' + ) -> dict[str, Any]: + + if signed: + params['signature'] = self._get_signature(params) + + resp = await getattr(self._sapi_sesh, action)( + path=f'/sapi/v1/{method}', + params=params, + timeout=float('inf') + ) + + return resproc(resp, log) + + async def exch_info( + self, + sym: str | None = None, + + ) -> dict[str, Pair] | Pair: + ''' + Fresh exchange-pairs info query for symbol ``sym: str``: + https://binance-docs.github.io/apidocs/spot/en/#exchange-information + + ''' + cached_pair = self._pairs.get(sym) + if cached_pair: + return cached_pair + + # retrieve all symbols by default + params = {} + if sym is not None: + sym = sym.lower() + params = {'symbol': sym} + + resp = await self._api('exchangeInfo', params=params) + entries = resp['symbols'] + if not entries: + raise SymbolNotFound(f'{sym} not found:\n{resp}') + + # pre-process .filters field into a table + pairs = {} + for item in entries: + symbol = item['symbol'] + filters = {} + filters_ls: list = item.pop('filters') + for entry in filters_ls: + ftype = entry['filterType'] + filters[ftype] = entry + + pairs[symbol] = Pair( + filters=filters, + **item, + ) + + # pairs = { + # item['symbol']: Pair(**item) for item in entries + # } + self._pairs.update(pairs) + + if sym is not None: + return pairs[sym] + else: + return self._pairs + + symbol_info = exch_info + + async def search_symbols( + self, + pattern: str, + limit: int = None, + ) -> dict[str, Any]: + if self._pairs is not None: + data = self._pairs + else: + data = await self.exch_info() + + matches = fuzzy.extractBests( + pattern, + data, + score_cutoff=50, + ) + # repack in dict form + return {item[0]['symbol']: item[0] + for item in matches} + + async def bars( + self, + symbol: str, + start_dt: datetime | None = None, + end_dt: datetime | None = None, + limit: int = 1000, # <- max allowed per query + as_np: bool = True, + + ) -> dict: + + if end_dt is None: + end_dt = now('UTC').add(minutes=1) + + if start_dt is None: + start_dt = end_dt.start_of( + 'minute').subtract(minutes=limit) + + start_time = binance_timestamp(start_dt) + end_time = binance_timestamp(end_dt) + + # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data + bars = await self._api( + 'klines', + params={ + 'symbol': symbol.upper(), + 'interval': '1m', + 'startTime': start_time, + 'endTime': end_time, + 'limit': limit + } + ) + + # TODO: pack this bars scheme into a ``pydantic`` validator type: + # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data + + # TODO: we should port this to ``pydantic`` to avoid doing + # manual validation ourselves.. + new_bars = [] + for i, bar in enumerate(bars): + + bar = OHLC(*bar) + bar.typecast() + + row = [] + for j, (name, ftype) in enumerate(def_iohlcv_fields[1:]): + + # TODO: maybe we should go nanoseconds on all + # history time stamps? 
+ if name == 'time': + # convert to epoch seconds: float + row.append(bar.time / 1000.0) + + else: + row.append(getattr(bar, name)) + + new_bars.append((i,) + tuple(row)) + + array = np.array( + new_bars, + dtype=def_iohlcv_fields, + ) if as_np else bars + return array + + async def get_positions( + self, + recv_window: int = 60000 + + ) -> tuple: + positions = {} + volumes = {} + + for sym in self.watchlist: + log.info(f'doing {sym}...') + params = OrderedDict([ + ('symbol', sym), + ('recvWindow', recv_window), + ('timestamp', binance_timestamp(now())) + ]) + resp = await self._api( + 'allOrders', + params=params, + signed=True + ) + log.info(f'done. len {len(resp)}') + await trio.sleep(3) + + return positions, volumes + + async def get_deposits( + self, + recv_window: int = 60000 + ) -> list: + + params = OrderedDict([ + ('recvWindow', recv_window), + ('timestamp', binance_timestamp(now())) + ]) + return await self._sapi( + 'capital/deposit/hisrec', + params=params, + signed=True, + ) + + async def get_withdrawls( + self, + recv_window: int = 60000 + ) -> list: + + params = OrderedDict([ + ('recvWindow', recv_window), + ('timestamp', binance_timestamp(now())) + ]) + return await self._sapi( + 'capital/withdraw/history', + params=params, + signed=True, + ) + + async def submit_limit( + self, + symbol: str, + side: str, # SELL / BUY + quantity: float, + price: float, + # time_in_force: str = 'GTC', + oid: int | None = None, + # iceberg_quantity: float | None = None, + # order_resp_type: str | None = None, + recv_window: int = 60000 + + ) -> int: + symbol = symbol.upper() + + await self.cache_symbols() + + # asset_precision = self._pairs[symbol]['baseAssetPrecision'] + # quote_precision = self._pairs[symbol]['quoteAssetPrecision'] + + params = OrderedDict([ + ('symbol', symbol), + ('side', side.upper()), + ('type', 'LIMIT'), + ('timeInForce', 'GTC'), + ('quantity', quantity), + ('price', price), + ('recvWindow', recv_window), + ('newOrderRespType', 'ACK'), + ('timestamp', binance_timestamp(now())) + ]) + + if oid: + params['newClientOrderId'] = oid + + resp = await self._api( + 'order', + params=params, + signed=True, + action='post' + ) + log.info(resp) + # return resp['orderId'] + return resp['orderId'] + + async def submit_cancel( + self, + symbol: str, + oid: str, + recv_window: int = 60000 + ) -> None: + symbol = symbol.upper() + + params = OrderedDict([ + ('symbol', symbol), + ('orderId', oid), + ('recvWindow', recv_window), + ('timestamp', binance_timestamp(now())) + ]) + + return await self._api( + 'order', + params=params, + signed=True, + action='delete' + ) + + async def get_listen_key(self) -> str: + return (await self._api( + 'userDataStream', + params={}, + action='post' + ))['listenKey'] + + async def keep_alive_key(self, listen_key: str) -> None: + await self._fapi( + 'userDataStream', + params={'listenKey': listen_key}, + action='put' + ) + + async def close_listen_key(self, listen_key: str) -> None: + await self._fapi( + 'userDataStream', + params={'listenKey': listen_key}, + action='delete' + ) + + @acm + async def manage_listen_key(self): + + async def periodic_keep_alive( + self, + listen_key: str, + timeout=60 * 29 # 29 minutes + ): + while True: + await trio.sleep(timeout) + await self.keep_alive_key(listen_key) + + key = await self.get_listen_key() + + async with trio.open_nursery() as n: + n.start_soon(periodic_keep_alive, self, key) + yield key + n.cancel_scope.cancel() + + await self.close_listen_key(key) + + +@acm +async def get_client() -> Client: + client = 
Client() + log.info('Caching exchange infos..') + await client.exch_info() + yield client + + +# validation type +class AggTrade(Struct, frozen=True): + e: str # Event type + E: int # Event time + s: str # Symbol + a: int # Aggregate trade ID + p: float # Price + q: float # Quantity + f: int # First trade ID + l: int # noqa Last trade ID + T: int # Trade time + m: bool # Is the buyer the market maker? + M: bool # Ignore + + +async def stream_messages( + ws: NoBsWs, +) -> AsyncGenerator[NoBsWs, dict]: + + # TODO: match syntax here! + msg: dict[str, Any] + async for msg in ws: + match msg: + # for l1 streams binance doesn't add an event type field so + # identify those messages by matching keys + # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams + case { + # NOTE: this is never an old value it seems, so + # they are always sending real L1 spread updates. + 'u': upid, # update id + 's': sym, + 'b': bid, + 'B': bsize, + 'a': ask, + 'A': asize, + }: + # TODO: it would be super nice to have a `L1` piker type + # which "renders" incremental tick updates from a packed + # msg-struct: + # - backend msgs after packed into the type such that we + # can reduce IPC usage but without each backend having + # to do that incremental update logic manually B) + # - would it maybe be more efficient to use this instead? + # https://binance-docs.github.io/apidocs/spot/en/#diff-depth-stream + l1 = L1( + update_id=upid, + sym=sym, + bid=bid, + bsize=bsize, + ask=ask, + asize=asize, + ) + l1.typecast() + + # repack into piker's tick-quote format + yield 'l1', { + 'symbol': l1.sym, + 'ticks': [ + { + 'type': 'bid', + 'price': l1.bid, + 'size': l1.bsize, + }, + { + 'type': 'bsize', + 'price': l1.bid, + 'size': l1.bsize, + }, + { + 'type': 'ask', + 'price': l1.ask, + 'size': l1.asize, + }, + { + 'type': 'asize', + 'price': l1.ask, + 'size': l1.asize, + } + ] + } + + # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams + case { + 'e': 'aggTrade', + }: + # NOTE: this is purely for a definition, + # ``msgspec.Struct`` does not runtime-validate until you + # decode/encode, see: + # https://jcristharif.com/msgspec/structs.html#type-validation + msg = AggTrade(**msg) # TODO: should we .copy() ? + piker_quote: dict = { + 'symbol': msg.s, + 'last': float(msg.p), + 'brokerd_ts': time.time(), + 'ticks': [{ + 'type': 'trade', + 'price': float(msg.p), + 'size': float(msg.q), + 'broker_ts': msg.T, + }], + } + yield 'trade', piker_quote + + +def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]: + ''' + Create a request subscription packet dict. + + - spot: + https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams + + - futes: + https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams + + ''' + return { + 'method': 'SUBSCRIBE', + 'params': [ + f'{pair.lower()}@{sub_name}' + for pair in pairs + ], + 'id': uid + } + + +@acm +async def open_history_client( + mkt: MktPair, + +) -> tuple[Callable, int]: + + symbol: str = mkt.bs_fqme + + # TODO implement history getter for the new storage layer. 
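    # NOTE: the ``get_ohlc()`` closure delivered below only knows how
    # to fetch 1m bars (timeframe == 60) and hands back each frame as
    # an (array, start_dt, end_dt) triple.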
+ async with open_cached_client('binance') as client: + + async def get_ohlc( + timeframe: float, + end_dt: datetime | None = None, + start_dt: datetime | None = None, + + ) -> tuple[ + np.ndarray, + datetime, # start + datetime, # end + ]: + if timeframe != 60: + raise DataUnavailable('Only 1m bars are supported') + + array = await client.bars( + symbol, + start_dt=start_dt, + end_dt=end_dt, + ) + times = array['time'] + if ( + end_dt is None + ): + inow = round(time.time()) + if (inow - times[-1]) > 60: + await tractor.breakpoint() + + start_dt = from_timestamp(times[0]) + end_dt = from_timestamp(times[-1]) + + return array, start_dt, end_dt + + yield get_ohlc, {'erlangs': 3, 'rate': 3} + + +@async_lifo_cache() +async def get_mkt_info( + fqme: str, + +) -> tuple[MktPair, Pair]: + + async with open_cached_client('binance') as client: + + pair: Pair = await client.exch_info(fqme.upper()) + mkt = MktPair( + dst=Asset( + name=pair.baseAsset, + atype='crypto', + tx_tick=digits_to_dec(pair.baseAssetPrecision), + ), + src=Asset( + name=pair.quoteAsset, + atype='crypto', + tx_tick=digits_to_dec(pair.quoteAssetPrecision), + ), + price_tick=pair.price_tick, + size_tick=pair.size_tick, + bs_mktid=pair.symbol, + broker='binance', + ) + both = mkt, pair + return both + + +async def stream_quotes( + + send_chan: trio.abc.SendChannel, + symbols: list[str], + feed_is_live: trio.Event, + loglevel: str = None, + + # startup sync + task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, + +) -> None: + # XXX: required to propagate ``tractor`` loglevel to piker logging + get_console_log(loglevel or tractor.current_actor().loglevel) + + async with ( + send_chan as send_chan, + ): + init_msgs: list[FeedInit] = [] + for sym in symbols: + mkt, pair = await get_mkt_info(sym) + + # build out init msgs according to latest spec + init_msgs.append( + FeedInit(mkt_info=mkt) + ) + + iter_subids = itertools.count() + + @acm + async def subscribe(ws: NoBsWs): + # setup subs + + subid: int = next(iter_subids) + + # trade data (aka L1) + # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker + l1_sub = make_sub(symbols, 'bookTicker', subid) + await ws.send_msg(l1_sub) + + # aggregate (each order clear by taker **not** by maker) + # trades data: + # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams + agg_trades_sub = make_sub(symbols, 'aggTrade', subid) + await ws.send_msg(agg_trades_sub) + + # might get ack from ws server, or maybe some + # other msg still in transit.. + res = await ws.recv_msg() + subid: str | None = res.get('id') + if subid: + assert res['id'] == subid + + yield + + subs = [] + for sym in symbols: + subs.append("{sym}@aggTrade") + subs.append("{sym}@bookTicker") + + # unsub from all pairs on teardown + if ws.connected(): + await ws.send_msg({ + "method": "UNSUBSCRIBE", + "params": subs, + "id": subid, + }) + + # XXX: do we need to ack the unsub? + # await ws.recv_msg() + + async with ( + open_autorecon_ws( + # XXX: see api docs which show diff addr? + # https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information + # 'wss://ws-api.binance.com:443/ws-api/v3', + 'wss://stream.binance.com/ws', + fixture=subscribe, + ) as ws, + + # avoid stream-gen closure from breaking trio.. 
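        # (``aclosing()`` ensures the async-gen's ``aclose()`` teardown
        # runs in this task rather than at garbage collection time)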
+ aclosing(stream_messages(ws)) as msg_gen, + ): + typ, quote = await anext(msg_gen) + + # pull a first quote and deliver + while typ != 'trade': + typ, quote = await anext(msg_gen) + + task_status.started((init_msgs, quote)) + + # signal to caller feed is ready for consumption + feed_is_live.set() + + # import time + # last = time.time() + + # start streaming + async for typ, msg in msg_gen: + + # period = time.time() - last + # hz = 1/period if period else float('inf') + # if hz > 60: + # log.info(f'Binance quotez : {hz}') + topic = msg['symbol'].lower() + await send_chan.send({topic: msg}) + # last = time.time() + + +async def handle_order_requests( + ems_order_stream: tractor.MsgStream +) -> None: + async with open_cached_client('binance') as client: + async for request_msg in ems_order_stream: + log.info(f'Received order request {request_msg}') + + action = request_msg['action'] + + if action in {'buy', 'sell'}: + # validate + order = BrokerdOrder(**request_msg) + + # call our client api to submit the order + reqid = await client.submit_limit( + order.symbol, + order.action, + order.size, + order.price, + oid=order.oid + ) + + # deliver ack that order has been submitted to broker routing + await ems_order_stream.send( + BrokerdOrderAck( + # ems order request id + oid=order.oid, + # broker specific request id + reqid=reqid, + time_ns=time.time_ns(), + ).dict() + ) + + elif action == 'cancel': + msg = BrokerdCancel(**request_msg) + + await client.submit_cancel(msg.symbol, msg.reqid) + + else: + log.error(f'Unknown order command: {request_msg}') + + +@tractor.context +async def trades_dialogue( + ctx: tractor.Context, + loglevel: str = None + +) -> AsyncIterator[dict[str, Any]]: + + async with open_cached_client('binance') as client: + if not client.api_key: + await ctx.started('paper') + return + + # table: PpTable + # ledger: TransactionLedger + + # TODO: load pps and accounts using accounting apis! 
+ positions: list[BrokerdPosition] = [] + accounts: list[str] = ['binance.default'] + await ctx.started((positions, accounts)) + + async with ( + ctx.open_stream() as ems_stream, + trio.open_nursery() as n, + open_cached_client('binance') as client, + client.manage_listen_key() as listen_key, + ): + n.start_soon(handle_order_requests, ems_stream) + # await trio.sleep_forever() + + async with open_autorecon_ws( + f'wss://stream.binance.com:9443/ws/{listen_key}', + ) as ws: + event = await ws.recv_msg() + + # https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update + if event.get('e') == 'executionReport': + + oid: str = event.get('c') + side: str = event.get('S').lower() + status: str = event.get('X') + order_qty: float = float(event.get('q')) + filled_qty: float = float(event.get('z')) + cum_transacted_qty: float = float(event.get('Z')) + price_avg: float = cum_transacted_qty / filled_qty + broker_time: float = float(event.get('T')) + commission_amount: float = float(event.get('n')) + commission_asset: float = event.get('N') + + if status == 'TRADE': + if order_qty == filled_qty: + msg = BrokerdFill( + reqid=oid, + time_ns=time.time_ns(), + action=side, + price=price_avg, + broker_details={ + 'name': 'binance', + 'commissions': { + 'amount': commission_amount, + 'asset': commission_asset + }, + 'broker_time': broker_time + }, + broker_time=broker_time + ) + + else: + if status == 'NEW': + status = 'submitted' + + elif status == 'CANCELED': + status = 'cancelled' + + msg = BrokerdStatus( + reqid=oid, + time_ns=time.time_ns(), + status=status, + filled=filled_qty, + remaining=order_qty - filled_qty, + broker_details={'name': 'binance'} + ) + + else: + # XXX: temporary, to catch unhandled msgs + breakpoint() + + await ems_stream.send(msg.dict()) + + +@tractor.context +async def open_symbol_search( + ctx: tractor.Context, +) -> Client: + async with open_cached_client('binance') as client: + + # load all symbols locally for fast search + cache = await client.exch_info() + await ctx.started() + + async with ctx.open_stream() as stream: + + async for pattern in stream: + # results = await client.exch_info(sym=pattern.upper()) + + matches = fuzzy.extractBests( + pattern, + cache, + score_cutoff=50, + ) + # repack in dict form + await stream.send({ + item[0].symbol: item[0] + for item in matches + }) From ae1c5a0db071ab838addc83b2bbe296dc8bbfcf9 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 9 Jun 2023 16:45:02 -0400 Subject: [PATCH 16/73] binance: breakout into `feed` and `broker` mods like other backends --- piker/brokers/binance/__init__.py | 16 +- piker/brokers/binance/api.py | 511 ------------------------------ piker/brokers/binance/broker.py | 188 +++++++++++ piker/brokers/binance/feed.py | 414 ++++++++++++++++++++++++ 4 files changed, 609 insertions(+), 520 deletions(-) create mode 100644 piker/brokers/binance/broker.py create mode 100644 piker/brokers/binance/feed.py diff --git a/piker/brokers/binance/__init__.py b/piker/brokers/binance/__init__.py index c840f0710..cfdbd3a5d 100644 --- a/piker/brokers/binance/__init__.py +++ b/piker/brokers/binance/__init__.py @@ -23,16 +23,15 @@ """ from .api import ( get_client, -# ) -# from .feed import ( +) +from .feed import ( get_mkt_info, open_history_client, open_symbol_search, stream_quotes, -# ) -# from .broker import ( +) +from .broker import ( trades_dialogue, - # norm_trade_records, ) @@ -43,13 +42,12 @@ 'open_history_client', 'open_symbol_search', 'stream_quotes', - # 'norm_trade_records', ] -# tractor RPC enable arg +# 
`brokerd` modules __enable_modules__: list[str] = [ 'api', - # 'feed', - # 'broker', + 'feed', + 'broker', ] diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 7b847bf83..9ab9f8352 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -25,66 +25,32 @@ from collections import OrderedDict from contextlib import ( asynccontextmanager as acm, - aclosing, ) from datetime import datetime from decimal import Decimal -import itertools from typing import ( Any, Union, - AsyncIterator, - AsyncGenerator, - Callable, ) import hmac -import time import hashlib from pathlib import Path import trio -from trio_typing import TaskStatus from pendulum import ( now, - from_timestamp, ) import asks from fuzzywuzzy import process as fuzzy import numpy as np -import tractor from piker import config -from piker._cacheables import ( - async_lifo_cache, - open_cached_client, -) -from piker.accounting._mktinfo import ( - Asset, - MktPair, - digits_to_dec, -) from piker.data.types import Struct -from piker.data.validate import FeedInit from piker.data import def_iohlcv_fields -from piker.data._web_bs import ( - open_autorecon_ws, - NoBsWs, -) -from piker.clearing._messages import ( - BrokerdOrder, - BrokerdOrderAck, - BrokerdStatus, - BrokerdPosition, - BrokerdFill, - BrokerdCancel, - # BrokerdError, -) from piker.brokers._util import ( resproc, SymbolNotFound, - DataUnavailable, get_logger, - get_console_log, ) log = get_logger('piker.brokers.binance') @@ -211,18 +177,6 @@ class OHLC(Struct): bar_wap: float = 0.0 -class L1(Struct): - # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams - - update_id: int - sym: str - - bid: float - bsize: float - ask: float - asize: float - - # convert datetime obj timestamp to unixtime in milliseconds def binance_timestamp( when: datetime @@ -644,468 +598,3 @@ async def get_client() -> Client: log.info('Caching exchange infos..') await client.exch_info() yield client - - -# validation type -class AggTrade(Struct, frozen=True): - e: str # Event type - E: int # Event time - s: str # Symbol - a: int # Aggregate trade ID - p: float # Price - q: float # Quantity - f: int # First trade ID - l: int # noqa Last trade ID - T: int # Trade time - m: bool # Is the buyer the market maker? - M: bool # Ignore - - -async def stream_messages( - ws: NoBsWs, -) -> AsyncGenerator[NoBsWs, dict]: - - # TODO: match syntax here! - msg: dict[str, Any] - async for msg in ws: - match msg: - # for l1 streams binance doesn't add an event type field so - # identify those messages by matching keys - # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams - case { - # NOTE: this is never an old value it seems, so - # they are always sending real L1 spread updates. - 'u': upid, # update id - 's': sym, - 'b': bid, - 'B': bsize, - 'a': ask, - 'A': asize, - }: - # TODO: it would be super nice to have a `L1` piker type - # which "renders" incremental tick updates from a packed - # msg-struct: - # - backend msgs after packed into the type such that we - # can reduce IPC usage but without each backend having - # to do that incremental update logic manually B) - # - would it maybe be more efficient to use this instead? 
- # https://binance-docs.github.io/apidocs/spot/en/#diff-depth-stream - l1 = L1( - update_id=upid, - sym=sym, - bid=bid, - bsize=bsize, - ask=ask, - asize=asize, - ) - l1.typecast() - - # repack into piker's tick-quote format - yield 'l1', { - 'symbol': l1.sym, - 'ticks': [ - { - 'type': 'bid', - 'price': l1.bid, - 'size': l1.bsize, - }, - { - 'type': 'bsize', - 'price': l1.bid, - 'size': l1.bsize, - }, - { - 'type': 'ask', - 'price': l1.ask, - 'size': l1.asize, - }, - { - 'type': 'asize', - 'price': l1.ask, - 'size': l1.asize, - } - ] - } - - # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams - case { - 'e': 'aggTrade', - }: - # NOTE: this is purely for a definition, - # ``msgspec.Struct`` does not runtime-validate until you - # decode/encode, see: - # https://jcristharif.com/msgspec/structs.html#type-validation - msg = AggTrade(**msg) # TODO: should we .copy() ? - piker_quote: dict = { - 'symbol': msg.s, - 'last': float(msg.p), - 'brokerd_ts': time.time(), - 'ticks': [{ - 'type': 'trade', - 'price': float(msg.p), - 'size': float(msg.q), - 'broker_ts': msg.T, - }], - } - yield 'trade', piker_quote - - -def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]: - ''' - Create a request subscription packet dict. - - - spot: - https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams - - - futes: - https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams - - ''' - return { - 'method': 'SUBSCRIBE', - 'params': [ - f'{pair.lower()}@{sub_name}' - for pair in pairs - ], - 'id': uid - } - - -@acm -async def open_history_client( - mkt: MktPair, - -) -> tuple[Callable, int]: - - symbol: str = mkt.bs_fqme - - # TODO implement history getter for the new storage layer. - async with open_cached_client('binance') as client: - - async def get_ohlc( - timeframe: float, - end_dt: datetime | None = None, - start_dt: datetime | None = None, - - ) -> tuple[ - np.ndarray, - datetime, # start - datetime, # end - ]: - if timeframe != 60: - raise DataUnavailable('Only 1m bars are supported') - - array = await client.bars( - symbol, - start_dt=start_dt, - end_dt=end_dt, - ) - times = array['time'] - if ( - end_dt is None - ): - inow = round(time.time()) - if (inow - times[-1]) > 60: - await tractor.breakpoint() - - start_dt = from_timestamp(times[0]) - end_dt = from_timestamp(times[-1]) - - return array, start_dt, end_dt - - yield get_ohlc, {'erlangs': 3, 'rate': 3} - - -@async_lifo_cache() -async def get_mkt_info( - fqme: str, - -) -> tuple[MktPair, Pair]: - - async with open_cached_client('binance') as client: - - pair: Pair = await client.exch_info(fqme.upper()) - mkt = MktPair( - dst=Asset( - name=pair.baseAsset, - atype='crypto', - tx_tick=digits_to_dec(pair.baseAssetPrecision), - ), - src=Asset( - name=pair.quoteAsset, - atype='crypto', - tx_tick=digits_to_dec(pair.quoteAssetPrecision), - ), - price_tick=pair.price_tick, - size_tick=pair.size_tick, - bs_mktid=pair.symbol, - broker='binance', - ) - both = mkt, pair - return both - - -async def stream_quotes( - - send_chan: trio.abc.SendChannel, - symbols: list[str], - feed_is_live: trio.Event, - loglevel: str = None, - - # startup sync - task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, - -) -> None: - # XXX: required to propagate ``tractor`` loglevel to piker logging - get_console_log(loglevel or tractor.current_actor().loglevel) - - async with ( - send_chan as send_chan, - ): - init_msgs: list[FeedInit] = [] - for sym in symbols: - mkt, pair = 
await get_mkt_info(sym) - - # build out init msgs according to latest spec - init_msgs.append( - FeedInit(mkt_info=mkt) - ) - - iter_subids = itertools.count() - - @acm - async def subscribe(ws: NoBsWs): - # setup subs - - subid: int = next(iter_subids) - - # trade data (aka L1) - # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker - l1_sub = make_sub(symbols, 'bookTicker', subid) - await ws.send_msg(l1_sub) - - # aggregate (each order clear by taker **not** by maker) - # trades data: - # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams - agg_trades_sub = make_sub(symbols, 'aggTrade', subid) - await ws.send_msg(agg_trades_sub) - - # might get ack from ws server, or maybe some - # other msg still in transit.. - res = await ws.recv_msg() - subid: str | None = res.get('id') - if subid: - assert res['id'] == subid - - yield - - subs = [] - for sym in symbols: - subs.append("{sym}@aggTrade") - subs.append("{sym}@bookTicker") - - # unsub from all pairs on teardown - if ws.connected(): - await ws.send_msg({ - "method": "UNSUBSCRIBE", - "params": subs, - "id": subid, - }) - - # XXX: do we need to ack the unsub? - # await ws.recv_msg() - - async with ( - open_autorecon_ws( - # XXX: see api docs which show diff addr? - # https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information - # 'wss://ws-api.binance.com:443/ws-api/v3', - 'wss://stream.binance.com/ws', - fixture=subscribe, - ) as ws, - - # avoid stream-gen closure from breaking trio.. - aclosing(stream_messages(ws)) as msg_gen, - ): - typ, quote = await anext(msg_gen) - - # pull a first quote and deliver - while typ != 'trade': - typ, quote = await anext(msg_gen) - - task_status.started((init_msgs, quote)) - - # signal to caller feed is ready for consumption - feed_is_live.set() - - # import time - # last = time.time() - - # start streaming - async for typ, msg in msg_gen: - - # period = time.time() - last - # hz = 1/period if period else float('inf') - # if hz > 60: - # log.info(f'Binance quotez : {hz}') - topic = msg['symbol'].lower() - await send_chan.send({topic: msg}) - # last = time.time() - - -async def handle_order_requests( - ems_order_stream: tractor.MsgStream -) -> None: - async with open_cached_client('binance') as client: - async for request_msg in ems_order_stream: - log.info(f'Received order request {request_msg}') - - action = request_msg['action'] - - if action in {'buy', 'sell'}: - # validate - order = BrokerdOrder(**request_msg) - - # call our client api to submit the order - reqid = await client.submit_limit( - order.symbol, - order.action, - order.size, - order.price, - oid=order.oid - ) - - # deliver ack that order has been submitted to broker routing - await ems_order_stream.send( - BrokerdOrderAck( - # ems order request id - oid=order.oid, - # broker specific request id - reqid=reqid, - time_ns=time.time_ns(), - ).dict() - ) - - elif action == 'cancel': - msg = BrokerdCancel(**request_msg) - - await client.submit_cancel(msg.symbol, msg.reqid) - - else: - log.error(f'Unknown order command: {request_msg}') - - -@tractor.context -async def trades_dialogue( - ctx: tractor.Context, - loglevel: str = None - -) -> AsyncIterator[dict[str, Any]]: - - async with open_cached_client('binance') as client: - if not client.api_key: - await ctx.started('paper') - return - - # table: PpTable - # ledger: TransactionLedger - - # TODO: load pps and accounts using accounting apis! 
- positions: list[BrokerdPosition] = [] - accounts: list[str] = ['binance.default'] - await ctx.started((positions, accounts)) - - async with ( - ctx.open_stream() as ems_stream, - trio.open_nursery() as n, - open_cached_client('binance') as client, - client.manage_listen_key() as listen_key, - ): - n.start_soon(handle_order_requests, ems_stream) - # await trio.sleep_forever() - - async with open_autorecon_ws( - f'wss://stream.binance.com:9443/ws/{listen_key}', - ) as ws: - event = await ws.recv_msg() - - # https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update - if event.get('e') == 'executionReport': - - oid: str = event.get('c') - side: str = event.get('S').lower() - status: str = event.get('X') - order_qty: float = float(event.get('q')) - filled_qty: float = float(event.get('z')) - cum_transacted_qty: float = float(event.get('Z')) - price_avg: float = cum_transacted_qty / filled_qty - broker_time: float = float(event.get('T')) - commission_amount: float = float(event.get('n')) - commission_asset: float = event.get('N') - - if status == 'TRADE': - if order_qty == filled_qty: - msg = BrokerdFill( - reqid=oid, - time_ns=time.time_ns(), - action=side, - price=price_avg, - broker_details={ - 'name': 'binance', - 'commissions': { - 'amount': commission_amount, - 'asset': commission_asset - }, - 'broker_time': broker_time - }, - broker_time=broker_time - ) - - else: - if status == 'NEW': - status = 'submitted' - - elif status == 'CANCELED': - status = 'cancelled' - - msg = BrokerdStatus( - reqid=oid, - time_ns=time.time_ns(), - status=status, - filled=filled_qty, - remaining=order_qty - filled_qty, - broker_details={'name': 'binance'} - ) - - else: - # XXX: temporary, to catch unhandled msgs - breakpoint() - - await ems_stream.send(msg.dict()) - - -@tractor.context -async def open_symbol_search( - ctx: tractor.Context, -) -> Client: - async with open_cached_client('binance') as client: - - # load all symbols locally for fast search - cache = await client.exch_info() - await ctx.started() - - async with ctx.open_stream() as stream: - - async for pattern in stream: - # results = await client.exch_info(sym=pattern.upper()) - - matches = fuzzy.extractBests( - pattern, - cache, - score_cutoff=50, - ) - # repack in dict form - await stream.send({ - item[0].symbol: item[0] - for item in matches - }) diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py new file mode 100644 index 000000000..53dd7a64d --- /dev/null +++ b/piker/brokers/binance/broker.py @@ -0,0 +1,188 @@ +# piker: trading gear for hackers +# Copyright (C) +# Guillermo Rodriguez (aka ze jefe) +# Tyler Goodlet +# (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +''' +Live order control B) + +''' +from __future__ import annotations +from typing import ( + Any, + AsyncIterator, +) +import time + +import tractor +import trio + +from piker.brokers._util import ( + get_logger, +) +from piker.data._web_bs import ( + open_autorecon_ws, + NoBsWs, +) +from piker._cacheables import ( + open_cached_client, +) +from piker.clearing._messages import ( + BrokerdOrder, + BrokerdOrderAck, + BrokerdStatus, + BrokerdPosition, + BrokerdFill, + BrokerdCancel, + # BrokerdError, +) + +log = get_logger('piker.brokers.binance') + + +async def handle_order_requests( + ems_order_stream: tractor.MsgStream +) -> None: + async with open_cached_client('binance') as client: + async for request_msg in ems_order_stream: + log.info(f'Received order request {request_msg}') + + action = request_msg['action'] + + if action in {'buy', 'sell'}: + # validate + order = BrokerdOrder(**request_msg) + + # call our client api to submit the order + reqid = await client.submit_limit( + order.symbol, + order.action, + order.size, + order.price, + oid=order.oid + ) + + # deliver ack that order has been submitted to broker routing + await ems_order_stream.send( + BrokerdOrderAck( + # ems order request id + oid=order.oid, + # broker specific request id + reqid=reqid, + time_ns=time.time_ns(), + ).dict() + ) + + elif action == 'cancel': + msg = BrokerdCancel(**request_msg) + + await client.submit_cancel(msg.symbol, msg.reqid) + + else: + log.error(f'Unknown order command: {request_msg}') + + +@tractor.context +async def trades_dialogue( + ctx: tractor.Context, + loglevel: str = None + +) -> AsyncIterator[dict[str, Any]]: + + async with open_cached_client('binance') as client: + if not client.api_key: + await ctx.started('paper') + return + + # table: PpTable + # ledger: TransactionLedger + + # TODO: load pps and accounts using accounting apis! 
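        # NOTE: until then we report an empty position set and a single
        # hard-coded 'binance.default' account back to the EMS.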
+ positions: list[BrokerdPosition] = [] + accounts: list[str] = ['binance.default'] + await ctx.started((positions, accounts)) + + async with ( + ctx.open_stream() as ems_stream, + trio.open_nursery() as n, + open_cached_client('binance') as client, + client.manage_listen_key() as listen_key, + ): + n.start_soon(handle_order_requests, ems_stream) + # await trio.sleep_forever() + + ws: NoBsWs + async with open_autorecon_ws( + f'wss://stream.binance.com:9443/ws/{listen_key}', + ) as ws: + event = await ws.recv_msg() + + # https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update + if event.get('e') == 'executionReport': + + oid: str = event.get('c') + side: str = event.get('S').lower() + status: str = event.get('X') + order_qty: float = float(event.get('q')) + filled_qty: float = float(event.get('z')) + cum_transacted_qty: float = float(event.get('Z')) + price_avg: float = cum_transacted_qty / filled_qty + broker_time: float = float(event.get('T')) + commission_amount: float = float(event.get('n')) + commission_asset: float = event.get('N') + + if status == 'TRADE': + if order_qty == filled_qty: + msg = BrokerdFill( + reqid=oid, + time_ns=time.time_ns(), + action=side, + price=price_avg, + broker_details={ + 'name': 'binance', + 'commissions': { + 'amount': commission_amount, + 'asset': commission_asset + }, + 'broker_time': broker_time + }, + broker_time=broker_time + ) + + else: + if status == 'NEW': + status = 'submitted' + + elif status == 'CANCELED': + status = 'cancelled' + + msg = BrokerdStatus( + reqid=oid, + time_ns=time.time_ns(), + status=status, + filled=filled_qty, + remaining=order_qty - filled_qty, + broker_details={'name': 'binance'} + ) + + else: + # XXX: temporary, to catch unhandled msgs + breakpoint() + + await ems_stream.send(msg.dict()) + + diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py new file mode 100644 index 000000000..9ecda184c --- /dev/null +++ b/piker/brokers/binance/feed.py @@ -0,0 +1,414 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +Real-time and historical data feed endpoints. 
+ +''' +from __future__ import annotations +from contextlib import ( + asynccontextmanager as acm, + aclosing, +) +from datetime import datetime +import itertools +from typing import ( + Any, + AsyncGenerator, + Callable, +) +import time + +import trio +from trio_typing import TaskStatus +from pendulum import ( + from_timestamp, +) +from fuzzywuzzy import process as fuzzy +import numpy as np +import tractor + +from piker._cacheables import ( + async_lifo_cache, + open_cached_client, +) +from piker.accounting._mktinfo import ( + Asset, + MktPair, + digits_to_dec, +) +from piker.data.types import Struct +from piker.data.validate import FeedInit +from piker.data._web_bs import ( + open_autorecon_ws, + NoBsWs, +) +from piker.brokers._util import ( + DataUnavailable, + get_logger, + get_console_log, +) + +from .api import ( + Client, + Pair, +) + +log = get_logger('piker.brokers.binance') + + +class L1(Struct): + # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams + + update_id: int + sym: str + + bid: float + bsize: float + ask: float + asize: float + + +# validation type +class AggTrade(Struct, frozen=True): + e: str # Event type + E: int # Event time + s: str # Symbol + a: int # Aggregate trade ID + p: float # Price + q: float # Quantity + f: int # First trade ID + l: int # noqa Last trade ID + T: int # Trade time + m: bool # Is the buyer the market maker? + M: bool # Ignore + + +async def stream_messages( + ws: NoBsWs, +) -> AsyncGenerator[NoBsWs, dict]: + + # TODO: match syntax here! + msg: dict[str, Any] + async for msg in ws: + match msg: + # for l1 streams binance doesn't add an event type field so + # identify those messages by matching keys + # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams + case { + # NOTE: this is never an old value it seems, so + # they are always sending real L1 spread updates. + 'u': upid, # update id + 's': sym, + 'b': bid, + 'B': bsize, + 'a': ask, + 'A': asize, + }: + # TODO: it would be super nice to have a `L1` piker type + # which "renders" incremental tick updates from a packed + # msg-struct: + # - backend msgs after packed into the type such that we + # can reduce IPC usage but without each backend having + # to do that incremental update logic manually B) + # - would it maybe be more efficient to use this instead? + # https://binance-docs.github.io/apidocs/spot/en/#diff-depth-stream + l1 = L1( + update_id=upid, + sym=sym, + bid=bid, + bsize=bsize, + ask=ask, + asize=asize, + ) + l1.typecast() + + # repack into piker's tick-quote format + yield 'l1', { + 'symbol': l1.sym, + 'ticks': [ + { + 'type': 'bid', + 'price': l1.bid, + 'size': l1.bsize, + }, + { + 'type': 'bsize', + 'price': l1.bid, + 'size': l1.bsize, + }, + { + 'type': 'ask', + 'price': l1.ask, + 'size': l1.asize, + }, + { + 'type': 'asize', + 'price': l1.ask, + 'size': l1.asize, + } + ] + } + + # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams + case { + 'e': 'aggTrade', + }: + # NOTE: this is purely for a definition, + # ``msgspec.Struct`` does not runtime-validate until you + # decode/encode, see: + # https://jcristharif.com/msgspec/structs.html#type-validation + msg = AggTrade(**msg) # TODO: should we .copy() ? 
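                # NOTE: binance sends the price/qty fields as strings
                # on the wire (and the struct isn't validated at
                # construction) hence the explicit float() casts below.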
+ piker_quote: dict = { + 'symbol': msg.s, + 'last': float(msg.p), + 'brokerd_ts': time.time(), + 'ticks': [{ + 'type': 'trade', + 'price': float(msg.p), + 'size': float(msg.q), + 'broker_ts': msg.T, + }], + } + yield 'trade', piker_quote + + +def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]: + ''' + Create a request subscription packet dict. + + - spot: + https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams + + - futes: + https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams + + ''' + return { + 'method': 'SUBSCRIBE', + 'params': [ + f'{pair.lower()}@{sub_name}' + for pair in pairs + ], + 'id': uid + } + + +@acm +async def open_history_client( + mkt: MktPair, + +) -> tuple[Callable, int]: + + symbol: str = mkt.bs_fqme + + # TODO implement history getter for the new storage layer. + async with open_cached_client('binance') as client: + + async def get_ohlc( + timeframe: float, + end_dt: datetime | None = None, + start_dt: datetime | None = None, + + ) -> tuple[ + np.ndarray, + datetime, # start + datetime, # end + ]: + if timeframe != 60: + raise DataUnavailable('Only 1m bars are supported') + + array = await client.bars( + symbol, + start_dt=start_dt, + end_dt=end_dt, + ) + times = array['time'] + if ( + end_dt is None + ): + inow = round(time.time()) + if (inow - times[-1]) > 60: + await tractor.breakpoint() + + start_dt = from_timestamp(times[0]) + end_dt = from_timestamp(times[-1]) + + return array, start_dt, end_dt + + yield get_ohlc, {'erlangs': 3, 'rate': 3} + + +@async_lifo_cache() +async def get_mkt_info( + fqme: str, + +) -> tuple[MktPair, Pair]: + + async with open_cached_client('binance') as client: + + pair: Pair = await client.exch_info(fqme.upper()) + mkt = MktPair( + dst=Asset( + name=pair.baseAsset, + atype='crypto', + tx_tick=digits_to_dec(pair.baseAssetPrecision), + ), + src=Asset( + name=pair.quoteAsset, + atype='crypto', + tx_tick=digits_to_dec(pair.quoteAssetPrecision), + ), + price_tick=pair.price_tick, + size_tick=pair.size_tick, + bs_mktid=pair.symbol, + broker='binance', + ) + both = mkt, pair + return both + + +async def stream_quotes( + + send_chan: trio.abc.SendChannel, + symbols: list[str], + feed_is_live: trio.Event, + loglevel: str = None, + + # startup sync + task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, + +) -> None: + # XXX: required to propagate ``tractor`` loglevel to piker logging + get_console_log(loglevel or tractor.current_actor().loglevel) + + async with ( + send_chan as send_chan, + ): + init_msgs: list[FeedInit] = [] + for sym in symbols: + mkt, pair = await get_mkt_info(sym) + + # build out init msgs according to latest spec + init_msgs.append( + FeedInit(mkt_info=mkt) + ) + + iter_subids = itertools.count() + + @acm + async def subscribe(ws: NoBsWs): + # setup subs + + subid: int = next(iter_subids) + + # trade data (aka L1) + # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker + l1_sub = make_sub(symbols, 'bookTicker', subid) + await ws.send_msg(l1_sub) + + # aggregate (each order clear by taker **not** by maker) + # trades data: + # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams + agg_trades_sub = make_sub(symbols, 'aggTrade', subid) + await ws.send_msg(agg_trades_sub) + + # might get ack from ws server, or maybe some + # other msg still in transit.. 
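            # (a successful SUBSCRIBE is normally acked with a frame
            # like ``{"result": null, "id": <subid>}``)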
+ res = await ws.recv_msg() + subid: str | None = res.get('id') + if subid: + assert res['id'] == subid + + yield + + subs = [] + for sym in symbols: + subs.append("{sym}@aggTrade") + subs.append("{sym}@bookTicker") + + # unsub from all pairs on teardown + if ws.connected(): + await ws.send_msg({ + "method": "UNSUBSCRIBE", + "params": subs, + "id": subid, + }) + + # XXX: do we need to ack the unsub? + # await ws.recv_msg() + + async with ( + open_autorecon_ws( + # XXX: see api docs which show diff addr? + # https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information + # 'wss://ws-api.binance.com:443/ws-api/v3', + 'wss://stream.binance.com/ws', + fixture=subscribe, + ) as ws, + + # avoid stream-gen closure from breaking trio.. + aclosing(stream_messages(ws)) as msg_gen, + ): + typ, quote = await anext(msg_gen) + + # pull a first quote and deliver + while typ != 'trade': + typ, quote = await anext(msg_gen) + + task_status.started((init_msgs, quote)) + + # signal to caller feed is ready for consumption + feed_is_live.set() + + # import time + # last = time.time() + + # start streaming + async for typ, msg in msg_gen: + + # period = time.time() - last + # hz = 1/period if period else float('inf') + # if hz > 60: + # log.info(f'Binance quotez : {hz}') + topic = msg['symbol'].lower() + await send_chan.send({topic: msg}) + # last = time.time() +@tractor.context +async def open_symbol_search( + ctx: tractor.Context, +) -> Client: + async with open_cached_client('binance') as client: + + # load all symbols locally for fast search + cache = await client.exch_info() + await ctx.started() + + async with ctx.open_stream() as stream: + + async for pattern in stream: + # results = await client.exch_info(sym=pattern.upper()) + + matches = fuzzy.extractBests( + pattern, + cache, + score_cutoff=50, + ) + # repack in dict form + await stream.send({ + item[0].symbol: item[0] + for item in matches + }) From dac93dd8f823a2b40c04a4513d0a92ec021e8f59 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 10 Jun 2023 18:25:22 -0400 Subject: [PATCH 17/73] Support USD-M futes live feeds and exchange info Add the usd-futes "Pair" type and thus ability to load all exchange (info for) contracts settled in USDT. Luckily we don't seem to have to modify anything in the `Client` interface (yet) other then a new `.mkt_mode: str` which determines which endpoint set to make requests. Obviously data received from endpoints will likely need diff handling as per below. Deats: - add a bunch more API and WSS top level domains to `.api` with comments - start a `.binance.schemas` module to house the structs for loading different `Pair` subtypes depending on target market: `SpotPair`, `FutesPair`, .. etc. and implement required `MktPair` fields on the new futes type for compatibility with the clearing layer. - add `Client.mkt_mode: str` and a method lookup for endpoint parent paths depending on market via `.mkt_req: dict` Also related to live feeds, - drop `Struct` typecasting instead opting for specific fields both for speed and simplicity atm. - breakout `subscribe()` into module level acm from being embedded closure. - for now swap over the ws feed to be strictly the futes ep (while testing) and set the `.mkt_mode = 'usd_futes'`. - hack in `Client._pairs` to only load `FutesPair`s until we figure out whether we want separate `Client` instances per market or not.. 
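For reference, the endpoint routing described above amounts to picking
a REST "parent path" per `mkt_mode` - a rough sketch only, paraphrasing
the `Client` changes in the diff below (the real client maps modes to
bound session methods rather than bare path strings):

    # map a mkt_mode to the REST parent path used to build request urls
    _parent_eps: dict[str, str] = {
        'spot': '/api/v3',        # spot account/market eps
        'margin': '/sapi/v1',     # margin + extended spot eps
        'usd_futes': '/fapi/v1',  # USD-M futures eps
    }

    def ep_path(mkt_mode: str, method: str) -> str:
        # eg. ep_path('usd_futes', 'exchangeInfo') -> '/fapi/v1/exchangeInfo'
        return f'{_parent_eps[mkt_mode]}/{method}'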
--- piker/brokers/binance/api.py | 218 ++++++++++++++++++------------- piker/brokers/binance/broker.py | 2 - piker/brokers/binance/feed.py | 139 +++++++++++--------- piker/brokers/binance/schemas.py | 114 ++++++++++++++++ 4 files changed, 322 insertions(+), 151 deletions(-) create mode 100644 piker/brokers/binance/schemas.py diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 9ab9f8352..fd0a0c829 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -27,10 +27,10 @@ asynccontextmanager as acm, ) from datetime import datetime -from decimal import Decimal from typing import ( Any, - Union, + Callable, + Literal, ) import hmac import hashlib @@ -52,6 +52,10 @@ SymbolNotFound, get_logger, ) +from .schemas import ( + SpotPair, + FutesPair, +) log = get_logger('piker.brokers.binance') @@ -74,9 +78,26 @@ def get_config() -> dict: log = get_logger(__name__) -_url = 'https://api.binance.com' -_sapi_url = 'https://api.binance.com' -_fapi_url = 'https://testnet.binancefuture.com' +_domain: str = 'binance.com' +_spot_url = _url = f'https://api.{_domain}' +_futes_url = f'https://fapi.{_domain}' + +# test nets +_testnet_futes_url = 'https://testnet.binancefuture.com' + +# WEBsocketz +# NOTE XXX: see api docs which show diff addr? +# https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information +_spot_ws: str = 'wss://stream.binance.com/ws' +# 'wss://ws-api.binance.com:443/ws-api/v3', + +# NOTE: spot test network only allows certain ep sets: +# https://testnet.binance.vision/ +_testnet_spot_ws: str = 'wss://testnet.binance.vision/ws-api/v3' + +# https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams +_futes_ws: str = f'wss://fstream.{_domain}/ws/' +_auth_futes_ws: str = 'wss://fstream-auth.{_domain}/ws/' # Broker specific ohlc schema (rest) @@ -92,61 +113,6 @@ def get_config() -> dict: # ('ignore', float), # ] -# UI components allow this to be declared such that additional -# (historical) fields can be exposed. -# ohlc_dtype = np.dtype(_ohlc_dtype) - -_show_wap_in_history = False - - -# https://binance-docs.github.io/apidocs/spot/en/#exchange-information - -# TODO: make this frozen again by pre-processing the -# filters list to a dict at init time? 
-class Pair(Struct, frozen=True): - symbol: str - status: str - - baseAsset: str - baseAssetPrecision: int - cancelReplaceAllowed: bool - allowTrailingStop: bool - quoteAsset: str - quotePrecision: int - quoteAssetPrecision: int - - baseCommissionPrecision: int - quoteCommissionPrecision: int - - orderTypes: list[str] - - icebergAllowed: bool - ocoAllowed: bool - quoteOrderQtyMarketAllowed: bool - isSpotTradingAllowed: bool - isMarginTradingAllowed: bool - - defaultSelfTradePreventionMode: str - allowedSelfTradePreventionModes: list[str] - - filters: dict[ - str, - Union[str, int, float] - ] - permissions: list[str] - - @property - def price_tick(self) -> Decimal: - # XXX: lul, after manually inspecting the response format we - # just directly pick out the info we need - step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0') - return Decimal(step_size) - - @property - def size_tick(self) -> Decimal: - step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0') - return Decimal(step_size) - class OHLC(Struct): ''' @@ -184,24 +150,43 @@ def binance_timestamp( return int((when.timestamp() * 1000) + (when.microsecond / 1000)) +MarketType: Literal[ + 'spot', + 'margin', + 'usd_futes', + 'coin_futes', +] + + class Client: + ''' + Async ReST API client using ``trio`` + ``asks`` B) - def __init__(self) -> None: + Supports all of the spot, margin and futures endpoints depending + on method. + + ''' + def __init__( + self, + mkt_mode: MarketType = 'spot', + ) -> None: self._pairs: dict[str, Pair] = {} # mkt info table - # live EP sesh + # spot EPs sesh self._sesh = asks.Session(connections=4) self._sesh.base_location: str = _url - # futes testnet rest EPs - self._fapi_sesh = asks.Session(connections=4) - self._fapi_sesh.base_location = _fapi_url - - # sync rest API + # margin and extended spot endpoints session. self._sapi_sesh = asks.Session(connections=4) - self._sapi_sesh.base_location = _sapi_url + self._sapi_sesh.base_location: str = _url + # futes EPs sesh + self._fapi_sesh = asks.Session(connections=4) + self._fapi_sesh.base_location: str = _futes_url + + # for creating API keys see, + # https://www.binance.com/en/support/faq/how-to-create-api-keys-on-binance-360002502072 conf: dict = get_config() self.api_key: str = conf.get('api_key', '') self.api_secret: str = conf.get('api_secret', '') @@ -211,8 +196,16 @@ def __init__(self) -> None: if self.api_key: api_key_header = {'X-MBX-APIKEY': self.api_key} self._sesh.headers.update(api_key_header) - self._fapi_sesh.headers.update(api_key_header) self._sapi_sesh.headers.update(api_key_header) + self._fapi_sesh.headers.update(api_key_header) + + self.mkt_mode: MarketType = mkt_mode + self.mkt_req: dict[str, Callable] = { + 'spot': self._api, + 'margin': self._sapi, + 'usd_futes': self._fapi, + # 'futes_coin': self._dapi, # TODO + } def _get_signature(self, data: OrderedDict) -> str: @@ -235,6 +228,9 @@ def _get_signature(self, data: OrderedDict) -> str: ) return msg_auth.hexdigest() + # TODO: factor all these _api methods into a single impl + # which looks up the parent path for eps depending on a + # mkt_mode: MarketType input! async def _api( self, method: str, @@ -243,7 +239,15 @@ async def _api( action: str = 'get' ) -> dict[str, Any]: + ''' + Make a /api/v3/ SPOT account/market endpoint request. + + For eg. 
rest market-data and spot-account-trade eps use + this endpoing parent path: + - https://binance-docs.github.io/apidocs/spot/en/#market-data-endpoints + - https://binance-docs.github.io/apidocs/spot/en/#spot-account-trade + ''' if signed: params['signature'] = self._get_signature(params) @@ -258,11 +262,19 @@ async def _api( async def _fapi( self, method: str, - params: Union[dict, OrderedDict], + params: dict | OrderedDict, signed: bool = False, action: str = 'get' + ) -> dict[str, Any]: + ''' + Make a /fapi/v3/ USD-M FUTURES account/market endpoint + request. + + For all USD-M futures endpoints use this parent path: + https://binance-docs.github.io/apidocs/futures/en/#market-data-endpoints + ''' if signed: params['signature'] = self._get_signature(params) @@ -277,11 +289,21 @@ async def _fapi( async def _sapi( self, method: str, - params: Union[dict, OrderedDict], + params: dict | OrderedDict, signed: bool = False, action: str = 'get' + ) -> dict[str, Any]: + ''' + Make a /api/v3/ SPOT/MARGIN account/market endpoint request. + + For eg. all margin and advancecd spot account eps use this + endpoing parent path: + - https://binance-docs.github.io/apidocs/spot/en/#margin-account-trade + - https://binance-docs.github.io/apidocs/spot/en/#listen-key-spot + - https://binance-docs.github.io/apidocs/spot/en/#spot-algo-endpoints + ''' if signed: params['signature'] = self._get_signature(params) @@ -297,10 +319,19 @@ async def exch_info( self, sym: str | None = None, + mkt_type: MarketType = 'spot', + ) -> dict[str, Pair] | Pair: ''' - Fresh exchange-pairs info query for symbol ``sym: str``: - https://binance-docs.github.io/apidocs/spot/en/#exchange-information + Fresh exchange-pairs info query for symbol ``sym: str``. + + Depending on `mkt_type` different api eps are used: + - spot: + https://binance-docs.github.io/apidocs/spot/en/#exchange-information + - usd futes: + https://binance-docs.github.io/apidocs/futures/en/#check-server-time + - coin futes: + https://binance-docs.github.io/apidocs/delivery/en/#exchange-information ''' cached_pair = self._pairs.get(sym) @@ -313,25 +344,33 @@ async def exch_info( sym = sym.lower() params = {'symbol': sym} - resp = await self._api('exchangeInfo', params=params) + resp = await self.mkt_req[self.mkt_mode]('exchangeInfo', params=params) entries = resp['symbols'] if not entries: raise SymbolNotFound(f'{sym} not found:\n{resp}') - # pre-process .filters field into a table + # import tractor + # await tractor.breakpoint() pairs = {} for item in entries: symbol = item['symbol'] - filters = {} - filters_ls: list = item.pop('filters') - for entry in filters_ls: - ftype = entry['filterType'] - filters[ftype] = entry - - pairs[symbol] = Pair( - filters=filters, - **item, - ) + + # for spot mkts, pre-process .filters field into + # a table.. + filters_ls: list = item.pop('filters', False) + if filters_ls: + filters = {} + for entry in filters_ls: + ftype = entry['filterType'] + filters[ftype] = entry + + # TODO: lookup pair schema by mkt type + # pair_type = mkt_type + + # pairs[symbol] = SpotPair( + # filters=filters, + # ) + pairs[symbol] = FutesPair(**item) # pairs = { # item['symbol']: Pair(**item) for item in entries @@ -343,8 +382,6 @@ async def exch_info( else: return self._pairs - symbol_info = exch_info - async def search_symbols( self, pattern: str, @@ -448,7 +485,8 @@ async def get_positions( signed=True ) log.info(f'done. 
len {len(resp)}') - await trio.sleep(3) + + # await trio.sleep(3) return positions, volumes @@ -457,6 +495,8 @@ async def get_deposits( recv_window: int = 60000 ) -> list: + # TODO: can't we drop this since normal dicts are + # ordered implicitly in mordern python? params = OrderedDict([ ('recvWindow', recv_window), ('timestamp', binance_timestamp(now())) @@ -594,7 +634,7 @@ async def periodic_keep_alive( @acm async def get_client() -> Client: - client = Client() + client = Client(mkt_mode='usd_futes') log.info('Caching exchange infos..') await client.exch_info() yield client diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 53dd7a64d..d2edbd9af 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -184,5 +184,3 @@ async def trades_dialogue( breakpoint() await ems_stream.send(msg.dict()) - - diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index 9ecda184c..83e7bee03 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -24,11 +24,13 @@ aclosing, ) from datetime import datetime +from functools import partial import itertools from typing import ( Any, AsyncGenerator, Callable, + Generator, ) import time @@ -59,11 +61,12 @@ from piker.brokers._util import ( DataUnavailable, get_logger, - get_console_log, ) from .api import ( Client, +) +from .schemas import ( Pair, ) @@ -94,7 +97,7 @@ class AggTrade(Struct, frozen=True): l: int # noqa Last trade ID T: int # Trade time m: bool # Is the buyer the market maker? - M: bool # Ignore + M: bool | None = None # Ignore async def stream_messages( @@ -134,7 +137,9 @@ async def stream_messages( ask=ask, asize=asize, ) - l1.typecast() + # for speed probably better to only specifically + # cast fields we need in numerical form? + # l1.typecast() # repack into piker's tick-quote format yield 'l1', { @@ -142,23 +147,23 @@ async def stream_messages( 'ticks': [ { 'type': 'bid', - 'price': l1.bid, - 'size': l1.bsize, + 'price': float(l1.bid), + 'size': float(l1.bsize), }, { 'type': 'bsize', - 'price': l1.bid, - 'size': l1.bsize, + 'price': float(l1.bid), + 'size': float(l1.bsize), }, { 'type': 'ask', - 'price': l1.ask, - 'size': l1.asize, + 'price': float(l1.ask), + 'size': float(l1.asize), }, { 'type': 'asize', - 'price': l1.ask, - 'size': l1.asize, + 'price': float(l1.ask), + 'size': float(l1.asize), } ] } @@ -281,6 +286,56 @@ async def get_mkt_info( return both +@acm +async def subscribe( + ws: NoBsWs, + symbols: list[str], + + # defined once at import time to keep a global state B) + iter_subids: Generator[int, None, None] = itertools.count(), + +): + # setup subs + + subid: int = next(iter_subids) + + # trade data (aka L1) + # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker + l1_sub = make_sub(symbols, 'bookTicker', subid) + await ws.send_msg(l1_sub) + + # aggregate (each order clear by taker **not** by maker) + # trades data: + # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams + agg_trades_sub = make_sub(symbols, 'aggTrade', subid) + await ws.send_msg(agg_trades_sub) + + # might get ack from ws server, or maybe some + # other msg still in transit.. 
+ res = await ws.recv_msg() + subid: str | None = res.get('id') + if subid: + assert res['id'] == subid + + yield + + subs = [] + for sym in symbols: + subs.append("{sym}@aggTrade") + subs.append("{sym}@bookTicker") + + # unsub from all pairs on teardown + if ws.connected(): + await ws.send_msg({ + "method": "UNSUBSCRIBE", + "params": subs, + "id": subid, + }) + + # XXX: do we need to ack the unsub? + # await ws.recv_msg() + + async def stream_quotes( send_chan: trio.abc.SendChannel, @@ -292,8 +347,6 @@ async def stream_quotes( task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, ) -> None: - # XXX: required to propagate ``tractor`` loglevel to piker logging - get_console_log(loglevel or tractor.current_actor().loglevel) async with ( send_chan as send_chan, @@ -307,57 +360,21 @@ async def stream_quotes( FeedInit(mkt_info=mkt) ) - iter_subids = itertools.count() - - @acm - async def subscribe(ws: NoBsWs): - # setup subs - - subid: int = next(iter_subids) - - # trade data (aka L1) - # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker - l1_sub = make_sub(symbols, 'bookTicker', subid) - await ws.send_msg(l1_sub) - # aggregate (each order clear by taker **not** by maker) - # trades data: - # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams - agg_trades_sub = make_sub(symbols, 'aggTrade', subid) - await ws.send_msg(agg_trades_sub) - - # might get ack from ws server, or maybe some - # other msg still in transit.. - res = await ws.recv_msg() - subid: str | None = res.get('id') - if subid: - assert res['id'] == subid - - yield - - subs = [] - for sym in symbols: - subs.append("{sym}@aggTrade") - subs.append("{sym}@bookTicker") - - # unsub from all pairs on teardown - if ws.connected(): - await ws.send_msg({ - "method": "UNSUBSCRIBE", - "params": subs, - "id": subid, - }) - - # XXX: do we need to ack the unsub? - # await ws.recv_msg() + # TODO: detect whether futes or spot contact was requested + from .api import ( + _futes_ws, + # _spot_ws, + ) + wsep: str = _futes_ws async with ( open_autorecon_ws( - # XXX: see api docs which show diff addr? - # https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information - # 'wss://ws-api.binance.com:443/ws-api/v3', - 'wss://stream.binance.com/ws', - fixture=subscribe, + wsep, + fixture=partial( + subscribe, + symbols=symbols, + ), ) as ws, # avoid stream-gen closure from breaking trio.. @@ -387,6 +404,8 @@ async def subscribe(ws: NoBsWs): topic = msg['symbol'].lower() await send_chan.send({topic: msg}) # last = time.time() + + @tractor.context async def open_symbol_search( ctx: tractor.Context, diff --git a/piker/brokers/binance/schemas.py b/piker/brokers/binance/schemas.py new file mode 100644 index 000000000..df072ba1f --- /dev/null +++ b/piker/brokers/binance/schemas.py @@ -0,0 +1,114 @@ +# piker: trading gear for hackers +# Copyright (C) +# Guillermo Rodriguez (aka ze jefe) +# Tyler Goodlet +# (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. 
+ +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +""" +Per market data-type definitions and schemas types. + +""" +from decimal import Decimal + +from piker.data.types import Struct + +class Pair(Struct, frozen=True): + symbol: str + status: str + orderTypes: list[str] + + # src + quoteAsset: str + quotePrecision: int + + # dst + baseAsset: str + baseAssetPrecision: int + + +class SpotPair(Struct, frozen=True): + + cancelReplaceAllowed: bool + allowTrailingStop: bool + quoteAssetPrecision: int + + baseCommissionPrecision: int + quoteCommissionPrecision: int + + icebergAllowed: bool + ocoAllowed: bool + quoteOrderQtyMarketAllowed: bool + isSpotTradingAllowed: bool + isMarginTradingAllowed: bool + + defaultSelfTradePreventionMode: str + allowedSelfTradePreventionModes: list[str] + + filters: dict[ + str, + str | int | float, + ] + permissions: list[str] + + @property + def price_tick(self) -> Decimal: + # XXX: lul, after manually inspecting the response format we + # just directly pick out the info we need + step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0') + return Decimal(step_size) + + @property + def size_tick(self) -> Decimal: + step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0') + return Decimal(step_size) + + +class FutesPair(Pair): + symbol: str # 'BTCUSDT', + pair: str # 'BTCUSDT', + baseAssetPrecision: int # 8, + contractType: str # 'PERPETUAL', + deliveryDate: int # 4133404800000, + liquidationFee: float # '0.012500', + maintMarginPercent: float # '2.5000', + marginAsset: str # 'USDT', + marketTakeBound: float # '0.05', + maxMoveOrderLimit: int # 10000, + onboardDate: int # 1569398400000, + pricePrecision: int # 2, + quantityPrecision: int # 3, + quoteAsset: str # 'USDT', + quotePrecision: int # 8, + requiredMarginPercent: float # '5.0000', + settlePlan: int # 0, + timeInForce: list[str] # ['GTC', 'IOC', 'FOK', 'GTX'], + triggerProtect: float # '0.0500', + underlyingSubType: list[str] # ['PoW'], + underlyingType: str # 'COIN' + + # NOTE: for compat with spot pairs and `MktPair.src: Asset` + # processing.. 
+ @property + def quoteAssetPrecision(self) -> int: + return self.quotePrecision + + @property + def price_tick(self) -> Decimal: + return Decimal(self.pricePrecision) + + @property + def size_tick(self) -> Decimal: + return Decimal(self.quantityPrecision) From aa49c38d55eacdb30e981ef71e367076f5c2228d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 12 Jun 2023 14:03:37 -0400 Subject: [PATCH 18/73] Add `binance` section to `brokers.toml` --- config/brokers.toml | 33 ++++++++++++++++++++++----------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/config/brokers.toml b/config/brokers.toml index c9384461e..96c67b45d 100644 --- a/config/brokers.toml +++ b/config/brokers.toml @@ -1,15 +1,14 @@ -[questrade] -refresh_token = '' -access_token = '' -api_server = 'https://api06.iq.questrade.com/' -expires_in = 1800 -token_type = 'Bearer' -expires_at = 1616095326.355846 - - +################ +# ---- CEXY ---- +################ [binance] -api_key = '' -api_secret = '' +futes.use_testnet = true +futes.api_key = '' +futes.api_secret = '' + +spot.use_testnet = true +spot.api_key = '' +spot.api_secret = '' [deribit] @@ -29,6 +28,18 @@ key_secret = '' key_passphrase = '' +################ +# -- BROKERZ --- +################ +[questrade] +refresh_token = '' +access_token = '' +api_server = 'https://api06.iq.questrade.com/' +expires_in = 1800 +token_type = 'Bearer' +expires_at = 1616095326.355846 + + [ib] hosts = [ '127.0.0.1', From 8220bd152e771e0df6be74efc26a0035a53a5543 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 12 Jun 2023 14:04:09 -0400 Subject: [PATCH 19/73] Extend `MktPair` doc string to refer to binance pairs --- piker/accounting/_mktinfo.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 2d2ebccdc..be1cc5356 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -207,6 +207,33 @@ class MktPair(Struct, frozen=True): /.... -> .. ^ -- optional tokens ------------------------------- ^ + + Notes: + ------ + + Some venues provide a different semantic (which we frankly find + confusing and non-general) such as "base" and "quote" asset. + For example this is how `binance` defines the terms: + + https://binance-docs.github.io/apidocs/websocket_api/en/#public-api-definitions + https://binance-docs.github.io/apidocs/futures/en/#public-endpoints-info + + - *base* asset refers to the asset that is the *quantity* of a symbol. + - *quote* asset refers to the asset that is the *price* of a symbol. + + In other words the "quote" asset is the asset that the market + is pricing "buys" *in*, and the *base* asset it the one that the market + allows you to "buy" an *amount of*. Put more simply the *quote* + asset is our "source" asset and the *base* asset is our "destination" + asset. + + This defintion can be further understood reading our + `.brokers.binance.api.Pair` type wherein the + `Pair.[quote/base]AssetPrecision` field determines the (transfer) + transaction precision available per asset; i.e. the satoshis + unit in bitcoin for representing the minimum size of a + transaction that can take place on the blockchain. 
+ ''' dst: str | Asset # "destination asset" (name) used to buy *to* From d173d373cba3f57c333f1e306c73c3be3c3ca97e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 12 Jun 2023 20:25:09 -0400 Subject: [PATCH 20/73] kraken: raise `SymbolNotFound` on symbology query errors --- piker/brokers/kraken/api.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index de2be68c1..e9a3f607e 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -638,7 +638,10 @@ def normalize_symbol( the 'AssetPairs' endpoint, see methods above. ''' - return cls._ntable[ticker].lower() + try: + return cls._ntable[ticker].lower() + except KeyError as ke: + raise SymbolNotFound(f'kraken has no {ke.args[0]}') @acm From dc23f1c9bdd731ac979e84e8c37dfd1ca0b38117 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 12 Jun 2023 21:55:45 -0400 Subject: [PATCH 21/73] binance: fix `FutesPair` to have `.filters` Not sure why it seemed like futures pairs didn't have this field but add it to the parent `Pair` type as well as drop the overridden `.price/size_tick` fields instead doing the same as in spot as well. Also moves the `MarketType: Literal` (for the `Client.mkt_mode: str`) and adds a pair type lookup table for exchange info loading. --- piker/brokers/binance/schemas.py | 58 +++++++++++++++++++------------- 1 file changed, 35 insertions(+), 23 deletions(-) diff --git a/piker/brokers/binance/schemas.py b/piker/brokers/binance/schemas.py index df072ba1f..c909c39cc 100644 --- a/piker/brokers/binance/schemas.py +++ b/piker/brokers/binance/schemas.py @@ -21,10 +21,15 @@ Per market data-type definitions and schemas types. """ +from __future__ import annotations +from typing import ( + Literal, +) from decimal import Decimal from piker.data.types import Struct + class Pair(Struct, frozen=True): symbol: str status: str @@ -38,8 +43,25 @@ class Pair(Struct, frozen=True): baseAsset: str baseAssetPrecision: int + filters: dict[ + str, + str | int | float, + ] + + @property + def price_tick(self) -> Decimal: + # XXX: lul, after manually inspecting the response format we + # just directly pick out the info we need + step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0') + return Decimal(step_size) + + @property + def size_tick(self) -> Decimal: + step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0') + return Decimal(step_size) + -class SpotPair(Struct, frozen=True): +class SpotPair(Pair, frozen=True): cancelReplaceAllowed: bool allowTrailingStop: bool @@ -56,24 +78,8 @@ class SpotPair(Struct, frozen=True): defaultSelfTradePreventionMode: str allowedSelfTradePreventionModes: list[str] - - filters: dict[ - str, - str | int | float, - ] permissions: list[str] - @property - def price_tick(self) -> Decimal: - # XXX: lul, after manually inspecting the response format we - # just directly pick out the info we need - step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0') - return Decimal(step_size) - - @property - def size_tick(self) -> Decimal: - step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0') - return Decimal(step_size) class FutesPair(Pair): @@ -105,10 +111,16 @@ class FutesPair(Pair): def quoteAssetPrecision(self) -> int: return self.quotePrecision - @property - def price_tick(self) -> Decimal: - return Decimal(self.pricePrecision) - @property - def size_tick(self) -> Decimal: - return Decimal(self.quantityPrecision) +MarketType = Literal[ + 'spot', + 'margin', + 'usd_futes', 
+ 'coin_futes', +] + + +PAIRTYPES: dict[MarketType, Pair] = { + 'spot': SpotPair, + 'usd_futes': FutesPair, +} From e68c55e9bd56bfcdfec17665e1db37a1c1049577 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 12 Jun 2023 21:58:46 -0400 Subject: [PATCH 22/73] Switch `Client.mkt_mode` to 'usd_futes' if 'perp' in fqme The beginning of supporting multi-markets through a common API client. Change to futes market mode in the client if `.perp.` is matched in the fqme. Currently the exchange info and live feed ws impl will swap out for their usd-margin futures market equivalent (endpoints). --- piker/brokers/binance/api.py | 51 +++++++++++++++------------------ piker/brokers/binance/broker.py | 1 + piker/brokers/binance/feed.py | 23 +++++++++++++-- 3 files changed, 45 insertions(+), 30 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index fd0a0c829..8caaa7608 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -30,7 +30,6 @@ from typing import ( Any, Callable, - Literal, ) import hmac import hashlib @@ -53,8 +52,9 @@ get_logger, ) from .schemas import ( - SpotPair, - FutesPair, + PAIRTYPES, + Pair, + MarketType, ) log = get_logger('piker.brokers.binance') @@ -150,14 +150,6 @@ def binance_timestamp( return int((when.timestamp() * 1000) + (when.microsecond / 1000)) -MarketType: Literal[ - 'spot', - 'margin', - 'usd_futes', - 'coin_futes', -] - - class Client: ''' Async ReST API client using ``trio`` + ``asks`` B) @@ -319,7 +311,7 @@ async def exch_info( self, sym: str | None = None, - mkt_type: MarketType = 'spot', + mkt_type: MarketType | None = None, ) -> dict[str, Pair] | Pair: ''' @@ -334,7 +326,10 @@ async def exch_info( https://binance-docs.github.io/apidocs/delivery/en/#exchange-information ''' - cached_pair = self._pairs.get(sym) + mkt_type: MarketType = mkt_type or self.mkt_mode + cached_pair = self._pairs.get( + (sym, mkt_type) + ) if cached_pair: return cached_pair @@ -344,16 +339,15 @@ async def exch_info( sym = sym.lower() params = {'symbol': sym} - resp = await self.mkt_req[self.mkt_mode]('exchangeInfo', params=params) + resp = await self.mkt_req[mkt_type]('exchangeInfo', params=params) entries = resp['symbols'] if not entries: raise SymbolNotFound(f'{sym} not found:\n{resp}') # import tractor # await tractor.breakpoint() - pairs = {} + pairs: dict[str, Pair] = {} for item in entries: - symbol = item['symbol'] # for spot mkts, pre-process .filters field into # a table.. 
@@ -364,17 +358,14 @@ async def exch_info( ftype = entry['filterType'] filters[ftype] = entry - # TODO: lookup pair schema by mkt type - # pair_type = mkt_type + item['filters'] = filters - # pairs[symbol] = SpotPair( - # filters=filters, - # ) - pairs[symbol] = FutesPair(**item) + symbol = item['symbol'] + pair_type: Pair = PAIRTYPES[mkt_type or self.mkt_mode] + pairs[(symbol, mkt_type)] = pair_type( + **item, + ) - # pairs = { - # item['symbol']: Pair(**item) for item in entries - # } self._pairs.update(pairs) if sym is not None: @@ -633,8 +624,12 @@ async def periodic_keep_alive( @acm -async def get_client() -> Client: - client = Client(mkt_mode='usd_futes') - log.info('Caching exchange infos..') +async def get_client( + mkt_mode: str = 'spot', +) -> Client: + client = Client(mkt_mode=mkt_mode) + + log.info(f'{client} in {mkt_mode} mode: caching exchange infos..') await client.exch_info() + yield client diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index d2edbd9af..5c17b1947 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -104,6 +104,7 @@ async def trades_dialogue( ) -> AsyncIterator[dict[str, Any]]: async with open_cached_client('binance') as client: + await tractor.breakpoint() if not client.api_key: await ctx.started('paper') return diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index 83e7bee03..561b5fbca 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -50,6 +50,7 @@ from piker.accounting._mktinfo import ( Asset, MktPair, + unpack_fqme, digits_to_dec, ) from piker.data.types import Struct @@ -263,9 +264,27 @@ async def get_mkt_info( ) -> tuple[MktPair, Pair]: - async with open_cached_client('binance') as client: + # uppercase since kraken bs_mktid is always upper + if 'binance' not in fqme: + fqme += '.binance' + + bs_fqme, _, broker = fqme.rpartition('.') + broker, mkt_ep, venue, suffix = unpack_fqme(fqme) + # bs_fqme, _, broker = fqme.partition('.') + + mkt_mode: str = 'spot' + if 'perp' in bs_fqme: + mkt_mode = 'usd_futes' + + async with open_cached_client( + 'binance', + mkt_mode=mkt_mode, + ) as client: + + pair_str: str = mkt_ep.upper() + pair: Pair = await client.exch_info(pair_str) - pair: Pair = await client.exch_info(fqme.upper()) + await tractor.breakpoint() mkt = MktPair( dst=Asset( name=pair.baseAsset, From 4c4787ce583ca674b75efa01b962bd785675159d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 13 Jun 2023 15:20:27 -0400 Subject: [PATCH 23/73] Add a "perpetual_future" mkt info type --- piker/accounting/__init__.py | 2 ++ piker/accounting/_mktinfo.py | 9 ++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index 778bdd4ee..8f55217c5 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -40,6 +40,7 @@ MktPair, Symbol, unpack_fqme, + _derivs as DerivTypes, ) from ._allocate import ( mk_allocator, @@ -65,6 +66,7 @@ 'open_pps', 'open_trade_ledger', 'unpack_fqme', + 'DerivTypes', ] diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index be1cc5356..1ac853691 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -39,6 +39,7 @@ from ..data.types import Struct +# TODO: make these literals.. 
_underlyings: list[str] = [
    'stock',
    'bond',
@@ -52,6 +53,7 @@
    'swap',
    'future',
    'continuous_future',
+    'perpetual_future',
    'option',
    'futures_option',
@@ -540,10 +542,15 @@ def quantize(
    # TODO: BACKWARD COMPAT, TO REMOVE?
    @property
    def type_key(self) -> str:
+
+        # if set explicitly then use it!
+        if self._atype:
+            return self._atype
+
        if isinstance(self.dst, Asset):
            return str(self.dst.atype)
-        return self._atype
+        return 'unknown'

    @property
    def price_tick_digits(self) -> int:

From 8e03212e40c22b5d78088e941e720be95248388a Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 14 Jun 2023 13:16:13 -0400
Subject: [PATCH 24/73] Always expand FQMEs with .venue and .expiry values

Since there are indeed multiple futures (perp swap) contracts, including
a set with expiries, we need a way to distinguish through search and
`FutesPair` lookup which contract we're requesting. To solve this,
extend `FutesPair` and `SpotPair` to include a `.bs_fqme` field similar
to `MktPair`, and key the `Client._pairs: ChainMap`'s backing tables
with these expanded fqmes. For example the perp swap now expands to
`btcusdt.usdtm.perp`, which fills in the venue as `'usdtm'` (the
usd-margined futures market) and the expiry as `'perp'` (as before).
This distinguishes it explicitly from, for ex., coin-margined contracts
which could instead (once we add that support) use fqmes of the sort
`btcusdt.m.perp.binance`, making it explicit and obvious which contract
is which B)

Further we interpolate the venue token to `spot` for spot markets going
forward, which again makes cex spot markets explicit in symbology; we'll
need to add this as well to other cex backends ;)

Other misc detalles:
- change the USD-M futes `MarketType` key to `'usdtm_futes'`.
- add `Pair.bs_fqme: str` for all pair subtypes with particular special
  contract handling for futes including quarterlies, perps and the
  weird "DEFI" ones..
- drop `OHLC.bar_wap` since it's no longer in the default time-series
  schema and we weren't filling it in here anyway..
- `Client._pairs: ChainMap` is now a read-only, fqme-re-keyed view into
  the underlying per-venue pairs tables (which themselves are ideally
  keyed identically cross-venue) which we populate inside
  `Client.exch_info()`, which itself now does concurrent pairs-info
  fetching via a new `._cache_pairs()` using a `trio` task per API-venue.
- support klines history queries across all venues using the same
  `Client.mkt_mode_req[Client.mkt_mode]` style as we're doing for
  `.exch_info()` B)
- use the venue-specific klines history query limits where documented.
- handle the new FQME venue / expiry fields inside the `get_mkt_info()`
  ep such that the correct `Client.mkt_mode` is selected based on
  parsing the desired spot vs. derivative contract.
- do venue-specific WSS-addr lookup based on output from
  `get_mkt_info()`; use the usdtm venue WSS addr if a `FutesPair` is
  loaded.
- set `topic: str` to the `.bs_fqme` value in live feed quotes!
- use `Pair.bs_fqme: str` values for the fuzzy-search input set.
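To make the new symbology concrete, here's a minimal sketch of the
fqme -> (market-mode, `bs_fqme`) translation described above; the
`pick_mkt_mode()` helper and its parsing are illustrative only (the real
logic lives in `unpack_fqme()`, `get_mkt_info()` and the per-pair
`.bs_fqme` properties):

    def pick_mkt_mode(fqme: str) -> tuple[str, str]:
        # eg. 'btcusdt.usdtm.perp.binance' -> ('usdtm_futes', 'BTCUSDT.USDTM.PERP')
        tokens: list[str] = fqme.lower().removesuffix('.binance').split('.')
        symbol, venue, expiry = (tokens + ['spot', ''])[:3]
        mkt_mode: str = 'usdtm_futes' if venue != 'spot' else 'spot'
        bs_fqme: str = '.'.join(t for t in (symbol, venue, expiry) if t).upper()
        return mkt_mode, bs_fqme

    assert pick_mkt_mode('btcusdt.usdtm.perp.binance') == (
        'usdtm_futes', 'BTCUSDT.USDTM.PERP',
    )
    assert pick_mkt_mode('ethusdt.binance') == ('spot', 'ETHUSDT.SPOT')

i.e. spot markets get an explicit `.SPOT` venue token while usd-margined
contracts expand to `.USDTM.PERP` (or their quarterly expiry), and that
expanded key is what the `Client._pairs` chain-map is re-keyed with.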
--- piker/brokers/_util.py | 2 +- piker/brokers/binance/api.py | 192 +++++++++++++++++++------------ piker/brokers/binance/broker.py | 3 +- piker/brokers/binance/feed.py | 94 +++++++++++---- piker/brokers/binance/schemas.py | 44 ++++++- 5 files changed, 231 insertions(+), 104 deletions(-) diff --git a/piker/brokers/_util.py b/piker/brokers/_util.py index 7e7a3ec7d..baf2c7b2e 100644 --- a/piker/brokers/_util.py +++ b/piker/brokers/_util.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0) +# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 8caaa7608..52c510a6c 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -22,7 +22,10 @@ """ from __future__ import annotations -from collections import OrderedDict +from collections import ( + OrderedDict, + ChainMap, +) from contextlib import ( asynccontextmanager as acm, ) @@ -30,6 +33,7 @@ from typing import ( Any, Callable, + Type, ) import hmac import hashlib @@ -138,10 +142,6 @@ class OHLC(Struct): buy_quote_vol: float ignore: int - # null the place holder for `bar_wap` until we - # figure out what to extract for this. - bar_wap: float = 0.0 - # convert datetime obj timestamp to unixtime in milliseconds def binance_timestamp( @@ -160,10 +160,24 @@ class Client: ''' def __init__( self, + + # TODO: change this to `Client.[mkt_]venue: MarketType`? mkt_mode: MarketType = 'spot', + ) -> None: - self._pairs: dict[str, Pair] = {} # mkt info table + # build out pair info tables for each market type + # and wrap in a chain-map view for search / query. + self._spot_pairs: dict[str, Pair] = {} # spot info table + self._ufutes_pairs: dict[str, Pair] = {} # usd-futures table + self._mkt2pairs: dict[str, dict] = { + 'spot': self._spot_pairs, + 'usdtm_futes': self._ufutes_pairs, + } + # NOTE: only stick in the spot table for now until exchange info + # is loaded, since at that point we'll suffix all the futes + # market symbols for use by search. See `.exch_info()`. 
+ self._pairs: ChainMap[str, Pair] = ChainMap() # spot EPs sesh self._sesh = asks.Session(connections=4) @@ -192,10 +206,10 @@ def __init__( self._fapi_sesh.headers.update(api_key_header) self.mkt_mode: MarketType = mkt_mode - self.mkt_req: dict[str, Callable] = { + self.mkt_mode_req: dict[str, Callable] = { 'spot': self._api, 'margin': self._sapi, - 'usd_futes': self._fapi, + 'usdtm_futes': self._fapi, # 'futes_coin': self._dapi, # TODO } @@ -307,6 +321,37 @@ async def _sapi( return resproc(resp, log) + async def _cache_pairs( + self, + mkt_type: str, + + ) -> None: + # lookup internal mkt-specific pair table to update + pair_table: dict[str, Pair] = self._mkt2pairs[mkt_type] + + # make API request(s) + resp = await self.mkt_mode_req[mkt_type]( + 'exchangeInfo', + params={}, # NOTE: retrieve all symbols by default + ) + entries = resp['symbols'] + if not entries: + raise SymbolNotFound(f'No market pairs found!?:\n{resp}') + + for item in entries: + filters_ls: list = item.pop('filters', False) + if filters_ls: + filters = {} + for entry in filters_ls: + ftype = entry['filterType'] + filters[ftype] = entry + + item['filters'] = filters + + pair_type: Type = PAIRTYPES[mkt_type] + pair: Pair = pair_type(**item) + pair_table[pair.symbol.upper()] = pair + async def exch_info( self, sym: str | None = None, @@ -326,66 +371,51 @@ async def exch_info( https://binance-docs.github.io/apidocs/delivery/en/#exchange-information ''' - mkt_type: MarketType = mkt_type or self.mkt_mode - cached_pair = self._pairs.get( - (sym, mkt_type) - ) - if cached_pair: + pair_table: dict[str, Pair] = self._mkt2pairs[ + mkt_type or self.mkt_mode + ] + if cached_pair := pair_table.get(sym): return cached_pair - # retrieve all symbols by default - params = {} - if sym is not None: - sym = sym.lower() - params = {'symbol': sym} - - resp = await self.mkt_req[mkt_type]('exchangeInfo', params=params) - entries = resp['symbols'] - if not entries: - raise SymbolNotFound(f'{sym} not found:\n{resp}') - - # import tractor - # await tractor.breakpoint() - pairs: dict[str, Pair] = {} - for item in entries: - - # for spot mkts, pre-process .filters field into - # a table.. - filters_ls: list = item.pop('filters', False) - if filters_ls: - filters = {} - for entry in filters_ls: - ftype = entry['filterType'] - filters[ftype] = entry - - item['filters'] = filters - - symbol = item['symbol'] - pair_type: Pair = PAIRTYPES[mkt_type or self.mkt_mode] - pairs[(symbol, mkt_type)] = pair_type( - **item, + # params = {} + # if sym is not None: + # params = {'symbol': sym} + + mkts: list[str] = ['spot', 'usdtm_futes'] + if mkt_type: + mkts: list[str] = [mkt_type] + + async with trio.open_nursery() as rn: + for mkt_type in mkts: + rn.start_soon( + self._cache_pairs, + mkt_type, + ) + + # make merged view of all market-type pairs but + # use market specific `Pair.bs_fqme` for keys! 
+ for venue, venue_pairs_table in self._mkt2pairs.items(): + self._pairs.maps.append( + {pair.bs_fqme: pair + for pair in venue_pairs_table.values()} ) - self._pairs.update(pairs) - - if sym is not None: - return pairs[sym] - else: - return self._pairs + return pair_table[sym] if sym else self._pairs async def search_symbols( self, pattern: str, limit: int = None, ) -> dict[str, Any]: - if self._pairs is not None: - data = self._pairs - else: - data = await self.exch_info() + + # if self._spot_pairs is not None: + # data = self._spot_pairs + # else: + fq_pairs: dict = await self.exch_info() matches = fuzzy.extractBests( pattern, - data, + fq_pairs, score_cutoff=50, ) # repack in dict form @@ -395,12 +425,24 @@ async def search_symbols( async def bars( self, symbol: str, + start_dt: datetime | None = None, end_dt: datetime | None = None, - limit: int = 1000, # <- max allowed per query + as_np: bool = True, - ) -> dict: + ) -> list[tuple] | np.ndarray: + + # NOTE: diff market-venues have diff datums limits: + # - spot max is 1k + # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data + # - usdm futes max is 1500 + # https://binance-docs.github.io/apidocs/futures/en/#kline-candlestick-data + limits: dict[str, int] = { + 'spot': 1000, + 'usdtm_futes': 1500, + } + limit = limits[self.mkt_mode] if end_dt is None: end_dt = now('UTC').add(minutes=1) @@ -413,7 +455,8 @@ async def bars( end_time = binance_timestamp(end_dt) # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data - bars = await self._api( + bars = await self.mkt_mode_req[self.mkt_mode]( + # bars = await self._api( 'klines', params={ 'symbol': symbol.upper(), @@ -423,13 +466,7 @@ async def bars( 'limit': limit } ) - - # TODO: pack this bars scheme into a ``pydantic`` validator type: - # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data - - # TODO: we should port this to ``pydantic`` to avoid doing - # manual validation ourselves.. - new_bars = [] + new_bars: list[tuple] = [] for i, bar in enumerate(bars): bar = OHLC(*bar) @@ -449,11 +486,13 @@ async def bars( new_bars.append((i,) + tuple(row)) - array = np.array( + if not as_np: + return bars + + return np.array( new_bars, dtype=def_iohlcv_fields, - ) if as_np else bars - return array + ) async def get_positions( self, @@ -476,7 +515,6 @@ async def get_positions( signed=True ) log.info(f'done. 
len {len(resp)}') - # await trio.sleep(3) return positions, volumes @@ -530,7 +568,7 @@ async def submit_limit( await self.cache_symbols() - # asset_precision = self._pairs[symbol]['baseAssetPrecision'] + # asset_precision = self._spot_pairs[symbol]['baseAssetPrecision'] # quote_precision = self._pairs[symbol]['quoteAssetPrecision'] params = OrderedDict([ @@ -624,12 +662,16 @@ async def periodic_keep_alive( @acm -async def get_client( - mkt_mode: str = 'spot', -) -> Client: - client = Client(mkt_mode=mkt_mode) +async def get_client() -> Client: - log.info(f'{client} in {mkt_mode} mode: caching exchange infos..') + client = Client() await client.exch_info() + log.info( + f'{client} in {client.mkt_mode} mode: caching exchange infos..\n' + 'Cached multi-market pairs:\n' + f'spot: {len(client._spot_pairs)}\n' + f'usdtm_futes: {len(client._ufutes_pairs)}\n' + f'Total: {len(client._pairs)}\n' + ) yield client diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 5c17b1947..c5c845499 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -38,7 +38,7 @@ open_autorecon_ws, NoBsWs, ) -from piker._cacheables import ( +from piker.brokers import ( open_cached_client, ) from piker.clearing._messages import ( @@ -104,7 +104,6 @@ async def trades_dialogue( ) -> AsyncIterator[dict[str, Any]]: async with open_cached_client('binance') as client: - await tractor.breakpoint() if not client.api_key: await ctx.started('paper') return diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index 561b5fbca..fafff4119 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -43,12 +43,15 @@ import numpy as np import tractor +from piker.brokers import ( + open_cached_client, +) from piker._cacheables import ( async_lifo_cache, - open_cached_client, ) -from piker.accounting._mktinfo import ( +from piker.accounting import ( Asset, + DerivTypes, MktPair, unpack_fqme, digits_to_dec, @@ -69,6 +72,7 @@ ) from .schemas import ( Pair, + FutesPair, ) log = get_logger('piker.brokers.binance') @@ -219,8 +223,6 @@ async def open_history_client( ) -> tuple[Callable, int]: - symbol: str = mkt.bs_fqme - # TODO implement history getter for the new storage layer. async with open_cached_client('binance') as client: @@ -237,8 +239,20 @@ async def get_ohlc( if timeframe != 60: raise DataUnavailable('Only 1m bars are supported') + # TODO: better wrapping for venue / mode? + # - eventually logic for usd vs. coin settled futes + # based on `MktPair.src` type/value? + # - maybe something like `async with + # Client.use_venue('usdtm_futes')` + if mkt.type_key in DerivTypes: + client.mkt_mode = 'usdtm_futes' + else: + client.mkt_mode = 'spot' + + # NOTE: always query using their native symbology! + mktid: str = mkt.bs_mktid array = await client.bars( - symbol, + mktid, start_dt=start_dt, end_dt=end_dt, ) @@ -269,22 +283,42 @@ async def get_mkt_info( fqme += '.binance' bs_fqme, _, broker = fqme.rpartition('.') - broker, mkt_ep, venue, suffix = unpack_fqme(fqme) - # bs_fqme, _, broker = fqme.partition('.') + broker, mkt_ep, venue, expiry = unpack_fqme(fqme) + + # NOTE: see the `FutesPair.bs_fqme: str` implementation + # to understand the reverse market info lookup below. + venue: str = venue or 'spot' + mkt_mode: str = venue or 'spot' + _atype: str = '' + if ( + venue + and 'spot' not in venue.lower() + + # XXX: catch all in case user doesn't know which + # venue they want (usdtm vs. coinm) and we can choose + # a default (via config?) 
once we support coin-m APIs. + or 'perp' in bs_fqme.lower() + ): + mkt_mode: str = f'{venue.lower()}_futes' + if 'perp' in expiry: + _atype = 'perpetual_future' - mkt_mode: str = 'spot' - if 'perp' in bs_fqme: - mkt_mode = 'usd_futes' + else: + _atype = 'future' async with open_cached_client( 'binance', - mkt_mode=mkt_mode, ) as client: + # switch mode depending on input pattern parsing + client.mkt_mode = mkt_mode + pair_str: str = mkt_ep.upper() pair: Pair = await client.exch_info(pair_str) - await tractor.breakpoint() + if 'futes' in mkt_mode: + assert isinstance(pair, FutesPair) + mkt = MktPair( dst=Asset( name=pair.baseAsset, @@ -299,7 +333,10 @@ async def get_mkt_info( price_tick=pair.price_tick, size_tick=pair.size_tick, bs_mktid=pair.symbol, + expiry=expiry, + venue=venue, broker='binance', + _atype=_atype, ) both = mkt, pair return both @@ -379,26 +416,33 @@ async def stream_quotes( FeedInit(mkt_info=mkt) ) - # TODO: detect whether futes or spot contact was requested from .api import ( _futes_ws, - # _spot_ws, + _spot_ws, ) - wsep: str = _futes_ws + + async with open_cached_client( + 'binance', + ) as client: + wsep: str = { + 'usdtm_futes': _futes_ws, + 'spot': _spot_ws, + }[client.mkt_mode] async with ( open_autorecon_ws( wsep, fixture=partial( subscribe, - symbols=symbols, + symbols=[mkt.bs_mktid], ), ) as ws, # avoid stream-gen closure from breaking trio.. aclosing(stream_messages(ws)) as msg_gen, ): + # log.info('WAITING ON FIRST LIVE QUOTE..') typ, quote = await anext(msg_gen) # pull a first quote and deliver @@ -413,15 +457,20 @@ async def stream_quotes( # import time # last = time.time() + # XXX NOTE: can't include the `.binance` suffix + # or the sampling loop will not broadcast correctly + # since `bus._subscribers.setdefault(bs_fqme, set())` + # is used inside `.data.open_feed_bus()` !!! 
+ topic: str = mkt.bs_fqme + # start streaming - async for typ, msg in msg_gen: + async for typ, quote in msg_gen: # period = time.time() - last # hz = 1/period if period else float('inf') # if hz > 60: # log.info(f'Binance quotez : {hz}') - topic = msg['symbol'].lower() - await send_chan.send({topic: msg}) + await send_chan.send({topic: quote}) # last = time.time() @@ -429,10 +478,11 @@ async def stream_quotes( async def open_symbol_search( ctx: tractor.Context, ) -> Client: + async with open_cached_client('binance') as client: # load all symbols locally for fast search - cache = await client.exch_info() + fqpairs_cache = await client.exch_info() await ctx.started() async with ctx.open_stream() as stream: @@ -442,11 +492,11 @@ async def open_symbol_search( matches = fuzzy.extractBests( pattern, - cache, + fqpairs_cache, score_cutoff=50, ) # repack in dict form await stream.send({ - item[0].symbol: item[0] + item[0].bs_fqme: item[0] for item in matches }) diff --git a/piker/brokers/binance/schemas.py b/piker/brokers/binance/schemas.py index c909c39cc..fbd6f944d 100644 --- a/piker/brokers/binance/schemas.py +++ b/piker/brokers/binance/schemas.py @@ -60,6 +60,10 @@ def size_tick(self) -> Decimal: step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0') return Decimal(step_size) + @property + def bs_fqme(self) -> str: + return self.symbol + class SpotPair(Pair, frozen=True): @@ -80,6 +84,10 @@ class SpotPair(Pair, frozen=True): allowedSelfTradePreventionModes: list[str] permissions: list[str] + @property + def bs_fqme(self) -> str: + return f'{self.symbol}.SPOT' + class FutesPair(Pair): @@ -111,16 +119,44 @@ class FutesPair(Pair): def quoteAssetPrecision(self) -> int: return self.quotePrecision + @property + def bs_fqme(self) -> str: + symbol: str = self.symbol + ctype: str = self.contractType + margin: str = self.marginAsset + + match ctype: + case 'PERPETUAL': + return f'{symbol}.{margin}M.PERP' + + case 'CURRENT_QUARTER': + pair, _, expiry = symbol.partition('_') + return f'{pair}.{margin}M.{expiry}' + + case '': + subtype: str = self.underlyingSubType[0] + match subtype: + case 'DEFI': + return f'{symbol}.{subtype}.PERP' + + breakpoint() + return f'{symbol}.WTFPWNEDBBQ' + + MarketType = Literal[ 'spot', - 'margin', - 'usd_futes', - 'coin_futes', + # 'margin', + 'usdtm_futes', + # 'coin_futes', ] PAIRTYPES: dict[MarketType, Pair] = { 'spot': SpotPair, - 'usd_futes': FutesPair, + 'usdtm_futes': FutesPair, + + # TODO: support coin-margined venue: + # https://binance-docs.github.io/apidocs/delivery/en/#change-log + # 'coinm_futes': CoinFutesPair, } From 9ff03ba00c4b576d14ccbc2eb2677cd8c3ac031b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 14 Jun 2023 13:44:47 -0400 Subject: [PATCH 25/73] kraken: add `.spot.kraken` fqme interpolation As just added for binance move to using an explicit `..kraken` style for spot markets which makes the current spot symbology expand to `.SPOT` from the new `Pair.bs_fqme: str`. Reasons for why are laid out in the equivalent patch for binance. Obviously this also primes for supporting kraken's futures venue APIs as well :surfer: https://docs.futures.kraken.com/#introduction Detalles: - add `.spot.kraken` parsing to `get_mkt_info()` so that if the venue token is not passed by caller we implicitly expand it in. - change `normalize()` to only return the `quote: dict` not the topic key. 
- rewrite live feed msg loop to use `match:` syntax B) --- piker/brokers/kraken/api.py | 6 +- piker/brokers/kraken/broker.py | 4 +- piker/brokers/kraken/feed.py | 111 +++++++++++++++++++-------------- 3 files changed, 72 insertions(+), 49 deletions(-) diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index e9a3f607e..a82714cf7 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -153,6 +153,10 @@ def price_tick(self) -> Decimal: def size_tick(self) -> Decimal: return digits_to_dec(self.lot_decimals) + @property + def bs_fqme(self) -> str: + return f'{self.symbol}.SPOT' + class Client: @@ -639,7 +643,7 @@ def normalize_symbol( ''' try: - return cls._ntable[ticker].lower() + return cls._ntable[ticker] except KeyError as ke: raise SymbolNotFound(f'kraken has no {ke.args[0]}') diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 8fa321b0b..814826be5 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -1194,8 +1194,8 @@ async def norm_trade_records( }[record['type']] # we normalize to kraken's `altname` always.. - bs_mktid = Client.normalize_symbol(record['pair']) - fqme = f'{bs_mktid}.kraken' + bs_mktid: str = Client.normalize_symbol(record['pair']) + fqme = f'{bs_mktid.lower()}.kraken' mkt: MktPair = (await get_mkt_info(fqme))[0] records[tid] = Transaction( diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index dc70672fc..4830914f4 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -41,9 +41,11 @@ from piker.accounting._mktinfo import ( Asset, MktPair, + unpack_fqme, ) from piker.brokers import ( open_cached_client, + SymbolNotFound, ) from piker._cacheables import ( async_lifo_cache, @@ -195,24 +197,18 @@ async def process_data_feed_msgs( # yield msg -def normalize( - ohlc: OHLC, +def normalize(ohlc: OHLC) -> dict: + ''' + Norm an `OHLC` msg to piker's minimal (live-)quote schema. -) -> dict: + ''' quote = ohlc.to_dict() quote['broker_ts'] = quote['time'] quote['brokerd_ts'] = time.time() quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '') quote['last'] = quote['close'] quote['bar_wap'] = ohlc.vwap - - # seriously eh? what's with this non-symmetry everywhere - # in subscription systems... - # XXX: piker style is always lowercases symbols. - topic = quote['pair'].replace('/', '').lower() - - # print(quote) - return topic, quote + return quote @acm @@ -221,7 +217,7 @@ async def open_history_client( ) -> AsyncGenerator[Callable, None]: - symbol: str = mkt.bs_fqme + symbol: str = mkt.bs_mktid # TODO implement history getter for the new storage layer. async with open_cached_client('kraken') as client: @@ -284,6 +280,18 @@ async def get_mkt_info( key-strs to `MktPair`s. 
''' + venue: str = 'spot' + expiry: str = '' + if '.kraken' in fqme: + broker, pair, venue, expiry = unpack_fqme(fqme) + venue: str = venue or 'spot' + + if venue != 'spot': + raise SymbolNotFound( + 'kraken only supports spot markets right now!\n' + f'{fqme}\n' + ) + async with open_cached_client('kraken') as client: # uppercase since kraken bs_mktid is always upper @@ -304,6 +312,12 @@ async def get_mkt_info( size_tick=pair.size_tick, bs_mktid=bs_mktid, + expiry=expiry, + venue=venue or 'spot', + + # TODO: futes + # _atype=_atype, + broker='kraken', ) return mkt, pair @@ -410,7 +424,7 @@ async def subscribe(ws: NoBsWs): ): # pull a first quote and deliver typ, ohlc_last = await anext(msg_gen) - topic, quote = normalize(ohlc_last) + quote = normalize(ohlc_last) task_status.started((init_msgs, quote)) feed_is_live.set() @@ -419,41 +433,46 @@ async def subscribe(ws: NoBsWs): last_interval_start = ohlc_last.etime # start streaming - async for typ, ohlc in msg_gen: - - if typ == 'ohlc': + topic: str = mkt.bs_fqme + async for typ, quote in msg_gen: + match typ: # TODO: can get rid of all this by using - # ``trades`` subscription... - - # generate tick values to match time & sales pane: - # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m - volume = ohlc.volume - - # new OHLC sample interval - if ohlc.etime > last_interval_start: - last_interval_start = ohlc.etime - tick_volume = volume - - else: - # this is the tick volume *within the interval* - tick_volume = volume - ohlc_last.volume - - ohlc_last = ohlc - last = ohlc.close - - if tick_volume: - ohlc.ticks.append({ - 'type': 'trade', - 'price': last, - 'size': tick_volume, - }) - - topic, quote = normalize(ohlc) - - elif typ == 'l1': - quote = ohlc - topic = quote['symbol'].lower() + # ``trades`` subscription..? Not sure why this + # wasn't used originally? (music queues) zoltannn.. 
+ # https://docs.kraken.com/websockets/#message-trade + case 'ohlc': + # generate tick values to match time & sales pane: + # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m + volume = quote.volume + + # new OHLC sample interval + if quote.etime > last_interval_start: + last_interval_start = quote.etime + tick_volume = volume + + else: + # this is the tick volume *within the interval* + tick_volume = volume - ohlc_last.volume + + ohlc_last = quote + last = quote.close + + if tick_volume: + quote.ticks.append({ + 'type': 'trade', + 'price': last, + 'size': tick_volume, + }) + + quote = normalize(quote) + + case 'l1': + # passthrough quote msg + pass + + case _: + log.warning(f'Unknown WSS message: {typ}, {quote}') await send_chan.send({topic: quote}) From 3c89295efe185de43eeaaa3a9fd501fd5e780342 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 14 Jun 2023 15:34:17 -0400 Subject: [PATCH 26/73] Rename `.binance.schemas` -> `.venues` --- piker/brokers/binance/api.py | 2 +- piker/brokers/binance/feed.py | 2 +- piker/brokers/binance/{schemas.py => venues.py} | 0 3 files changed, 2 insertions(+), 2 deletions(-) rename piker/brokers/binance/{schemas.py => venues.py} (100%) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 52c510a6c..8d7d48df9 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -55,7 +55,7 @@ SymbolNotFound, get_logger, ) -from .schemas import ( +from .venues import ( PAIRTYPES, Pair, MarketType, diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index fafff4119..e3ccaa908 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -70,7 +70,7 @@ from .api import ( Client, ) -from .schemas import ( +from .venues import ( Pair, FutesPair, ) diff --git a/piker/brokers/binance/schemas.py b/piker/brokers/binance/venues.py similarity index 100% rename from piker/brokers/binance/schemas.py rename to piker/brokers/binance/venues.py From f792ecf3af6c22f2ded489d7db2c63a706b9c9db Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 14 Jun 2023 16:48:57 -0400 Subject: [PATCH 27/73] binance: use new `open_trade_dialog()` endpoint name B) --- piker/brokers/binance/__init__.py | 4 ++-- piker/brokers/binance/broker.py | 7 +++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/piker/brokers/binance/__init__.py b/piker/brokers/binance/__init__.py index cfdbd3a5d..fb5844685 100644 --- a/piker/brokers/binance/__init__.py +++ b/piker/brokers/binance/__init__.py @@ -31,14 +31,14 @@ stream_quotes, ) from .broker import ( - trades_dialogue, + open_trade_dialog, ) __all__ = [ 'get_client', 'get_mkt_info', - 'trades_dialogue', + 'open_trade_dialog', 'open_history_client', 'open_symbol_search', 'stream_quotes', diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index c5c845499..ec4edfc48 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -58,6 +58,7 @@ async def handle_order_requests( ems_order_stream: tractor.MsgStream ) -> None: async with open_cached_client('binance') as client: + async for request_msg in ems_order_stream: log.info(f'Received order request {request_msg}') @@ -79,11 +80,14 @@ async def handle_order_requests( # deliver ack that order has been submitted to broker routing await ems_order_stream.send( BrokerdOrderAck( + # ems order request id oid=order.oid, + # broker specific request id reqid=reqid, time_ns=time.time_ns(), + ).dict() ) @@ -97,7 +101,7 @@ async def handle_order_requests( 
@tractor.context -async def trades_dialogue( +async def open_trade_dialog( ctx: tractor.Context, loglevel: str = None @@ -123,7 +127,6 @@ async def trades_dialogue( client.manage_listen_key() as listen_key, ): n.start_soon(handle_order_requests, ems_stream) - # await trio.sleep_forever() ws: NoBsWs async with open_autorecon_ws( From 9972bd387afc4c1b167243d032979fb9f303ba11 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 14 Jun 2023 17:23:20 -0400 Subject: [PATCH 28/73] kraken: use new `open_trade_dialog()` ep name B) --- piker/brokers/kraken/__init__.py | 4 ++-- piker/brokers/kraken/broker.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/piker/brokers/kraken/__init__.py b/piker/brokers/kraken/__init__.py index 0589981b7..8ec19bcfa 100644 --- a/piker/brokers/kraken/__init__.py +++ b/piker/brokers/kraken/__init__.py @@ -35,7 +35,7 @@ stream_quotes, ) from .broker import ( - trades_dialogue, + open_trade_dialog, norm_trade_records, ) @@ -43,7 +43,7 @@ __all__ = [ 'get_client', 'get_mkt_info', - 'trades_dialogue', + 'open_trade_dialog', 'open_history_client', 'open_symbol_search', 'stream_quotes', diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 814826be5..62477e9c1 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -417,7 +417,7 @@ def trades2pps( @tractor.context -async def trades_dialogue( +async def open_trade_dialog( ctx: tractor.Context, ) -> AsyncIterator[dict[str, Any]]: From 0c74a67ee17248cb064167acb085d5e628ef52e2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 14 Jun 2023 18:27:26 -0400 Subject: [PATCH 29/73] Move API urls to `.venues` Also add a lookup helper for getting addrs by venue: `get_api_eps()` which returns the rest and wss values. --- piker/brokers/binance/api.py | 44 ++++++++----------------- piker/brokers/binance/broker.py | 1 - piker/brokers/binance/feed.py | 18 ++--------- piker/brokers/binance/venues.py | 57 ++++++++++++++++++++++++++++----- 4 files changed, 66 insertions(+), 54 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 8d7d48df9..0c88142b0 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -59,6 +59,8 @@ PAIRTYPES, Pair, MarketType, + _spot_url, + _futes_url, ) log = get_logger('piker.brokers.binance') @@ -81,29 +83,6 @@ def get_config() -> dict: log = get_logger(__name__) - -_domain: str = 'binance.com' -_spot_url = _url = f'https://api.{_domain}' -_futes_url = f'https://fapi.{_domain}' - -# test nets -_testnet_futes_url = 'https://testnet.binancefuture.com' - -# WEBsocketz -# NOTE XXX: see api docs which show diff addr? -# https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information -_spot_ws: str = 'wss://stream.binance.com/ws' -# 'wss://ws-api.binance.com:443/ws-api/v3', - -# NOTE: spot test network only allows certain ep sets: -# https://testnet.binance.vision/ -_testnet_spot_ws: str = 'wss://testnet.binance.vision/ws-api/v3' - -# https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams -_futes_ws: str = f'wss://fstream.{_domain}/ws/' -_auth_futes_ws: str = 'wss://fstream-auth.{_domain}/ws/' - - # Broker specific ohlc schema (rest) # XXX TODO? 
some additional fields are defined in the docs: # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data @@ -181,11 +160,11 @@ def __init__( # spot EPs sesh self._sesh = asks.Session(connections=4) - self._sesh.base_location: str = _url + self._sesh.base_location: str = _spot_url # margin and extended spot endpoints session. self._sapi_sesh = asks.Session(connections=4) - self._sapi_sesh.base_location: str = _url + self._sapi_sesh.base_location: str = _spot_url # futes EPs sesh self._fapi_sesh = asks.Session(connections=4) @@ -193,7 +172,9 @@ def __init__( # for creating API keys see, # https://www.binance.com/en/support/faq/how-to-create-api-keys-on-binance-360002502072 - conf: dict = get_config() + root_conf: dict = get_config() + conf: dict = root_conf['futes'] + self.api_key: str = conf.get('api_key', '') self.api_secret: str = conf.get('api_secret', '') @@ -213,7 +194,7 @@ def __init__( # 'futes_coin': self._dapi, # TODO } - def _get_signature(self, data: OrderedDict) -> str: + def _mk_sig(self, data: OrderedDict) -> str: # XXX: Info on security and authentification # https://binance-docs.github.io/apidocs/#endpoint-security-type @@ -226,7 +207,9 @@ def _get_signature(self, data: OrderedDict) -> str: query_str = '&'.join([ f'{_key}={value}' for _key, value in data.items()]) + log.info(query_str) + msg_auth = hmac.new( self.api_secret.encode('utf-8'), query_str.encode('utf-8'), @@ -255,7 +238,7 @@ async def _api( ''' if signed: - params['signature'] = self._get_signature(params) + params['signature'] = self._mk_sig(params) resp = await getattr(self._sesh, action)( path=f'/api/v3/{method}', @@ -282,7 +265,7 @@ async def _fapi( ''' if signed: - params['signature'] = self._get_signature(params) + params['signature'] = self._mk_sig(params) resp = await getattr(self._fapi_sesh, action)( path=f'/fapi/v1/{method}', @@ -311,7 +294,7 @@ async def _sapi( ''' if signed: - params['signature'] = self._get_signature(params) + params['signature'] = self._mk_sig(params) resp = await getattr(self._sapi_sesh, action)( path=f'/sapi/v1/{method}', @@ -539,6 +522,7 @@ async def get_deposits( async def get_withdrawls( self, recv_window: int = 60000 + ) -> list: params = OrderedDict([ diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index ec4edfc48..6dcfd8d0c 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -103,7 +103,6 @@ async def handle_order_requests( @tractor.context async def open_trade_dialog( ctx: tractor.Context, - loglevel: str = None ) -> AsyncIterator[dict[str, Any]]: diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index e3ccaa908..8505d76f0 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -73,6 +73,7 @@ from .venues import ( Pair, FutesPair, + get_api_eps, ) log = get_logger('piker.brokers.binance') @@ -416,23 +417,10 @@ async def stream_quotes( FeedInit(mkt_info=mkt) ) - # TODO: detect whether futes or spot contact was requested - from .api import ( - _futes_ws, - _spot_ws, - ) - - async with open_cached_client( - 'binance', - ) as client: - wsep: str = { - 'usdtm_futes': _futes_ws, - 'spot': _spot_ws, - }[client.mkt_mode] - async with ( + open_cached_client('binance') as client, open_autorecon_ws( - wsep, + url=get_api_eps(client.mkt_mode)[1], # 2nd elem is wss url fixture=partial( subscribe, symbols=[mkt.bs_mktid], diff --git a/piker/brokers/binance/venues.py b/piker/brokers/binance/venues.py index fbd6f944d..0b645b663 100644 --- 
a/piker/brokers/binance/venues.py +++ b/piker/brokers/binance/venues.py @@ -30,6 +30,55 @@ from piker.data.types import Struct +# API endpoint paths by venue / sub-API +_domain: str = 'binance.com' +_spot_url = f'https://api.{_domain}' +_futes_url = f'https://fapi.{_domain}' + +# WEBsocketz +# NOTE XXX: see api docs which show diff addr? +# https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information +_spot_ws: str = 'wss://stream.binance.com/ws' +# 'wss://ws-api.binance.com:443/ws-api/v3', + +# NOTE: spot test network only allows certain ep sets: +# https://testnet.binance.vision/ +_testnet_spot_ws: str = 'wss://testnet.binance.vision/ws-api/v3' + +# https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams +_futes_ws: str = f'wss://fstream.{_domain}/ws/' +_auth_futes_ws: str = 'wss://fstream-auth.{_domain}/ws/' + +# test nets +_testnet_futes_url: str = 'https://testnet.binancefuture.com' +_testnet_futes_ws: str = 'wss://stream.binancefuture.com' + + +MarketType = Literal[ + 'spot', + # 'margin', + 'usdtm_futes', + # 'coin_futes', +] + + +def get_api_eps(venue: MarketType) -> tuple[str, str]: + ''' + Return API ep root paths per venue. + + ''' + return { + 'spot': ( + _spot_url, + _spot_ws, + ), + 'usdtm_futes': ( + _futes_url, + _futes_ws, + ), + }[venue] + + class Pair(Struct, frozen=True): symbol: str status: str @@ -144,14 +193,6 @@ def bs_fqme(self) -> str: -MarketType = Literal[ - 'spot', - # 'margin', - 'usdtm_futes', - # 'coin_futes', -] - - PAIRTYPES: dict[MarketType, Pair] = { 'spot': SpotPair, 'usdtm_futes': FutesPair, From 2ee11f65f02682e245acfdea473b61274bcf00a5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 14 Jun 2023 18:56:56 -0400 Subject: [PATCH 30/73] binance: facepalm, always lower case venue token.. --- piker/brokers/binance/feed.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index 8505d76f0..7a0a80b3a 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -288,8 +288,7 @@ async def get_mkt_info( # NOTE: see the `FutesPair.bs_fqme: str` implementation # to understand the reverse market info lookup below. 
- venue: str = venue or 'spot' - mkt_mode: str = venue or 'spot' + mkt_mode = venue = venue.lower() or 'spot' _atype: str = '' if ( venue From 1bb7c9a2e441cc2e618c453de5418f872bee0372 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 16 Jun 2023 20:42:40 -0400 Subject: [PATCH 31/73] Handle pending futes, optional `.filters` add testnet urls --- piker/brokers/binance/venues.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/piker/brokers/binance/venues.py b/piker/brokers/binance/venues.py index 0b645b663..4b530122b 100644 --- a/piker/brokers/binance/venues.py +++ b/piker/brokers/binance/venues.py @@ -27,6 +27,8 @@ ) from decimal import Decimal +from msgspec import field + from piker.data.types import Struct @@ -50,6 +52,9 @@ _auth_futes_ws: str = 'wss://fstream-auth.{_domain}/ws/' # test nets +_testnet_spot_url: str = 'https://testnet.binance.vision/api' +_testnet_spot_ws: str = 'wss://testnet.binance.vision/ws' + _testnet_futes_url: str = 'https://testnet.binancefuture.com' _testnet_futes_ws: str = 'wss://stream.binancefuture.com' @@ -79,7 +84,7 @@ def get_api_eps(venue: MarketType) -> tuple[str, str]: }[venue] -class Pair(Struct, frozen=True): +class Pair(Struct, frozen=True, kw_only=True): symbol: str status: str orderTypes: list[str] @@ -95,7 +100,7 @@ class Pair(Struct, frozen=True): filters: dict[ str, str | int | float, - ] + ] = field(default_factory=dict) @property def price_tick(self) -> Decimal: @@ -183,13 +188,17 @@ def bs_fqme(self) -> str: return f'{pair}.{margin}M.{expiry}' case '': - subtype: str = self.underlyingSubType[0] - match subtype: + subtype: list[str] = self.underlyingSubType + if not subtype: + if self.status == 'PENDING_TRADING': + return f'{symbol}.{margin}M.PENDING' + + match subtype[0]: case 'DEFI': return f'{symbol}.{subtype}.PERP' - breakpoint() - return f'{symbol}.WTFPWNEDBBQ' + # XXX: yeah no clue then.. + return f'{symbol}.WTF.PWNED.BBQ' From c6d1007e663087d382690ae6dcb50d91792be448 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 16 Jun 2023 20:43:07 -0400 Subject: [PATCH 32/73] Load `Asset`s during echange info queries Since we need them for accounting and since we can get them directly from the usdtm futes `exchangeInfo` ep, just preload all asset info that we can during initial `Pair` caching. Cache the asset infos inside a new per venue `Client._venues2assets: dict[str, dict[str, Asset | None]]` and mostly be pedantic with the spot asset list for now since futes seems much smaller and doesn't include transaction precision info. Further: - load a testnet http session if `binance.use_testnet.futes = true`. - add testnet support for all non-data endpoints. - hardcode user stream methods to work for usdtm futes for the moment. - add logging around order request calls. 
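For reference, the spot-asset loading described above derives each `Asset.tx_tick` from binance's integer precision fields (eg. `baseAssetPrecision: 8` / `quoteAssetPrecision: 8`). A standalone sketch of that digits -> tick conversion (illustrative only; piker's own `digits_to_dec()` may well be implemented differently):

    from decimal import Decimal

    def digits_to_tick(ndigits: int) -> Decimal:
        # 8 -> Decimal('1E-8'), 2 -> Decimal('0.01'), 0 -> Decimal('1')
        return Decimal(1).scaleb(-ndigits)

    # eg. a spot pair reporting `quoteAssetPrecision: 8` implies a
    # quote-asset transaction tick of 1e-8:
    assert digits_to_tick(8) == Decimal('0.00000001')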
--- piker/brokers/binance/api.py | 231 +++++++++++++++++++++++++---------- 1 file changed, 166 insertions(+), 65 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 0c88142b0..39a3e711b 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -30,6 +30,7 @@ asynccontextmanager as acm, ) from datetime import datetime +from pprint import pformat from typing import ( Any, Callable, @@ -48,6 +49,10 @@ import numpy as np from piker import config +from piker.accounting import ( + Asset, + digits_to_dec, +) from piker.data.types import Struct from piker.data import def_iohlcv_fields from piker.brokers._util import ( @@ -59,8 +64,11 @@ PAIRTYPES, Pair, MarketType, + _spot_url, _futes_url, + + _testnet_futes_url, ) log = get_logger('piker.brokers.binance') @@ -144,15 +152,27 @@ def __init__( mkt_mode: MarketType = 'spot', ) -> None: - # build out pair info tables for each market type # and wrap in a chain-map view for search / query. self._spot_pairs: dict[str, Pair] = {} # spot info table self._ufutes_pairs: dict[str, Pair] = {} # usd-futures table - self._mkt2pairs: dict[str, dict] = { + self._venue2pairs: dict[str, dict] = { 'spot': self._spot_pairs, 'usdtm_futes': self._ufutes_pairs, } + + self._venue2assets: dict[ + str, + dict[str, dict] | None, + ] = { + # NOTE: only the spot table contains a dict[str, Asset] + # since others (like futes, opts) can just do lookups + # from a list of names to the spot equivalent. + 'spot': {}, + 'usdtm_futes': {}, + # 'coinm_futes': {}, + } + # NOTE: only stick in the spot table for now until exchange info # is loaded, since at that point we'll suffix all the futes # market symbols for use by search. See `.exch_info()`. @@ -177,15 +197,32 @@ def __init__( self.api_key: str = conf.get('api_key', '') self.api_secret: str = conf.get('api_secret', '') + self.use_testnet: bool = conf.get('use_testnet', False) + + if self.use_testnet: + self._test_fapi_sesh = asks.Session(connections=4) + self._test_fapi_sesh.base_location: str = _testnet_futes_url self.watchlist = conf.get('watchlist', []) if self.api_key: - api_key_header = {'X-MBX-APIKEY': self.api_key} + api_key_header: dict = { + # taken from official: + # https://github.com/binance/binance-futures-connector-python/blob/main/binance/api.py#L47 + "Content-Type": "application/json;charset=utf-8", + + # TODO: prolly should just always query and copy + # in the real latest ver? 
+ "User-Agent": "binance-connector/6.1.6smbz6", + "X-MBX-APIKEY": self.api_key, + } self._sesh.headers.update(api_key_header) self._sapi_sesh.headers.update(api_key_header) self._fapi_sesh.headers.update(api_key_header) + if self.use_testnet: + self._test_fapi_sesh.headers.update(api_key_header) + self.mkt_mode: MarketType = mkt_mode self.mkt_mode_req: dict[str, Callable] = { 'spot': self._api, @@ -204,11 +241,12 @@ def _mk_sig(self, data: OrderedDict) -> str: "Can't generate a signature without setting up credentials" ) - query_str = '&'.join([ - f'{_key}={value}' - for _key, value in data.items()]) + query_str: str = '&'.join([ + f'{key}={value}' + for key, value in data.items() + ]) - log.info(query_str) + # log.info(query_str) msg_auth = hmac.new( self.api_secret.encode('utf-8'), @@ -253,7 +291,8 @@ async def _fapi( method: str, params: dict | OrderedDict, signed: bool = False, - action: str = 'get' + action: str = 'get', + testnet: bool = True, ) -> dict[str, Any]: ''' @@ -267,7 +306,23 @@ async def _fapi( if signed: params['signature'] = self._mk_sig(params) - resp = await getattr(self._fapi_sesh, action)( + # NOTE: only use testnet if user set brokers.toml config + # var to true **and** it's not one of the market data + # endpoints since we basically never want to display the + # test net feeds, we only are using it for testing order + # ctl machinery B) + if ( + self.use_testnet + and method not in { + 'klines', + 'exchangeInfo', + } + ): + meth = getattr(self._test_fapi_sesh, action) + else: + meth = getattr(self._fapi_sesh, action) + + resp = await meth( path=f'/fapi/v1/{method}', params=params, timeout=float('inf') @@ -306,22 +361,28 @@ async def _sapi( async def _cache_pairs( self, - mkt_type: str, + venue: str, ) -> None: # lookup internal mkt-specific pair table to update - pair_table: dict[str, Pair] = self._mkt2pairs[mkt_type] + pair_table: dict[str, Pair] = self._venue2pairs[venue] + asset_table: dict[str, Asset] = self._venue2assets[venue] # make API request(s) - resp = await self.mkt_mode_req[mkt_type]( + resp = await self.mkt_mode_req[venue]( 'exchangeInfo', params={}, # NOTE: retrieve all symbols by default ) - entries = resp['symbols'] - if not entries: + mkt_pairs = resp['symbols'] + if not mkt_pairs: raise SymbolNotFound(f'No market pairs found!?:\n{resp}') - for item in entries: + pairs_view_subtable: dict[str, Pair] = {} + # if venue == 'spot': + # import tractor + # await tractor.breakpoint() + + for item in mkt_pairs: filters_ls: list = item.pop('filters', False) if filters_ls: filters = {} @@ -331,15 +392,50 @@ async def _cache_pairs( item['filters'] = filters - pair_type: Type = PAIRTYPES[mkt_type] + pair_type: Type = PAIRTYPES[venue] pair: Pair = pair_type(**item) pair_table[pair.symbol.upper()] = pair + # update an additional top-level-cross-venue-table + # `._pairs: ChainMap` for search B0 + pairs_view_subtable[pair.bs_fqme] = pair + + if venue == 'spot': + if (name := pair.quoteAsset) not in asset_table: + asset_table[name] = Asset( + name=name, + atype='crypto_currency', + tx_tick=digits_to_dec(pair.quoteAssetPrecision), + ) + + if (name := pair.baseAsset) not in asset_table: + asset_table[name] = Asset( + name=name, + atype='crypto_currency', + tx_tick=digits_to_dec(pair.baseAssetPrecision), + ) + + # NOTE: make merged view of all market-type pairs but + # use market specific `Pair.bs_fqme` for keys! + # this allows searching for market pairs with different + # suffixes easily, for ex. 
`BTCUSDT.USDTM.PERP` will show + # up when a user uses the search endpoint with pattern + # `btc` B) + self._pairs.maps.append(pairs_view_subtable) + + if venue == 'spot': + return + + assets: list[dict] = resp.get('assets', ()) + for entry in assets: + name: str = entry['asset'] + asset_table[name] = self._venue2assets['spot'].get(name) + async def exch_info( self, sym: str | None = None, - mkt_type: MarketType | None = None, + venue: MarketType | None = None, ) -> dict[str, Pair] | Pair: ''' @@ -354,46 +450,34 @@ async def exch_info( https://binance-docs.github.io/apidocs/delivery/en/#exchange-information ''' - pair_table: dict[str, Pair] = self._mkt2pairs[ - mkt_type or self.mkt_mode + pair_table: dict[str, Pair] = self._venue2pairs[ + venue or self.mkt_mode ] if cached_pair := pair_table.get(sym): return cached_pair - # params = {} - # if sym is not None: - # params = {'symbol': sym} - - mkts: list[str] = ['spot', 'usdtm_futes'] - if mkt_type: - mkts: list[str] = [mkt_type] + venues: list[str] = ['spot', 'usdtm_futes'] + if venue: + venues: list[str] = [venue] + # batch per-venue download of all exchange infos async with trio.open_nursery() as rn: - for mkt_type in mkts: + for ven in venues: rn.start_soon( self._cache_pairs, - mkt_type, + ven, ) - # make merged view of all market-type pairs but - # use market specific `Pair.bs_fqme` for keys! - for venue, venue_pairs_table in self._mkt2pairs.items(): - self._pairs.maps.append( - {pair.bs_fqme: pair - for pair in venue_pairs_table.values()} - ) - return pair_table[sym] if sym else self._pairs + # TODO: unused except by `brokers.core.search_symbols()`? async def search_symbols( self, pattern: str, limit: int = None, + ) -> dict[str, Any]: - # if self._spot_pairs is not None: - # data = self._spot_pairs - # else: fq_pairs: dict = await self.exch_info() matches = fuzzy.extractBests( @@ -538,56 +622,60 @@ async def get_withdrawls( async def submit_limit( self, symbol: str, - side: str, # SELL / BUY + side: str, # sell / buy quantity: float, price: float, - # time_in_force: str = 'GTC', + oid: int | None = None, + tif: str = 'GTC', + recv_window: int = 60000 # iceberg_quantity: float | None = None, # order_resp_type: str | None = None, - recv_window: int = 60000 - - ) -> int: - symbol = symbol.upper() - await self.cache_symbols() - - # asset_precision = self._spot_pairs[symbol]['baseAssetPrecision'] - # quote_precision = self._pairs[symbol]['quoteAssetPrecision'] + ) -> str: + ''' + Submit a live limit order to ze binance. 
- params = OrderedDict([ - ('symbol', symbol), + ''' + params: dict = OrderedDict([ + ('symbol', symbol.upper()), ('side', side.upper()), ('type', 'LIMIT'), - ('timeInForce', 'GTC'), + ('timeInForce', tif), ('quantity', quantity), ('price', price), ('recvWindow', recv_window), ('newOrderRespType', 'ACK'), ('timestamp', binance_timestamp(now())) ]) - if oid: params['newClientOrderId'] = oid + log.info( + 'Submitting ReST order request:\n' + f'{pformat(params)}' + ) resp = await self._api( 'order', params=params, signed=True, action='post' ) - log.info(resp) - # return resp['orderId'] - return resp['orderId'] + reqid: str = resp['orderId'] + if oid: + assert oid == reqid + + return reqid async def submit_cancel( self, symbol: str, oid: str, + recv_window: int = 60000 + ) -> None: symbol = symbol.upper() - params = OrderedDict([ ('symbol', symbol), ('orderId', oid), @@ -595,6 +683,10 @@ async def submit_cancel( ('timestamp', binance_timestamp(now())) ]) + log.cancel( + 'Submitting ReST order cancel: {oid}\n' + f'{pformat(params)}' + ) return await self._api( 'order', params=params, @@ -603,22 +695,31 @@ async def submit_cancel( ) async def get_listen_key(self) -> str: - return (await self._api( - 'userDataStream', + + # resp = await self._api( + resp = await self.mkt_mode_req[self.mkt_mode]( + # 'userDataStream', # spot + 'listenKey', params={}, - action='post' - ))['listenKey'] + action='post', + signed=True, + ) + return resp['listenKey'] async def keep_alive_key(self, listen_key: str) -> None: - await self._fapi( - 'userDataStream', + # await self._fapi( + await self.mkt_mode_req[self.mkt_mode]( + # 'userDataStream', + 'listenKey', params={'listenKey': listen_key}, action='put' ) async def close_listen_key(self, listen_key: str) -> None: - await self._fapi( - 'userDataStream', + # await self._fapi( + await self.mkt_mode_req[self.mkt_mode]( + # 'userDataStream', + 'listenKey', params={'listenKey': listen_key}, action='delete' ) From 43494e4994ea176858092273277900d036db77cd Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 16 Jun 2023 20:48:19 -0400 Subject: [PATCH 33/73] Add note about expecting client side to cache search domain? --- piker/brokers/binance/feed.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index 7a0a80b3a..84e0914bb 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -470,18 +470,22 @@ async def open_symbol_search( # load all symbols locally for fast search fqpairs_cache = await client.exch_info() + # TODO: maybe we should deliver the cache + # so that client's can always do a local-lookup-first + # style try and then update async as (new) match results + # are delivered from here? await ctx.started() async with ctx.open_stream() as stream: + pattern: str async for pattern in stream: - # results = await client.exch_info(sym=pattern.upper()) - matches = fuzzy.extractBests( pattern, fqpairs_cache, score_cutoff=50, ) + # repack in dict form await stream.send({ item[0].bs_fqme: item[0] From f36061a1492f9dbb00d71b851d78f208b3a026b3 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 16 Jun 2023 20:48:38 -0400 Subject: [PATCH 34/73] binance: first draft live order ctl support B) Untested fully but has ostensibly working position and balance loading (by delegating entirely to binance's internals for that) and an MVP ems order request handler; still need to fill out the order status update task implementation.. 
Notes: - uses user data stream for all per account balance and position tracking. - no support yet for `piker.accounting` position tracking. - no support yet for full order / position real-time update via user stream. --- piker/brokers/binance/broker.py | 465 +++++++++++++++++++++++++------- 1 file changed, 370 insertions(+), 95 deletions(-) diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 6dcfd8d0c..a9f0a0a99 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -22,15 +22,23 @@ ''' from __future__ import annotations +from collections import ChainMap +from pprint import pformat from typing import ( Any, AsyncIterator, ) import time +from time import time_ns +from bidict import bidict import tractor import trio +from piker.accounting import ( + Asset, + # MktPair, +) from piker.brokers._util import ( get_logger, ) @@ -48,57 +56,111 @@ BrokerdPosition, BrokerdFill, BrokerdCancel, - # BrokerdError, + BrokerdError, ) +from .venues import Pair +from .api import Client log = get_logger('piker.brokers.binance') async def handle_order_requests( - ems_order_stream: tractor.MsgStream + ems_order_stream: tractor.MsgStream, + client: Client, + + # TODO: update this from open orders loaded at boot! + dialogs: ChainMap[str, BrokerdOrder] = ChainMap(), + ) -> None: - async with open_cached_client('binance') as client: + ''' + Receive order requests from `emsd`, translate tramsit API calls and transmit. + + ''' + msg: dict | BrokerdOrder | BrokerdCancel + async for msg in ems_order_stream: + log.info(f'Rx order request:\n{pformat(msg)}') + match msg: + case { + 'action': 'cancel', + }: + cancel = BrokerdCancel(**msg) + existing: BrokerdOrder | None = dialogs.get(cancel.oid) + if not existing: + log.error( + f'NO Existing order-dialog for {cancel.oid}!?' + ) + await ems_order_stream.send(BrokerdError( + oid=cancel.oid, + symbol=cancel.symbol, + reason=( + 'Invalid `binance` order request dialog oid', + ) + )) + continue - async for request_msg in ems_order_stream: - log.info(f'Received order request {request_msg}') + else: + await client.submit_cancel( + cancel.symbol, + cancel.oid, + ) - action = request_msg['action'] + case { + 'account': ('binance.futes' | 'binance.spot') as account, + 'action': action, + } if action in {'buy', 'sell'}: - if action in {'buy', 'sell'}: # validate - order = BrokerdOrder(**request_msg) + order = BrokerdOrder(**msg) + + # NOTE: check and report edits + if existing := dialogs.get(order.oid): + log.info( + f'Existing order for {existing.oid} updated:\n' + f'{pformat(existing.to_dict())} -> {pformat(msg)}' + ) + # TODO: figure out what special params we have to send? + # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade + + # XXX: ACK the request **immediately** before sending + # the api side request to ensure the ems maps the oid -> + # reqid correctly! 
+ resp = BrokerdOrderAck( + oid=order.oid, # ems order request id + reqid=order.oid, # our custom int mapping + account='binance', # piker account + ) + await ems_order_stream.send(resp) # call our client api to submit the order reqid = await client.submit_limit( - order.symbol, - order.action, - order.size, - order.price, + symbol=order.symbol, + side=order.action, + quantity=order.size, + price=order.price, oid=order.oid ) - - # deliver ack that order has been submitted to broker routing + # thank god at least someone lets us do this XD + assert reqid == order.oid + + # track latest request state + dialogs[reqid].maps.append(msg) + + case _: + account = msg.get('account') + if account not in {'binance.spot', 'binance.futes'}: + log.error( + 'This is a binance account, \ + only a `binance.spot/.futes` selection is valid' + ) await ems_order_stream.send( - BrokerdOrderAck( - - # ems order request id - oid=order.oid, - - # broker specific request id - reqid=reqid, - time_ns=time.time_ns(), - - ).dict() + BrokerdError( + oid=msg['oid'], + symbol=msg['symbol'], + reason=( + 'Invalid `binance` broker request msg:\n{msg}' + )) ) - elif action == 'cancel': - msg = BrokerdCancel(**request_msg) - - await client.submit_cancel(msg.symbol, msg.reqid) - - else: - log.error(f'Unknown order command: {request_msg}') - @tractor.context async def open_trade_dialog( @@ -115,74 +177,287 @@ async def open_trade_dialog( # ledger: TransactionLedger # TODO: load pps and accounts using accounting apis! - positions: list[BrokerdPosition] = [] - accounts: list[str] = ['binance.default'] - await ctx.started((positions, accounts)) - async with ( - ctx.open_stream() as ems_stream, - trio.open_nursery() as n, open_cached_client('binance') as client, - client.manage_listen_key() as listen_key, ): - n.start_soon(handle_order_requests, ems_stream) - - ws: NoBsWs - async with open_autorecon_ws( - f'wss://stream.binance.com:9443/ws/{listen_key}', - ) as ws: - event = await ws.recv_msg() - - # https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update - if event.get('e') == 'executionReport': - - oid: str = event.get('c') - side: str = event.get('S').lower() - status: str = event.get('X') - order_qty: float = float(event.get('q')) - filled_qty: float = float(event.get('z')) - cum_transacted_qty: float = float(event.get('Z')) - price_avg: float = cum_transacted_qty / filled_qty - broker_time: float = float(event.get('T')) - commission_amount: float = float(event.get('n')) - commission_asset: float = event.get('N') - - if status == 'TRADE': - if order_qty == filled_qty: - msg = BrokerdFill( + client.mkt_mode: str = 'usdtm_futes' + + # if client. 
+ account: str = client.mkt_mode + + wss: NoBsWs + async with ( + client.manage_listen_key() as listen_key, + open_autorecon_ws( + f'wss://stream.binancefuture.com/ws/{listen_key}', + # f'wss://stream.binance.com:9443/ws/{listen_key}', + ) as wss, + + ): + nsid: int = time_ns() + await wss.send_msg({ + # "method": "SUBSCRIBE", + "method": "REQUEST", + "params": + [ + f"{listen_key}@account", + f"{listen_key}@balance", + f"{listen_key}@position", + ], + "id": nsid + }) + + with trio.fail_after(1): + msg = await wss.recv_msg() + assert msg['id'] == nsid + + # TODO: load other market wide data / statistics: + # - OI: https://binance-docs.github.io/apidocs/futures/en/#open-interest + # - OI stats: https://binance-docs.github.io/apidocs/futures/en/#open-interest-statistics + accounts: bidict[str, str] = bidict() + balances: dict[Asset, float] = {} + positions: list[BrokerdPosition] = [] + + for resp_dict in msg['result']: + resp = resp_dict['res'] + req: str = resp_dict['req'] + + # @account response should be something like: + # {'accountAlias': 'sRFzFzAuuXsR', + # 'canDeposit': True, + # 'canTrade': True, + # 'canWithdraw': True, + # 'feeTier': 0} + if 'account' in req: + alias: str = resp['accountAlias'] + accounts['binance.usdtm_futes'] = alias + + # @balance response: + # {'accountAlias': 'sRFzFzAuuXsR', + # 'balances': [{'asset': 'BTC', + # 'availableBalance': '0.00000000', + # 'balance': '0.00000000', + # 'crossUnPnl': '0.00000000', + # 'crossWalletBalance': '0.00000000', + # 'maxWithdrawAmount': '0.00000000', + # 'updateTime': 0}] + # ... + # } + elif 'balance' in req: + for entry in resp['balances']: + name: str = entry['asset'] + balance: float = float(entry['balance']) + last_update_t: int = entry['updateTime'] + + spot_asset: Asset = client._venue2assets['spot'][name] + + if balance > 0: + balances[spot_asset] = (balance, last_update_t) + # await tractor.breakpoint() + + # @position response: + # {'positions': [{'entryPrice': '0.0', + # 'isAutoAddMargin': False, + # 'isolatedMargin': '0', + # 'leverage': 20, + # 'liquidationPrice': '0', + # 'marginType': 'CROSSED', + # 'markPrice': '0.60289650', + # 'markPrice': '0.00000000', + # 'maxNotionalValue': '25000', + # 'notional': '0', + # 'positionAmt': '0', + # 'positionSide': 'BOTH', + # 'symbol': 'ETHUSDT_230630', + # 'unRealizedProfit': '0.00000000', + # 'updateTime': 1672741444894} + # ... + # } + elif 'position' in req: + for entry in resp['positions']: + bs_mktid: str = entry['symbol'] + entry_size: float = float(entry['positionAmt']) + + pair: Pair | None + if ( + pair := client._venue2pairs[account].get(bs_mktid) + and entry_size > 0 + ): + entry_price: float = float(entry['entryPrice']) + + ppmsg = BrokerdPosition( + broker='binance', + account='binance.futes', + + # TODO: maybe we should be passing back + # a `MktPair` here? 
+ symbol=pair.bs_fqme.lower() + '.binance', + + size=entry_size, + avg_price=entry_price, + ) + positions.append(ppmsg) + + if pair is None: + log.warning( + f'`{bs_mktid}` Position entry but no market pair?\n' + f'{pformat(entry)}\n' + ) + + await ctx.started((positions, list(accounts))) + + async with ( + trio.open_nursery() as tn, + ctx.open_stream() as ems_stream, + ): + + tn.start_soon( + handle_order_requests, + ems_stream, + client, + ) + tn.start_soon( + handle_order_updates, + ems_stream, + wss, + + ) + + await trio.sleep_forever() + + +async def handle_order_updates( + ems_stream: tractor.MsgStream, + wss: NoBsWs, + + # apiflows: dict[int, ChainMap[dict[str, dict]]], + # ids: bidict[str, int], + # reqids2txids: bidict[int, str], + + # table: PpTable, + # ledger_trans: dict[str, Transaction], + + # acctid: str, + # acc_name: str, + # token: str, + +) -> None: + ''' + Main msg handling loop for all things order management. + + This code is broken out to make the context explicit and state + variables defined in the signature clear to the reader. + + ''' + async for msg in wss: + match msg: + + # TODO: + # POSITION update + # futes: https://binance-docs.github.io/apidocs/futures/en/#event-balance-and-position-update + + # ORDER update + # spot: https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update + # futes: https://binance-docs.github.io/apidocs/futures/en/#event-order-update + case { + 'e': 'executionReport', + 'T': float(epoch_ms), + 'o': { + 's': bs_mktid, + + # XXX NOTE XXX see special ids for market + # events or margin calls: + # // special client order id: + # // starts with "autoclose-": liquidation order + # // "adl_autoclose": ADL auto close order + # // "settlement_autoclose-": settlement order + # for delisting or delivery + 'c': oid, + + # prices + 'a': float(submit_price), + 'ap': float(avg_price), + 'L': float(fill_price), + + # sizing + 'q': float(req_size), + 'l': float(clear_size_filled), # this event + 'z': float(accum_size_filled), # accum + + # commissions + 'n': float(cost), + 'N': str(cost_asset), + + # state + 'S': str(side), + 'X': str(status), + }, + } as order_msg: + log.info( + f'{status} for {side} ORDER oid: {oid}\n' + f'bs_mktid: {bs_mktid}\n\n' + + f'order size: {req_size}\n' + f'cleared size: {clear_size_filled}\n' + f'accum filled size: {accum_size_filled}\n\n' + + f'submit price: {submit_price}\n' + f'fill_price: {fill_price}\n' + f'avg clearing price: {avg_price}\n\n' + + f'cost: {cost}@{cost_asset}\n' + ) + + # status remap from binance to piker's + # status set: + # - NEW + # - PARTIALLY_FILLED + # - FILLED + # - CANCELED + # - EXPIRED + # https://binance-docs.github.io/apidocs/futures/en/#event-order-update + match status: + case 'PARTIALLY_FILLED' | 'FILLED': + status = 'fill' + + fill_msg = BrokerdFill( + time_ns=time_ns(), reqid=oid, - time_ns=time.time_ns(), - action=side, - price=price_avg, - broker_details={ - 'name': 'binance', - 'commissions': { - 'amount': commission_amount, - 'asset': commission_asset - }, - 'broker_time': broker_time - }, - broker_time=broker_time + + # just use size value for now? + # action=action, + size=clear_size_filled, + price=fill_price, + + # TODO: maybe capture more msg data + # i.e fees? 
+ broker_details={'name': 'broker'} | order_msg, + broker_time=time.time(), ) + await ems_stream.send(fill_msg) - else: - if status == 'NEW': - status = 'submitted' - - elif status == 'CANCELED': - status = 'cancelled' - - msg = BrokerdStatus( - reqid=oid, - time_ns=time.time_ns(), - status=status, - filled=filled_qty, - remaining=order_qty - filled_qty, - broker_details={'name': 'binance'} - ) + if accum_size_filled == req_size: + status = 'closed' + + case 'NEW': + status = 'open' + + case 'EXPIRED': + status = 'canceled' - else: - # XXX: temporary, to catch unhandled msgs - breakpoint() + case _: + status = status.lower() - await ems_stream.send(msg.dict()) + resp = BrokerdStatus( + time_ns=time_ns(), + reqid=oid, + + status=status, + filled=accum_size_filled, + remaining=req_size - accum_size_filled, + broker_details={ + 'name': 'binance', + 'broker_time': epoch_ms / 1000. + } + ) + await ems_stream.send(resp) From 98f6d85b654c3da409a30b6c3fc8b17cbdd3671b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 03:37:49 -0400 Subject: [PATCH 35/73] Make order request methods be venue aware --- piker/brokers/binance/api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 39a3e711b..ae6573049 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -655,7 +655,7 @@ async def submit_limit( 'Submitting ReST order request:\n' f'{pformat(params)}' ) - resp = await self._api( + resp = await self.mkt_mode_req[self.mkt_mode]( 'order', params=params, signed=True, @@ -687,7 +687,7 @@ async def submit_cancel( 'Submitting ReST order cancel: {oid}\n' f'{pformat(params)}' ) - return await self._api( + await self.mkt_mode_req[self.mkt_mode]( 'order', params=params, signed=True, From a9c016ba103d9b4a57de5be084bdfa08d36af62e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 13:26:20 -0400 Subject: [PATCH 36/73] Use `Client._pairs` cross-venue table for orders Since the request handler task will work concurrently across venues (spot, futes, margin) we need to be sure that we look up the correct venue to update the order dialog and this is naturally determined by the FQME-style symbol in the `BrokerdOrder` msg; the best way to map that symbol-key to the correct venue/`Pair` is by using said `._pairs: ChainMap`. Further, handle limit order errors by catching and relaying back an error response to the EMS. Fix the "account name" to be `binance.usdtm` so that we can eventually and explicitly support all venues by name. --- piker/brokers/binance/broker.py | 51 ++++++++++++++++++++++----------- 1 file changed, 34 insertions(+), 17 deletions(-) diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index a9f0a0a99..2eff326f4 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -48,6 +48,7 @@ ) from piker.brokers import ( open_cached_client, + BrokerError, ) from piker.clearing._messages import ( BrokerdOrder, @@ -91,7 +92,8 @@ async def handle_order_requests( ) await ems_order_stream.send(BrokerdError( oid=cancel.oid, - symbol=cancel.symbol, + # TODO: do we need the symbol? 
+ symbol='unknown', reason=( 'Invalid `binance` order request dialog oid', ) @@ -100,12 +102,12 @@ async def handle_order_requests( else: await client.submit_cancel( - cancel.symbol, + existing.symbol, cancel.oid, ) case { - 'account': ('binance.futes' | 'binance.spot') as account, + 'account': ('binance.usdtm' | 'binance.spot') as account, 'action': action, } if action in {'buy', 'sell'}: @@ -131,19 +133,34 @@ async def handle_order_requests( ) await ems_order_stream.send(resp) - # call our client api to submit the order - reqid = await client.submit_limit( - symbol=order.symbol, - side=order.action, - quantity=order.size, - price=order.price, - oid=order.oid - ) - # thank god at least someone lets us do this XD - assert reqid == order.oid + # lookup the binance-native symbol + bs_mktid: str = client._pairs[order.symbol.upper()].symbol - # track latest request state - dialogs[reqid].maps.append(msg) + # call our client api to submit the order + try: + reqid = await client.submit_limit( + symbol=bs_mktid, + side=order.action, + quantity=order.size, + price=order.price, + oid=order.oid + ) + # thank god at least someone lets us do this XD + assert reqid == order.oid + + # track latest request state + dialogs[reqid].maps.append(msg) + except BrokerError as be: + await ems_order_stream.send( + BrokerdError( + oid=msg['oid'], + symbol=msg['symbol'], + reason=( + '`binance` request failed:\n' + f'{be}' + )) + ) + continue case _: account = msg.get('account') @@ -157,7 +174,7 @@ async def handle_order_requests( oid=msg['oid'], symbol=msg['symbol'], reason=( - 'Invalid `binance` broker request msg:\n{msg}' + f'Invalid `binance` broker request msg:\n{msg}' )) ) @@ -230,7 +247,7 @@ async def open_trade_dialog( # 'feeTier': 0} if 'account' in req: alias: str = resp['accountAlias'] - accounts['binance.usdtm_futes'] = alias + accounts['binance.usdtm'] = alias # @balance response: # {'accountAlias': 'sRFzFzAuuXsR', From 249d358737f6ed58a7eadfd498f58f3992800058 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 13:37:58 -0400 Subject: [PATCH 37/73] Woops, fix wss_url lookup depending on venue.. --- piker/brokers/binance/feed.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index 84e0914bb..66a0bff04 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -406,6 +406,7 @@ async def stream_quotes( async with ( send_chan as send_chan, + open_cached_client('binance') as client, ): init_msgs: list[FeedInit] = [] for sym in symbols: @@ -416,10 +417,15 @@ async def stream_quotes( FeedInit(mkt_info=mkt) ) + wss_url: str = get_api_eps(client.mkt_mode)[1] # 2nd elem is wss url + + # TODO: for sanity, but remove eventually Xp + if 'future' in mkt.type_key: + assert 'fstream' in wss_url + async with ( - open_cached_client('binance') as client, open_autorecon_ws( - url=get_api_eps(client.mkt_mode)[1], # 2nd elem is wss url + url=wss_url, fixture=partial( subscribe, symbols=[mkt.bs_mktid], From 60b0b721c5776a3e81efbc780cce964850679657 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 13:50:22 -0400 Subject: [PATCH 38/73] Split out crypto$ derivs into separate type set For crypto derivatives (at least futes), yes they are margined, but generally not around a single unit of vlm (like equities or commodities futes) so don't pre-set the order mode allocator to use a #unit limit, $limit is fine. 
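To make the intended allocator consequence concrete, the split roughly enables gating along these lines (the `default_size_unit()` helper here is purely illustrative, not the actual order-mode/allocator code):

    _crypto_derivs: list[str] = [
        'perpetual_future',
        'crypto_future',
    ]
    _derivs: list[str] = [
        'swap',
        'future',
        'continuous_future',
        'option',
        'futures_option',
    ]

    def default_size_unit(atype: str) -> str:
        # classic, per-contract-margined derivatives default to a
        # unit-count limit; crypto derivs (and spot/underlyings) are
        # sized by currency value instead.
        return 'units' if atype in _derivs else 'currency'

    assert default_size_unit('future') == 'units'
    assert default_size_unit('perpetual_future') == 'currency'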
--- piker/accounting/_mktinfo.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 1ac853691..c1f14f9fb 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -48,12 +48,15 @@ 'commodity', ] +_crypto_derivs: list[str] = [ + 'perpetual_future', + 'crypto_future', +] _derivs: list[str] = [ 'swap', 'future', 'continuous_future', - 'perpetual_future', 'option', 'futures_option', @@ -68,6 +71,8 @@ _underlyings + _derivs + + + _crypto_derivs ] # egs. stock, futer, option, bond etc. From 45ded4f2d155e946c39bf6eef384162023b6876d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 14:43:54 -0400 Subject: [PATCH 39/73] binance: order submission "user id" is not the same as their internal `int` one.. --- piker/brokers/binance/api.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index ae6573049..fc34ba29f 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -661,10 +661,12 @@ async def submit_limit( signed=True, action='post' ) - reqid: str = resp['orderId'] + + # ensure our id is tracked by them if oid: - assert oid == reqid + assert oid == resp['clientOrderId'] + reqid: str = resp['orderId'] return reqid async def submit_cancel( From 8a06e4d0732062c5b265108d5d568cb0f337f0b8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 14:45:45 -0400 Subject: [PATCH 40/73] Wrap dialog tracking in new `OrderDialogs` type, info log all user stream msgs --- piker/brokers/binance/broker.py | 98 +++++++++++++++++++++++++-------- 1 file changed, 75 insertions(+), 23 deletions(-) diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 2eff326f4..1926fdbab 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -22,7 +22,10 @@ ''' from __future__ import annotations -from collections import ChainMap +from collections import ( + ChainMap, + defaultdict, +) from pprint import pformat from typing import ( Any, @@ -42,6 +45,7 @@ from piker.brokers._util import ( get_logger, ) +from piker.data.types import Struct from piker.data._web_bs import ( open_autorecon_ws, NoBsWs, @@ -65,12 +69,38 @@ log = get_logger('piker.brokers.binance') +class OrderDialogs(Struct): + ''' + Order control dialog (and thus transaction) tracking via + message recording. + + Allows easily recording messages associated with a given set of + order control transactions and looking up the latest field + state using the entire (reverse chronological) msg flow. + + ''' + _dialogs: defaultdict[str, ChainMap] = defaultdict(ChainMap) + + def add_msg( + self, + oid: str, + msg: dict, + ) -> None: + self._dialogs[oid].maps.insert(0, msg) + + def get( + self, + oid: str, + field: str, + ) -> Any: + return self._dialogs[oid][field] + + async def handle_order_requests( ems_order_stream: tractor.MsgStream, client: Client, - - # TODO: update this from open orders loaded at boot! - dialogs: ChainMap[str, BrokerdOrder] = ChainMap(), + dids: bidict[str, str], + dialogs: OrderDialogs, ) -> None: ''' @@ -92,8 +122,11 @@ async def handle_order_requests( ) await ems_order_stream.send(BrokerdError( oid=cancel.oid, + # TODO: do we need the symbol? 
+ # https://github.com/pikers/piker/issues/514 symbol='unknown', + reason=( 'Invalid `binance` order request dialog oid', ) @@ -113,6 +146,7 @@ async def handle_order_requests( # validate order = BrokerdOrder(**msg) + oid: str = order.oid # emsd order id # NOTE: check and report edits if existing := dialogs.get(order.oid): @@ -123,16 +157,6 @@ async def handle_order_requests( # TODO: figure out what special params we have to send? # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade - # XXX: ACK the request **immediately** before sending - # the api side request to ensure the ems maps the oid -> - # reqid correctly! - resp = BrokerdOrderAck( - oid=order.oid, # ems order request id - reqid=order.oid, # our custom int mapping - account='binance', # piker account - ) - await ems_order_stream.send(resp) - # lookup the binance-native symbol bs_mktid: str = client._pairs[order.symbol.upper()].symbol @@ -143,13 +167,28 @@ async def handle_order_requests( side=order.action, quantity=order.size, price=order.price, - oid=order.oid + oid=oid, + ) + + # XXX: ACK the request **immediately** before sending + # the api side request to ensure the ems maps the oid -> + # reqid correctly! + resp = BrokerdOrderAck( + oid=oid, # ems order request id + reqid=reqid, # our custom int mapping + account='binance', # piker account ) - # thank god at least someone lets us do this XD - assert reqid == order.oid + await ems_order_stream.send(resp) + + # SMH they do gen their own order id: ints.. + # assert reqid == order.oid + dids[order.oid] = reqid + + # track latest request state such that map + # lookups start at the most recent msg and then + # scan reverse-chronologically. + dialogs.add_msg(msg) - # track latest request state - dialogs[reqid].maps.append(msg) except BrokerError as be: await ems_order_stream.send( BrokerdError( @@ -190,10 +229,6 @@ async def open_trade_dialog( await ctx.started('paper') return - # table: PpTable - # ledger: TransactionLedger - - # TODO: load pps and accounts using accounting apis! async with ( open_cached_client('binance') as client, ): @@ -323,6 +358,20 @@ async def open_trade_dialog( await ctx.started((positions, list(accounts))) + dialogs = OrderDialogs() + dids: dict[str, int] = bidict() + + # TODO: further init setup things to get full EMS and + # .accounting support B) + # - live order loading via user stream subscription and + # update to the order dialog table. + # - position loading using `piker.accounting` subsys + # and comparison with binance's own position calcs. 
+ # - load pps and accounts using accounting apis, write + # the ledger and account files + # - table: PpTable + # - ledger: TransactionLedger + async with ( trio.open_nursery() as tn, ctx.open_stream() as ems_stream, @@ -332,6 +381,8 @@ async def open_trade_dialog( handle_order_requests, ems_stream, client, + dids, + dialogs, ) tn.start_soon( handle_order_updates, @@ -368,6 +419,7 @@ async def handle_order_updates( ''' async for msg in wss: match msg: + log.info(f'Rx USERSTREAM msg:\n{pformat(msg)}') # TODO: # POSITION update From 09007cbf0822cd7646d8b46a8578f27834fecd88 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 16:06:17 -0400 Subject: [PATCH 41/73] Do native symbology lookup in order methods, send user oid in cancel requests --- piker/brokers/binance/api.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index fc34ba29f..15fd92aed 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -637,8 +637,10 @@ async def submit_limit( Submit a live limit order to ze binance. ''' + # lookup the binance-native symbol from search table + bs_mktid: str = self._pairs[symbol.upper()].symbol params: dict = OrderedDict([ - ('symbol', symbol.upper()), + ('symbol', bs_mktid), ('side', side.upper()), ('type', 'LIMIT'), ('timeInForce', tif), @@ -677,10 +679,11 @@ async def submit_cancel( recv_window: int = 60000 ) -> None: - symbol = symbol.upper() + bs_mktid: str = self._pairs[symbol.upper()].symbol params = OrderedDict([ - ('symbol', symbol), - ('orderId', oid), + ('symbol', bs_mktid), + # ('orderId', oid), + ('origClientOrderId', oid), ('recvWindow', recv_window), ('timestamp', binance_timestamp(now())) ]) From 3f555b2f5a398adba57d0e334b31d015207c5382 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 16:06:42 -0400 Subject: [PATCH 42/73] Fix user event matching Was using the wrong key before from our old code (not sure how that slipped back in.. prolly doing too many git stashes XD), so fix that to properly match against order update events with 'ORDER_TRADE_UPDATE'. Also, don't match on the types we want to *cast to*, that's not how match syntax works (facepalm), so we have to typecast prior to EMS msg creation / downstream logic. Further, - try not bothering with binance's own internal `'orderId'` field tracking since they seem to support just using your own user version for all ctl endpoints? (thus we only need to track the EMS `.oid`s B) - log all event update msgs for now. - pop order dialogs on 'closed' statuses. - wrap cancel requests in an error handler block since it seems the EMS is double sending requests from the client? --- piker/brokers/binance/broker.py | 148 ++++++++++++++++++++++++-------- 1 file changed, 111 insertions(+), 37 deletions(-) diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 1926fdbab..d43f2a99e 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -88,12 +88,16 @@ def add_msg( ) -> None: self._dialogs[oid].maps.insert(0, msg) + # TODO: wrap all this in the `collections.abc.Mapping` interface? def get( self, oid: str, - field: str, - ) -> Any: - return self._dialogs[oid][field] + ) -> ChainMap[str, Any]: + ''' + Return the dialog `ChainMap` for provided id. 
+ + ''' + return self._dialogs.get(oid, None) async def handle_order_requests( @@ -134,10 +138,23 @@ async def handle_order_requests( continue else: - await client.submit_cancel( - existing.symbol, - cancel.oid, - ) + symbol: str = existing['symbol'] + try: + await client.submit_cancel( + symbol, + cancel.oid, + ) + except BrokerError as be: + await ems_order_stream.send( + BrokerdError( + oid=msg['oid'], + symbol=symbol, + reason=( + '`binance` CANCEL failed:\n' + f'{be}' + )) + ) + continue case { 'account': ('binance.usdtm' | 'binance.spot') as account, @@ -151,35 +168,35 @@ async def handle_order_requests( # NOTE: check and report edits if existing := dialogs.get(order.oid): log.info( - f'Existing order for {existing.oid} updated:\n' - f'{pformat(existing.to_dict())} -> {pformat(msg)}' + f'Existing order for {oid} updated:\n' + f'{pformat(existing.maps[-1])} -> {pformat(msg)}' ) # TODO: figure out what special params we have to send? # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade # lookup the binance-native symbol - bs_mktid: str = client._pairs[order.symbol.upper()].symbol + # bs_mktid: str = client._pairs[order.symbol.upper()].symbol # call our client api to submit the order try: - reqid = await client.submit_limit( - symbol=bs_mktid, - side=order.action, - quantity=order.size, - price=order.price, - oid=oid, - ) - # XXX: ACK the request **immediately** before sending # the api side request to ensure the ems maps the oid -> # reqid correctly! resp = BrokerdOrderAck( oid=oid, # ems order request id - reqid=reqid, # our custom int mapping + reqid=oid, # our custom int mapping account='binance', # piker account ) await ems_order_stream.send(resp) + reqid = await client.submit_limit( + symbol=order.symbol, + side=order.action, + quantity=order.size, + price=order.price, + oid=oid, + ) + # SMH they do gen their own order id: ints.. # assert reqid == order.oid dids[order.oid] = reqid @@ -187,7 +204,7 @@ async def handle_order_requests( # track latest request state such that map # lookups start at the most recent msg and then # scan reverse-chronologically. - dialogs.add_msg(msg) + dialogs.add_msg(oid, msg) except BrokerError as be: await ems_order_stream.send( @@ -235,7 +252,7 @@ async def open_trade_dialog( client.mkt_mode: str = 'usdtm_futes' # if client. - account: str = client.mkt_mode + venue: str = client.mkt_mode wss: NoBsWs async with ( @@ -332,14 +349,14 @@ async def open_trade_dialog( pair: Pair | None if ( - pair := client._venue2pairs[account].get(bs_mktid) + pair := client._venue2pairs[venue].get(bs_mktid) and entry_size > 0 ): entry_price: float = float(entry['entryPrice']) ppmsg = BrokerdPosition( broker='binance', - account='binance.futes', + account='binance.usdtm', # TODO: maybe we should be passing back # a `MktPair` here? @@ -365,6 +382,9 @@ async def open_trade_dialog( # .accounting support B) # - live order loading via user stream subscription and # update to the order dialog table. + # - MAKE SURE we add live orders loaded during init + # into the dialogs table to ensure they can be + # cancelled, meaning we can do a symbol lookup. # - position loading using `piker.accounting` subsys # and comparison with binance's own position calcs. 
# - load pps and accounts using accounting apis, write @@ -388,6 +408,7 @@ async def open_trade_dialog( handle_order_updates, ems_stream, wss, + dialogs, ) @@ -397,6 +418,7 @@ async def open_trade_dialog( async def handle_order_updates( ems_stream: tractor.MsgStream, wss: NoBsWs, + dialogs: OrderDialogs, # apiflows: dict[int, ChainMap[dict[str, dict]]], # ids: bidict[str, int], @@ -418,20 +440,55 @@ async def handle_order_updates( ''' async for msg in wss: + log.info(f'Rx USERSTREAM msg:\n{pformat(msg)}') match msg: - log.info(f'Rx USERSTREAM msg:\n{pformat(msg)}') # TODO: # POSITION update - # futes: https://binance-docs.github.io/apidocs/futures/en/#event-balance-and-position-update # ORDER update # spot: https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update # futes: https://binance-docs.github.io/apidocs/futures/en/#event-order-update + # futes: https://binance-docs.github.io/apidocs/futures/en/#event-balance-and-position-update + # {'o': { + # 'L': '0', + # 'N': 'USDT', + # 'R': False, + # 'S': 'BUY', + # 'T': 1687028772484, + # 'X': 'NEW', + # 'a': '0', + # 'ap': '0', + # 'b': '7012.06520', + # 'c': '518d4122-8d3e-49b0-9a1e-1fabe6f62e4c', + # 'cp': False, + # 'f': 'GTC', + # 'i': 3376956924, + # 'l': '0', + # 'm': False, + # 'n': '0', + # 'o': 'LIMIT', + # 'ot': 'LIMIT', + # 'p': '21136.80', + # 'pP': False, + # 'ps': 'BOTH', + # 'q': '0.047', + # 'rp': '0', + # 's': 'BTCUSDT', + # 'si': 0, + # 'sp': '0', + # 'ss': 0, + # 't': 0, + # 'wt': 'CONTRACT_PRICE', + # 'x': 'NEW', + # 'z': '0'} + # } case { - 'e': 'executionReport', - 'T': float(epoch_ms), + # 'e': 'executionReport', + 'e': 'ORDER_TRADE_UPDATE', + 'T': int(epoch_ms), 'o': { + 'i': reqid, 's': bs_mktid, # XXX NOTE XXX see special ids for market @@ -444,22 +501,22 @@ async def handle_order_updates( 'c': oid, # prices - 'a': float(submit_price), - 'ap': float(avg_price), - 'L': float(fill_price), + 'a': submit_price, + 'ap': avg_price, + 'L': fill_price, # sizing - 'q': float(req_size), - 'l': float(clear_size_filled), # this event - 'z': float(accum_size_filled), # accum + 'q': req_size, + 'l': clear_size_filled, # this event + 'z': accum_size_filled, # accum # commissions - 'n': float(cost), - 'N': str(cost_asset), + 'n': cost, + 'N': cost_asset, # state - 'S': str(side), - 'X': str(status), + 'S': side, + 'X': status, }, } as order_msg: log.info( @@ -485,12 +542,18 @@ async def handle_order_updates( # - CANCELED # - EXPIRED # https://binance-docs.github.io/apidocs/futures/en/#event-order-update + + req_size: float = float(req_size) + accum_size_filled: float = float(accum_size_filled) + fill_price: float = float(fill_price) + match status: case 'PARTIALLY_FILLED' | 'FILLED': status = 'fill' fill_msg = BrokerdFill( time_ns=time_ns(), + # reqid=reqid, reqid=oid, # just use size value for now? 
@@ -507,21 +570,26 @@ async def handle_order_updates( if accum_size_filled == req_size: status = 'closed' + del dialogs._dialogs[oid] case 'NEW': status = 'open' case 'EXPIRED': status = 'canceled' + del dialogs._dialogs[oid] case _: status = status.lower() resp = BrokerdStatus( time_ns=time_ns(), + # reqid=reqid, reqid=oid, + # account='binance.usdtm', status=status, + filled=accum_size_filled, remaining=req_size - accum_size_filled, broker_details={ @@ -530,3 +598,9 @@ async def handle_order_updates( } ) await ems_stream.send(resp) + + case _: + log.warning( + 'Unhandled event:\n' + f'{pformat(msg)}' + ) From 4eeb232248e0dc513e9e0495da56c6cdeff91af2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 17:18:20 -0400 Subject: [PATCH 43/73] kraken: add more type annots in broker codez --- piker/brokers/kraken/broker.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 62477e9c1..86d30f41f 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -183,10 +183,10 @@ async def handle_order_requests( # logic from old `Client.submit_limit()` if order.oid in ids: - ep = 'editOrder' - reqid = ids[order.oid] # integer not txid + ep: str = 'editOrder' + reqid: int = ids[order.oid] # integer not txid try: - txid = reqids2txids[reqid] + txid: str = reqids2txids[reqid] except KeyError: # XXX: not sure if this block ever gets hit now? log.error('TOO FAST EDIT') @@ -208,7 +208,7 @@ async def handle_order_requests( } else: - ep = 'addOrder' + ep: str = 'addOrder' reqid = BrokerClient.new_reqid() ids[order.oid] = reqid @@ -221,8 +221,8 @@ async def handle_order_requests( 'type': order.action, } - psym = order.symbol.upper() - pair = f'{psym[:3]}/{psym[3:]}' + psym: str = order.symbol.upper() + pair: str = f'{psym[:3]}/{psym[3:]}' # XXX: ACK the request **immediately** before sending # the api side request to ensure the ems maps the oid -> @@ -423,7 +423,6 @@ async def open_trade_dialog( ) -> AsyncIterator[dict[str, Any]]: async with get_client() as client: - # make ems flip to paper mode when no creds setup in # `brokers.toml` B0 if not client._api_key: @@ -867,8 +866,9 @@ async def handle_order_updates( # 'vol_exec': exec_vlm} # 0.0000 match update_msg: - # EMS-unknown LIVE order that needs to be - # delivered and loaded on the client-side. + # EMS-unknown pre-exising-submitted LIVE + # order that needs to be delivered and + # loaded on the client-side. case { 'userref': reqid, 'descr': { From 572badb4d847155007c603cfd71231def6c3a18b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 18:00:23 -0400 Subject: [PATCH 44/73] Add full real-time position update support B) There was one trick which was that it seems that binance will often send the account/position update event over the user stream *before* the actual clearing (aka FILLED) order update event, so make sure we put an entry in the `dialogs: OrderDialogs` as soon as an order request comes in such that even if the account update arrives first the `BrokerdPosition` msg can be relayed without delay / order event order considerations. 
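To make that ordering trick concrete, here is a tiny self-contained sketch of the pattern (stand-in code only, not the actual piker machinery: `submit()`, `on_account_update()` and the msg dicts are made up for illustration). The point is just that the dialog entry is written synchronously, before any await point, so a racing position event can always resolve its order context:

    import trio

    dialogs: dict[str, dict] = {}  # oid -> latest request msg

    async def submit(req: dict) -> None:
        # track the dialog FIRST, synchronously ..
        dialogs[req['oid']] = req
        # .. THEN do the (stand-in) ReST submit / fill-ack round trip
        await trio.sleep(0.1)

    async def on_account_update(event: dict) -> None:
        # venue pushes the position event before the fill ack (above at
        # 0.1s) but after the request was recorded locally
        await trio.sleep(0.01)
        req = dialogs.get(event['oid'])
        print('position update for', req['symbol'] if req else '<unknown>')

    async def main() -> None:
        req = {'oid': 'abc123', 'symbol': 'btcusdt.usdtm.perp', 'size': 0.01}
        async with trio.open_nursery() as tn:
            tn.start_soon(submit, req)
            tn.start_soon(on_account_update, {'oid': 'abc123'})

    trio.run(main)
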
--- piker/brokers/binance/broker.py | 103 +++++++++++++++++++++----------- 1 file changed, 69 insertions(+), 34 deletions(-) diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index d43f2a99e..10248f9f3 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -174,21 +174,23 @@ async def handle_order_requests( # TODO: figure out what special params we have to send? # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade - # lookup the binance-native symbol - # bs_mktid: str = client._pairs[order.symbol.upper()].symbol + # track latest request state such that map + # lookups start at the most recent msg and then + # scan reverse-chronologically. + dialogs.add_msg(oid, msg) + + # XXX: ACK the request **immediately** before sending + # the api side request to ensure the ems maps the oid -> + # reqid correctly! + resp = BrokerdOrderAck( + oid=oid, # ems order request id + reqid=oid, # our custom int mapping + account='binance', # piker account + ) + await ems_order_stream.send(resp) # call our client api to submit the order try: - # XXX: ACK the request **immediately** before sending - # the api side request to ensure the ems maps the oid -> - # reqid correctly! - resp = BrokerdOrderAck( - oid=oid, # ems order request id - reqid=oid, # our custom int mapping - account='binance', # piker account - ) - await ems_order_stream.send(resp) - reqid = await client.submit_limit( symbol=order.symbol, side=order.action, @@ -201,11 +203,6 @@ async def handle_order_requests( # assert reqid == order.oid dids[order.oid] = reqid - # track latest request state such that map - # lookups start at the most recent msg and then - # scan reverse-chronologically. - dialogs.add_msg(oid, msg) - except BrokerError as be: await ems_order_stream.send( BrokerdError( @@ -347,9 +344,9 @@ async def open_trade_dialog( bs_mktid: str = entry['symbol'] entry_size: float = float(entry['positionAmt']) - pair: Pair | None + pair: Pair | None = client._venue2pairs[venue].get(bs_mktid) if ( - pair := client._venue2pairs[venue].get(bs_mktid) + pair and entry_size > 0 ): entry_price: float = float(entry['entryPrice']) @@ -406,6 +403,8 @@ async def open_trade_dialog( ) tn.start_soon( handle_order_updates, + venue, + client, ems_stream, wss, dialogs, @@ -416,21 +415,12 @@ async def open_trade_dialog( async def handle_order_updates( + venue: str, + client: Client, ems_stream: tractor.MsgStream, wss: NoBsWs, dialogs: OrderDialogs, - # apiflows: dict[int, ChainMap[dict[str, dict]]], - # ids: bidict[str, int], - # reqids2txids: bidict[int, str], - - # table: PpTable, - # ledger_trans: dict[str, Transaction], - - # acctid: str, - # acc_name: str, - # token: str, - ) -> None: ''' Main msg handling loop for all things order management. 
@@ -443,9 +433,6 @@ async def handle_order_updates( log.info(f'Rx USERSTREAM msg:\n{pformat(msg)}') match msg: - # TODO: - # POSITION update - # ORDER update # spot: https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update # futes: https://binance-docs.github.io/apidocs/futures/en/#event-order-update @@ -488,7 +475,6 @@ async def handle_order_updates( 'e': 'ORDER_TRADE_UPDATE', 'T': int(epoch_ms), 'o': { - 'i': reqid, 's': bs_mktid, # XXX NOTE XXX see special ids for market @@ -499,6 +485,7 @@ async def handle_order_updates( # // "settlement_autoclose-": settlement order # for delisting or delivery 'c': oid, + # 'i': reqid, # binance internal int id # prices 'a': submit_price, @@ -579,6 +566,8 @@ async def handle_order_updates( status = 'canceled' del dialogs._dialogs[oid] + # case 'TRADE': + case _: status = status.lower() @@ -599,6 +588,52 @@ async def handle_order_updates( ) await ems_stream.send(resp) + # ACCOUNT and POSITION update B) + # { + # 'E': 1687036749218, + # 'e': 'ACCOUNT_UPDATE' + # 'T': 1687036749215, + # 'a': {'B': [{'a': 'USDT', + # 'bc': '0', + # 'cw': '1267.48920735', + # 'wb': '1410.90245576'}], + # 'P': [{'cr': '-3292.10973007', + # 'ep': '26349.90000', + # 'iw': '143.41324841', + # 'ma': 'USDT', + # 'mt': 'isolated', + # 'pa': '0.038', + # 'ps': 'BOTH', + # 's': 'BTCUSDT', + # 'up': '5.17555453'}], + # 'm': 'ORDER'}, + # } + case { + 'T': int(epoch_ms), + 'e': 'ACCOUNT_UPDATE', + 'a': { + 'P': [{ + 's': bs_mktid, + 'pa': pos_amount, + 'ep': entry_price, + }], + }, + }: + # real-time relay position updates back to EMS + pair: Pair | None = client._venue2pairs[venue].get(bs_mktid) + ppmsg = BrokerdPosition( + broker='binance', + account='binance.usdtm', + + # TODO: maybe we should be passing back + # a `MktPair` here? + symbol=pair.bs_fqme.lower() + '.binance', + + size=float(pos_amount), + avg_price=float(entry_price), + ) + await ems_stream.send(ppmsg) + case _: log.warning( 'Unhandled event:\n' From 6eee6ead79e8b4673ed81192a62f786ce488dba3 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 17 Jun 2023 18:48:35 -0400 Subject: [PATCH 45/73] binance: add accounts def to `brokers.toml` template --- config/brokers.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/config/brokers.toml b/config/brokers.toml index 96c67b45d..675af9409 100644 --- a/config/brokers.toml +++ b/config/brokers.toml @@ -2,10 +2,12 @@ # ---- CEXY ---- ################ [binance] +accounts.usdtm = 'futes' futes.use_testnet = true futes.api_key = '' futes.api_secret = '' +accounts.spot = 'spot' spot.use_testnet = true spot.api_key = '' spot.api_secret = '' From dc3ac8de01b57b552ca54b89889cb27f8e5e92b1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 18 Jun 2023 19:51:13 -0400 Subject: [PATCH 46/73] binance: support order "modifies" B) Only a couple tweaks to make this work according to the docs: https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade - use a PUT request. - provide the original user id in a `'origClientOrderId'` msg field. - don't expect the same oid in the PUT response. Other broker-mode related details: - don't call `OrderDialogs.add_msg()` until after the existing check since we want to check against the *last* msgs contents not the new request. - ensure we pass the `modify=True` flag in the edit case. 
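Roughly speaking the submit-vs-modify split boils down to a param + HTTP verb switch like the following sketch (it mirrors the diff below; the remaining order fields are elided and the values are illustrative only):

    def order_req(oid: str, modify: bool) -> tuple[str, dict]:
        params: dict = {'symbol': 'BTCUSDT', 'side': 'BUY'}  # qty, price, ts.. elided
        if modify:
            # edits go out as an HTTP PUT and reference the *original* user oid
            params['origClientOrderId'] = oid
            return 'put', params

        # fresh submits are POSTs carrying our custom client order id
        params['newClientOrderId'] = oid
        return 'post', params

    print(order_req('abc123', modify=False)[0])  # -> post
    print(order_req('abc123', modify=True)[0])   # -> put
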
--- piker/brokers/binance/api.py | 40 +++++++++++++++++++++++++------ piker/brokers/binance/broker.py | 42 ++++++++++++++++++++------------- piker/brokers/binance/venues.py | 5 ++-- 3 files changed, 61 insertions(+), 26 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 15fd92aed..1fbe89ba3 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -628,13 +628,24 @@ async def submit_limit( oid: int | None = None, tif: str = 'GTC', - recv_window: int = 60000 + recv_window: int = 60000, + # iceberg_quantity: float | None = None, - # order_resp_type: str | None = None, + resp_type: str = 'ACK', + + # TODO: this is probably useful for doing stops, maybe we + # can set it only on dark-stops? + # close_all: bool = False, + + modify: bool = False, ) -> str: ''' - Submit a live limit order to ze binance. + Submit or modify a live limit order to ze binance. + + For modify see: + - spot: + - futes https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade ''' # lookup the binance-native symbol from search table @@ -647,10 +658,22 @@ async def submit_limit( ('quantity', quantity), ('price', price), ('recvWindow', recv_window), - ('newOrderRespType', 'ACK'), + ('newOrderRespType', resp_type), ('timestamp', binance_timestamp(now())) + + # ('closeAll', close_all), ]) - if oid: + + action: str = 'post' + + # NOTE: modifies only require diff key for user oid: + # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade + if modify: + assert oid + params['origClientOrderId'] = oid + action: str = 'put' + + elif oid: params['newClientOrderId'] = oid log.info( @@ -661,11 +684,14 @@ async def submit_limit( 'order', params=params, signed=True, - action='post' + action=action, ) # ensure our id is tracked by them - if oid: + if ( + oid + and not modify + ): assert oid == resp['clientOrderId'] reqid: str = resp['orderId'] diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 10248f9f3..0145c7f56 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -164,6 +164,7 @@ async def handle_order_requests( # validate order = BrokerdOrder(**msg) oid: str = order.oid # emsd order id + modify: bool = False # NOTE: check and report edits if existing := dialogs.get(order.oid): @@ -171,25 +172,31 @@ async def handle_order_requests( f'Existing order for {oid} updated:\n' f'{pformat(existing.maps[-1])} -> {pformat(msg)}' ) - # TODO: figure out what special params we have to send? - # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade - - # track latest request state such that map - # lookups start at the most recent msg and then - # scan reverse-chronologically. - dialogs.add_msg(oid, msg) - - # XXX: ACK the request **immediately** before sending - # the api side request to ensure the ems maps the oid -> - # reqid correctly! - resp = BrokerdOrderAck( - oid=oid, # ems order request id - reqid=oid, # our custom int mapping - account='binance', # piker account - ) - await ems_order_stream.send(resp) + modify = True + + # only add new msg AFTER the existing check + dialogs.add_msg(oid, msg) + + else: + # XXX NOTE: update before the ack! + # track latest request state such that map + # lookups start at the most recent msg and then + # scan reverse-chronologically. + dialogs.add_msg(oid, msg) + + # XXX: ACK the request **immediately** before sending + # the api side request to ensure the ems maps the oid -> + # reqid correctly! 
+ resp = BrokerdOrderAck( + oid=oid, # ems order request id + reqid=oid, # our custom int mapping + account='binance', # piker account + ) + await ems_order_stream.send(resp) # call our client api to submit the order + # NOTE: modifies only require diff key for user oid: + # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade try: reqid = await client.submit_limit( symbol=order.symbol, @@ -197,6 +204,7 @@ async def handle_order_requests( quantity=order.size, price=order.price, oid=oid, + modify=modify, ) # SMH they do gen their own order id: ints.. diff --git a/piker/brokers/binance/venues.py b/piker/brokers/binance/venues.py index 4b530122b..2de73185c 100644 --- a/piker/brokers/binance/venues.py +++ b/piker/brokers/binance/venues.py @@ -43,8 +43,6 @@ _spot_ws: str = 'wss://stream.binance.com/ws' # 'wss://ws-api.binance.com:443/ws-api/v3', -# NOTE: spot test network only allows certain ep sets: -# https://testnet.binance.vision/ _testnet_spot_ws: str = 'wss://testnet.binance.vision/ws-api/v3' # https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams @@ -52,6 +50,9 @@ _auth_futes_ws: str = 'wss://fstream-auth.{_domain}/ws/' # test nets +# NOTE: spot test network only allows certain ep sets: +# https://testnet.binance.vision/ +# https://www.binance.com/en/support/faq/how-to-test-my-functions-on-binance-testnet-ab78f9a1b8824cf0a106b4229c76496d _testnet_spot_url: str = 'https://testnet.binance.vision/api' _testnet_spot_ws: str = 'wss://testnet.binance.vision/ws' From 5c315ba1638caea32f03f92dbb704c9e26573365 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 19 Jun 2023 11:04:38 -0400 Subject: [PATCH 47/73] Support live order loading (with caveats) As you'd expect query and sync the EMS with existing live orders reported by the market venue by packing them in `Status` msgs and sending over the order dialog stream before starting the handler tasks. XXX CAVEAT: - there appears to be no way (at least on the usdtm market/venue) to distinguish between different contracts such as perps vs. the quarterlies? - for now we just assume that the perp is being used since there's no indicator otherwise in the 'symbol' field? - we should maybe open an issue with the futures-connector project to see how they'd recommend solving this discrepancy? --- piker/brokers/binance/api.py | 68 +++++++++++++++++++++++++++++++-- piker/brokers/binance/broker.py | 66 ++++++++++++++++++++++++++++---- 2 files changed, 123 insertions(+), 11 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 1fbe89ba3..481c427cc 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -49,6 +49,9 @@ import numpy as np from piker import config +from piker.clearing._messages import ( + Order, +) from piker.accounting import ( Asset, digits_to_dec, @@ -378,9 +381,6 @@ async def _cache_pairs( raise SymbolNotFound(f'No market pairs found!?:\n{resp}') pairs_view_subtable: dict[str, Pair] = {} - # if venue == 'spot': - # import tractor - # await tractor.breakpoint() for item in mkt_pairs: filters_ls: list = item.pop('filters', False) @@ -619,6 +619,68 @@ async def get_withdrawls( signed=True, ) + async def get_open_orders( + self, + symbol: str | None = None, + + ) -> list[Order]: + ''' + Get all open orders for venue-account. 
+ + WARNING: apparently not specifying the symbol is given + a much heavier API "weight" meaning you shouldn't call it + often to avoid getting throttled as per: + + 'https://binance-docs.github.io/apidocs/futures/en/#current-all-open-orders-user_data + + + ''' + params: dict[str, Any] = { + 'timestamp': binance_timestamp(now()), + } + if symbol is not None: + params['symbol'] = symbol + + resp = await self.mkt_mode_req[self.mkt_mode]( + 'openOrders', + params=params, + signed=True, + action='get', + ) + orders: list[Order] = [] + for entry in resp: + oid: str = entry['clientOrderId'] + + # XXX TODO XXX: it appears as though entries have no + # indicator from the symbology system which market + # / venue the order is from.. which normally isn't + # a huge deal since you could assume based on the + # endpoint you made the request to, BUT the futes USD-M + # endpoints have multiple contracts for the same + # symbols (eg. BTCUSDT.PERP, BTCUSDT.230630.. etc.) + # NOTE: for now until we have a better system we're + # going to assume orders that don't have some kind of + # further info in the order resp dict are perps though + # likely this will need to change in the future.. + venue: str = self.mkt_mode.rstrip('_futes') + bs_mktid: str = entry['symbol'] + fqme: str = f'{bs_mktid.lower()}.{venue}.perp' + + orders.append( + Order( + oid=oid, + symbol=fqme, + + action=entry['side'].lower(), + price=float(entry['price']), + size=float(entry['origQty']), + + exec_mode='live', + account=f'binance.{venue}', + ) + ) + return orders + async def submit_limit( self, symbol: str, diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 0145c7f56..dfc8373c5 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -62,6 +62,8 @@ BrokerdFill, BrokerdCancel, BrokerdError, + Status, + Order, ) from .venues import Pair from .api import Client @@ -69,6 +71,10 @@ log = get_logger('piker.brokers.binance') +# TODO: factor this into `.clearing._util` (or something) +# and use in other backends like kraken which currently has +# a less formalized version more or less: +# `apiflows[reqid].maps.append(status_msg.to_dict())` class OrderDialogs(Struct): ''' Order control dialog (and thus transaction) tracking via @@ -79,25 +85,49 @@ class OrderDialogs(Struct): state using the entire (reverse chronological) msg flow. ''' - _dialogs: defaultdict[str, ChainMap] = defaultdict(ChainMap) + _flows: dict[str, ChainMap] = {} def add_msg( self, oid: str, msg: dict, ) -> None: - self._dialogs[oid].maps.insert(0, msg) + + # NOTE: manually enter a new map on the first msg add to + # avoid creating one with an empty dict first entry in + # `ChainMap.maps` which is the default if none passed at + # init. + cm: ChainMap = self._flows.get(oid) + if cm: + cm.maps.insert(0, msg) + else: + cm = ChainMap(msg) + self._flows[oid] = cm # TODO: wrap all this in the `collections.abc.Mapping` interface? def get( self, oid: str, + ) -> ChainMap[str, Any]: ''' Return the dialog `ChainMap` for provided id. ''' - return self._dialogs.get(oid, None) + return self._flows.get(oid, None) + + def pop( + self, + oid: str, + + ) -> ChainMap[str, Any]: + ''' + Pop and thus remove the `ChainMap` containing the msg flow + for the given order id. + + ''' + return self._flows.pop(oid) + async def handle_order_requests( @@ -277,11 +307,15 @@ async def open_trade_dialog( f"{listen_key}@account", f"{listen_key}@balance", f"{listen_key}@position", + + # TODO: does this even work!? 
seems to cause + # a hang on the first msg..? lelelel. + # f"{listen_key}@order", ], "id": nsid }) - with trio.fail_after(1): + with trio.fail_after(6): msg = await wss.recv_msg() assert msg['id'] == nsid @@ -401,6 +435,24 @@ async def open_trade_dialog( trio.open_nursery() as tn, ctx.open_stream() as ems_stream, ): + # deliver all pre-exist open orders to EMS thus syncing + # state with the binance existing live limit set. + open_orders: list[Order] = await client.get_open_orders() + + # fill out `Status` with boxed `Order`s and sync the EMS. + for order in open_orders: + status_msg = Status( + time_ns=time.time_ns(), + resp='open', + oid=order.oid, + reqid=order.oid, + + # embedded order info + req=order, + src='binance', + ) + dialogs.add_msg(order.oid, order.to_dict()) + await ems_stream.send(status_msg) tn.start_soon( handle_order_requests, @@ -565,16 +617,14 @@ async def handle_order_updates( if accum_size_filled == req_size: status = 'closed' - del dialogs._dialogs[oid] + dialogs.pop(oid) case 'NEW': status = 'open' case 'EXPIRED': status = 'canceled' - del dialogs._dialogs[oid] - - # case 'TRADE': + dialogs.pop(oid) case _: status = status.lower() From 7f39de59d4e427db21c99152071456139f3acbcf Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 19 Jun 2023 11:44:28 -0400 Subject: [PATCH 48/73] Factor `OrderDialogs` into `.clearing._util` It's finally a decent little design / interface and definitely can be used in other backends like `kraken` which rolled something lower level but more or less the same without a wrapper class. --- piker/brokers/binance/broker.py | 65 +-------------------------------- piker/clearing/__init__.py | 2 + piker/clearing/_util.py | 63 ++++++++++++++++++++++++++++++++ 3 files changed, 66 insertions(+), 64 deletions(-) diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index dfc8373c5..6011aa9dd 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -22,10 +22,6 @@ ''' from __future__ import annotations -from collections import ( - ChainMap, - defaultdict, -) from pprint import pformat from typing import ( Any, @@ -45,7 +41,6 @@ from piker.brokers._util import ( get_logger, ) -from piker.data.types import Struct from piker.data._web_bs import ( open_autorecon_ws, NoBsWs, @@ -54,6 +49,7 @@ open_cached_client, BrokerError, ) +from piker.clearing import OrderDialogs from piker.clearing._messages import ( BrokerdOrder, BrokerdOrderAck, @@ -71,65 +67,6 @@ log = get_logger('piker.brokers.binance') -# TODO: factor this into `.clearing._util` (or something) -# and use in other backends like kraken which currently has -# a less formalized version more or less: -# `apiflows[reqid].maps.append(status_msg.to_dict())` -class OrderDialogs(Struct): - ''' - Order control dialog (and thus transaction) tracking via - message recording. - - Allows easily recording messages associated with a given set of - order control transactions and looking up the latest field - state using the entire (reverse chronological) msg flow. - - ''' - _flows: dict[str, ChainMap] = {} - - def add_msg( - self, - oid: str, - msg: dict, - ) -> None: - - # NOTE: manually enter a new map on the first msg add to - # avoid creating one with an empty dict first entry in - # `ChainMap.maps` which is the default if none passed at - # init. - cm: ChainMap = self._flows.get(oid) - if cm: - cm.maps.insert(0, msg) - else: - cm = ChainMap(msg) - self._flows[oid] = cm - - # TODO: wrap all this in the `collections.abc.Mapping` interface? 
- def get( - self, - oid: str, - - ) -> ChainMap[str, Any]: - ''' - Return the dialog `ChainMap` for provided id. - - ''' - return self._flows.get(oid, None) - - def pop( - self, - oid: str, - - ) -> ChainMap[str, Any]: - ''' - Pop and thus remove the `ChainMap` containing the msg flow - for the given order id. - - ''' - return self._flows.pop(oid) - - - async def handle_order_requests( ems_order_stream: tractor.MsgStream, client: Client, diff --git a/piker/clearing/__init__.py b/piker/clearing/__init__.py index ec796ac9d..19d6390f2 100644 --- a/piker/clearing/__init__.py +++ b/piker/clearing/__init__.py @@ -26,12 +26,14 @@ from ._ems import ( open_brokerd_dialog, ) +from ._util import OrderDialogs __all__ = [ 'open_ems', 'OrderClient', 'open_brokerd_dialog', + 'OrderDialogs', ] diff --git a/piker/clearing/_util.py b/piker/clearing/_util.py index ec93512dc..9015ba69b 100644 --- a/piker/clearing/_util.py +++ b/piker/clearing/_util.py @@ -17,12 +17,15 @@ Sub-sys module commons. """ +from collections import ChainMap from functools import partial +from typing import Any from ..log import ( get_logger, get_console_log, ) +from piker.data.types import Struct subsys: str = 'piker.clearing' log = get_logger(subsys) @@ -31,3 +34,63 @@ get_console_log, name=subsys, ) + + +# TODO: use this in other backends like kraken which currently has +# a less formalized version more or less: +# `apiflows[reqid].maps.append(status_msg.to_dict())` +class OrderDialogs(Struct): + ''' + Order control dialog (and thus transaction) tracking via + message recording. + + Allows easily recording messages associated with a given set of + order control transactions and looking up the latest field + state using the entire (reverse chronological) msg flow. + + ''' + _flows: dict[str, ChainMap] = {} + + def add_msg( + self, + oid: str, + msg: dict, + ) -> None: + + # NOTE: manually enter a new map on the first msg add to + # avoid creating one with an empty dict first entry in + # `ChainMap.maps` which is the default if none passed at + # init. + cm: ChainMap = self._flows.get(oid) + if cm: + cm.maps.insert(0, msg) + else: + cm = ChainMap(msg) + self._flows[oid] = cm + + # TODO: wrap all this in the `collections.abc.Mapping` interface? + def get( + self, + oid: str, + + ) -> ChainMap[str, Any]: + ''' + Return the dialog `ChainMap` for provided id. + + ''' + return self._flows.get(oid, None) + + def pop( + self, + oid: str, + + ) -> ChainMap[str, Any]: + ''' + Pop and thus remove the `ChainMap` containing the msg flow + for the given order id. + + ''' + return self._flows.pop(oid) + + + From 77db2fa7c8160311007cb2c45c7f038503b97bb8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 19 Jun 2023 12:30:43 -0400 Subject: [PATCH 49/73] Support loading quarterly futes existing lives Do parsing of the `'symbol'` and check for an `_` suffix, in which case we re-format in capitalized FQME style, do the `Client._pairs[str, Pair]` lookup and then send the `Pair.bs_fqme` in the `Order.fqme: str` field. --- piker/brokers/binance/api.py | 33 ++++++++++++++++++--------------- piker/brokers/binance/broker.py | 8 +++----- 2 files changed, 21 insertions(+), 20 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 481c427cc..dc3482cc4 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -647,29 +647,32 @@ async def get_open_orders( signed=True, action='get', ) + # figure out which venue (in FQME terms) we're using + # since that normally maps 1-to-1 with the account (right?) 
+ venue: str = self.mkt_mode.rstrip('_futes') + orders: list[Order] = [] for entry in resp: oid: str = entry['clientOrderId'] + symbol: str = entry['symbol'] + + # build out a fqme-styled key that should map to a pair + # entry in `._pairs` cross-venue table. + bs_mktid, _, expiry = entry['symbol'].partition('_') + bs_mktid += f'.{venue.upper()}' + + if expiry: + bs_mktid += f'.{expiry}' + else: + bs_mktid += '.PERP' - # XXX TODO XXX: it appears as though entries have no - # indicator from the symbology system which market - # / venue the order is from.. which normally isn't - # a huge deal since you could assume based on the - # endpoint you made the request to, BUT the futes USD-M - # endpoints have multiple contracts for the same - # symbols (eg. BTCUSDT.PERP, BTCUSDT.230630.. etc.) - # NOTE: for now until we have a better system we're - # going to assume orders that don't have some kind of - # further info in the order resp dict are perps though - # likely this will need to change in the future.. - venue: str = self.mkt_mode.rstrip('_futes') - bs_mktid: str = entry['symbol'] - fqme: str = f'{bs_mktid.lower()}.{venue}.perp' + # should never key error if we've got it right B) + pair: Pair = self._pairs[bs_mktid] orders.append( Order( oid=oid, - symbol=fqme, + symbol=pair.bs_fqme.lower(), action=entry['side'].lower(), price=float(entry['price']), diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 6011aa9dd..776708166 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -373,11 +373,9 @@ async def open_trade_dialog( ctx.open_stream() as ems_stream, ): # deliver all pre-exist open orders to EMS thus syncing - # state with the binance existing live limit set. - open_orders: list[Order] = await client.get_open_orders() - - # fill out `Status` with boxed `Order`s and sync the EMS. - for order in open_orders: + # state with existing live limits reported by them. 
+ order: Order + for order in await client.get_open_orders(): status_msg = Status( time_ns=time.time_ns(), resp='open', From fe902c017b35080190f45b24b1242b94e4dc7a5b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 19 Jun 2023 16:17:42 -0400 Subject: [PATCH 50/73] Drop `OrderedDict` usage, not necessary in modern python --- piker/brokers/binance/api.py | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index dc3482cc4..0b3c6973e 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -22,10 +22,7 @@ """ from __future__ import annotations -from collections import ( - OrderedDict, - ChainMap, -) +from collections import ChainMap from contextlib import ( asynccontextmanager as acm, ) @@ -234,7 +231,7 @@ def __init__( # 'futes_coin': self._dapi, # TODO } - def _mk_sig(self, data: OrderedDict) -> str: + def _mk_sig(self, data: dict) -> str: # XXX: Info on security and authentification # https://binance-docs.github.io/apidocs/#endpoint-security-type @@ -264,7 +261,7 @@ def _mk_sig(self, data: OrderedDict) -> str: async def _api( self, method: str, - params: dict | OrderedDict, + params: dict, signed: bool = False, action: str = 'get' @@ -292,7 +289,7 @@ async def _api( async def _fapi( self, method: str, - params: dict | OrderedDict, + params: dict, signed: bool = False, action: str = 'get', testnet: bool = True, @@ -336,7 +333,7 @@ async def _fapi( async def _sapi( self, method: str, - params: dict | OrderedDict, + params: dict, signed: bool = False, action: str = 'get' @@ -571,7 +568,7 @@ async def get_positions( for sym in self.watchlist: log.info(f'doing {sym}...') - params = OrderedDict([ + params = dict([ ('symbol', sym), ('recvWindow', recv_window), ('timestamp', binance_timestamp(now())) @@ -593,7 +590,7 @@ async def get_deposits( # TODO: can't we drop this since normal dicts are # ordered implicitly in mordern python? - params = OrderedDict([ + params = dict([ ('recvWindow', recv_window), ('timestamp', binance_timestamp(now())) ]) @@ -609,7 +606,7 @@ async def get_withdrawls( ) -> list: - params = OrderedDict([ + params = dict([ ('recvWindow', recv_window), ('timestamp', binance_timestamp(now())) ]) @@ -715,7 +712,7 @@ async def submit_limit( ''' # lookup the binance-native symbol from search table bs_mktid: str = self._pairs[symbol.upper()].symbol - params: dict = OrderedDict([ + params: dict = dict([ ('symbol', bs_mktid), ('side', side.upper()), ('type', 'LIMIT'), @@ -771,7 +768,7 @@ async def submit_cancel( ) -> None: bs_mktid: str = self._pairs[symbol.upper()].symbol - params = OrderedDict([ + params = dict([ ('symbol', bs_mktid), # ('orderId', oid), ('origClientOrderId', oid), From 9970fa89ee2770db81096508857c8b43fe422aa4 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 19 Jun 2023 17:59:40 -0400 Subject: [PATCH 51/73] Drop per-venue request methods from `Client` Use dynamic lookups instead by mapping to the correct http session and endpoints path using the venue routing/mode key. 
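In other words the per-request routing becomes a plain table lookup plus a `getattr()` dispatch on the session, something like this toy version (stub sessions and fake base urls stand in for the real `asks.Session` instances and binance endpoints):

    import trio

    class StubSesh:
        def __init__(self, base: str) -> None:
            self.base = base

        async def get(self, path: str, params: dict) -> str:
            return f'GET {self.base}{path}'

    venue_sesh: dict[str, tuple[StubSesh, str]] = {
        'spot': (StubSesh('https://spot.example'), '/api/v3/'),
        'usdtm_futes': (StubSesh('https://futes.example'), '/fapi/v1/'),
    }

    async def api(endpoint: str, venue: str, method: str = 'get') -> str:
        sesh, path = venue_sesh[venue]
        meth = getattr(sesh, method)  # 'get' / 'post' / 'put' / 'delete'
        return await meth(path + endpoint, params={})

    print(trio.run(api, 'exchangeInfo', 'usdtm_futes'))
    # -> GET https://futes.example/fapi/v1/exchangeInfo
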
This let's us simplify from 3 methods down to a single `Client._api()` which either can be passed the `venue: str` explicitly by the caller (as is needed in the `._cache_pairs()` case) or falls back to the client's current `.mkt_mode: str` setting B) Deatz: - add couple more tables to suffice all authed-endpoint use cases: - `.venue2configkey: dict[str, str]` which maps the venue key to the `brokers.toml` subsection which should be used for auth creds and testnet config. - `.confkey2venuekeys: dict[str, list[str]]` which maps each config subsection key to the list of venue name keys for doing config to venues lookup. - always build out testnet sessions for spot and futes venues (though if not set the sessions obviously won't ever be used). - add and use new `config.ConfigurationError` custom exceptions when api creds are missing. - rename `action: str` to `method: str` in `._api()` since it's the proper ReST term and switch what was "method" to be `endpoint: str`. - mask out `.get_positions()` since we can get that from a user stream wss request (and are doing that). - (in theory) import and use spot testnet url as necessary. --- piker/brokers/__init__.py | 2 + piker/brokers/binance/api.py | 340 +++++++++++++++++--------------- piker/brokers/binance/broker.py | 8 +- piker/brokers/binance/venues.py | 4 +- piker/config.py | 6 +- 5 files changed, 202 insertions(+), 158 deletions(-) diff --git a/piker/brokers/__init__.py b/piker/brokers/__init__.py index 986905e2c..87a0446ad 100644 --- a/piker/brokers/__init__.py +++ b/piker/brokers/__init__.py @@ -33,6 +33,7 @@ DataUnavailable, DataThrottle, resproc, + get_logger, ) __all__: list[str] = [ @@ -42,6 +43,7 @@ 'DataUnavailable', 'DataThrottle', 'resproc', + 'get_logger', ] __brokers__: list[str] = [ diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 0b3c6973e..7c4324df6 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -37,6 +37,7 @@ import hashlib from pathlib import Path +from bidict import bidict import trio from pendulum import ( now, @@ -55,7 +56,7 @@ ) from piker.data.types import Struct from piker.data import def_iohlcv_fields -from piker.brokers._util import ( +from piker.brokers import ( resproc, SymbolNotFound, get_logger, @@ -67,8 +68,8 @@ _spot_url, _futes_url, - _testnet_futes_url, + _testnet_spot_url, ) log = get_logger('piker.brokers.binance') @@ -181,6 +182,9 @@ def __init__( # spot EPs sesh self._sesh = asks.Session(connections=4) self._sesh.base_location: str = _spot_url + # spot testnet + self._test_sesh: asks.Session = asks.Session(connections=4) + self._test_sesh.base_location: str = _testnet_spot_url # margin and extended spot endpoints session. self._sapi_sesh = asks.Session(connections=4) @@ -189,54 +193,100 @@ def __init__( # futes EPs sesh self._fapi_sesh = asks.Session(connections=4) self._fapi_sesh.base_location: str = _futes_url + # futes testnet + self._test_fapi_sesh: asks.Session = asks.Session(connections=4) + self._test_fapi_sesh.base_location: str = _testnet_futes_url - # for creating API keys see, - # https://www.binance.com/en/support/faq/how-to-create-api-keys-on-binance-360002502072 - root_conf: dict = get_config() - conf: dict = root_conf['futes'] + # global client "venue selection" mode. + # set this when you want to switch venues and not have to + # specify the venue for the next request. 
+ self.mkt_mode: MarketType = mkt_mode - self.api_key: str = conf.get('api_key', '') - self.api_secret: str = conf.get('api_secret', '') - self.use_testnet: bool = conf.get('use_testnet', False) + # per 8 + self.venue_sesh: dict[ + str, # venue key + tuple[asks.Session, str] # session, eps path + ] = { + 'spot': (self._sesh, '/api/v3/'), + 'spot_testnet': (self._test_sesh, '/fapi/v1/'), - if self.use_testnet: - self._test_fapi_sesh = asks.Session(connections=4) - self._test_fapi_sesh.base_location: str = _testnet_futes_url + 'margin': (self._sapi_sesh, '/sapi/v1/'), - self.watchlist = conf.get('watchlist', []) + 'usdtm_futes': (self._fapi_sesh, '/fapi/v1/'), + 'usdtm_futes_testnet': (self._test_fapi_sesh, '/fapi/v1/'), - if self.api_key: - api_key_header: dict = { - # taken from official: - # https://github.com/binance/binance-futures-connector-python/blob/main/binance/api.py#L47 - "Content-Type": "application/json;charset=utf-8", + # 'futes_coin': self._dapi, # TODO + } - # TODO: prolly should just always query and copy - # in the real latest ver? - "User-Agent": "binance-connector/6.1.6smbz6", - "X-MBX-APIKEY": self.api_key, - } - self._sesh.headers.update(api_key_header) - self._sapi_sesh.headers.update(api_key_header) - self._fapi_sesh.headers.update(api_key_header) + # lookup for going from `.mkt_mode: str` to the config + # subsection `key: str` + self.venue2configkey: bidict[str, str] = { + 'spot': 'spot', + 'margin': 'spot', + 'usdtm_futes': 'futes', + # 'coinm_futes': 'futes', + } + self.confkey2venuekeys: dict[str, list[str]] = { + 'spot': ['spot', 'margin'], + 'futes': ['usdtm_futes'], + } - if self.use_testnet: - self._test_fapi_sesh.headers.update(api_key_header) + # for creating API keys see, + # https://www.binance.com/en/support/faq/how-to-create-api-keys-on-binance-360002502072 + self.conf: dict = get_config() + + for key, subconf in self.conf.items(): + if api_key := subconf.get('api_key', ''): + venue_keys: list[str] = self.confkey2venuekeys[key] + + venue_key: str + sesh: asks.Session + for venue_key in venue_keys: + sesh, _ = self.venue_sesh[venue_key] + + api_key_header: dict = { + # taken from official: + # https://github.com/binance/binance-futures-connector-python/blob/main/binance/api.py#L47 + "Content-Type": "application/json;charset=utf-8", + + # TODO: prolly should just always query and copy + # in the real latest ver? + "User-Agent": "binance-connector/6.1.6smbz6", + "X-MBX-APIKEY": api_key, + } + sesh.headers.update(api_key_header) + + # if `.use_tesnet = true` in the config then + # also add headers for the testnet session which + # will be used for all order control + if subconf.get('use_testnet', False): + testnet_sesh, _ = self.venue_sesh[ + venue_key + '_testnet' + ] + testnet_sesh.headers.update(api_key_header) + + def _mk_sig( + self, + data: dict, + venue: str, - self.mkt_mode: MarketType = mkt_mode - self.mkt_mode_req: dict[str, Callable] = { - 'spot': self._api, - 'margin': self._sapi, - 'usdtm_futes': self._fapi, - # 'futes_coin': self._dapi, # TODO - } + ) -> str: + + # look up subconfig (spot or futes) section using + # venue specific key lookup to figure out which mkt + # we need a key for. + section_name: str = self.venue2configkey[venue] + subconf: dict | None = self.conf.get(section_name) + if subconf is None: + raise config.ConfigurationError( + f'binance configuration is missing a `{section_name}` section ' + 'to define the creds for auth-ed endpoints!?' 
+ ) - def _mk_sig(self, data: dict) -> str: # XXX: Info on security and authentification # https://binance-docs.github.io/apidocs/#endpoint-security-type - - if not self.api_secret: + if not (api_secret := subconf.get('api_secret')): raise config.NoSignature( "Can't generate a signature without setting up credentials" ) @@ -246,10 +296,8 @@ def _mk_sig(self, data: dict) -> str: for key, value in data.items() ]) - # log.info(query_str) - msg_auth = hmac.new( - self.api_secret.encode('utf-8'), + api_secret.encode('utf-8'), query_str.encode('utf-8'), hashlib.sha256 ) @@ -260,103 +308,83 @@ def _mk_sig(self, data: dict) -> str: # mkt_mode: MarketType input! async def _api( self, - method: str, + endpoint: str, # ReST endpoint key params: dict, + + method: str = 'get', + venue: str | None = None, # if None use `.mkt_mode` state signed: bool = False, - action: str = 'get' + testnet: bool = True, ) -> dict[str, Any]: ''' - Make a /api/v3/ SPOT account/market endpoint request. + Make a ReST API request via + - a /api/v3/ SPOT, or + - /fapi/v3/ USD-M FUTURES, or + - /api/v3/ SPOT/MARGIN - For eg. rest market-data and spot-account-trade eps use - this endpoing parent path: - - https://binance-docs.github.io/apidocs/spot/en/#market-data-endpoints - - https://binance-docs.github.io/apidocs/spot/en/#spot-account-trade + account/market endpoint request depending on either passed in `venue: str` + or the current setting `.mkt_mode: str` setting, default `'spot'`. - ''' - if signed: - params['signature'] = self._mk_sig(params) - resp = await getattr(self._sesh, action)( - path=f'/api/v3/{method}', - params=params, - timeout=float('inf'), - ) + Docs per venue API: - return resproc(resp, log) - - async def _fapi( - self, - method: str, - params: dict, - signed: bool = False, - action: str = 'get', - testnet: bool = True, + SPOT: market-data and spot-account-trade eps use this + ---- endpoing parent path: + - https://binance-docs.github.io/apidocs/spot/en/#market-data-endpoints + - https://binance-docs.github.io/apidocs/spot/en/#spot-account-trade - ) -> dict[str, Any]: - ''' - Make a /fapi/v3/ USD-M FUTURES account/market endpoint - request. + MARGIN: and advancecd spot account eps: + ------ + - https://binance-docs.github.io/apidocs/spot/en/#margin-account-trade + - https://binance-docs.github.io/apidocs/spot/en/#listen-key-spot + - https://binance-docs.github.io/apidocs/spot/en/#spot-algo-endpoints - For all USD-M futures endpoints use this parent path: - https://binance-docs.github.io/apidocs/futures/en/#market-data-endpoints + USD-M FUTES: + ----------- + - https://binance-docs.github.io/apidocs/futures/en/#market-data-endpoints ''' + venue_key: str = venue or self.mkt_mode + if signed: - params['signature'] = self._mk_sig(params) + params['signature'] = self._mk_sig( + params, + venue=venue_key, + ) + + sesh: asks.Session + path: str + + # Check if we're configured to route order requests to the + # venue equivalent's testnet. 
+ use_testnet: bool = False + section_name: str = self.venue2configkey[venue_key] + if subconf := self.conf.get(section_name): + use_testnet = subconf.get('use_testnet', False) - # NOTE: only use testnet if user set brokers.toml config - # var to true **and** it's not one of the market data - # endpoints since we basically never want to display the - # test net feeds, we only are using it for testing order - # ctl machinery B) if ( - self.use_testnet + use_testnet and method not in { 'klines', 'exchangeInfo', } ): - meth = getattr(self._test_fapi_sesh, action) - else: - meth = getattr(self._fapi_sesh, action) + # NOTE: only use testnet if user set brokers.toml config + # var to true **and** it's not one of the market data + # endpoints since we basically never want to display the + # test net feeds, we only are using it for testing order + # ctl machinery B) + venue_key += '_testnet' - resp = await meth( - path=f'/fapi/v1/{method}', - params=params, - timeout=float('inf') - ) - - return resproc(resp, log) - - async def _sapi( - self, - method: str, - params: dict, - signed: bool = False, - action: str = 'get' + sesh, path = self.venue_sesh[venue_key] - ) -> dict[str, Any]: - ''' - Make a /api/v3/ SPOT/MARGIN account/market endpoint request. - - For eg. all margin and advancecd spot account eps use this - endpoing parent path: - - https://binance-docs.github.io/apidocs/spot/en/#margin-account-trade - - https://binance-docs.github.io/apidocs/spot/en/#listen-key-spot - - https://binance-docs.github.io/apidocs/spot/en/#spot-algo-endpoints - - ''' - if signed: - params['signature'] = self._mk_sig(params) - - resp = await getattr(self._sapi_sesh, action)( - path=f'/sapi/v1/{method}', + meth: Callable = getattr(sesh, method) + resp = await meth( + path=path + endpoint, params=params, - timeout=float('inf') + timeout=float('inf'), ) - return resproc(resp, log) async def _cache_pairs( @@ -369,9 +397,13 @@ async def _cache_pairs( asset_table: dict[str, Asset] = self._venue2assets[venue] # make API request(s) - resp = await self.mkt_mode_req[venue]( + resp = await self._api( 'exchangeInfo', params={}, # NOTE: retrieve all symbols by default + # XXX: MUST explicitly pass the routing venue since we + # don't know the routing mode but want to cache market + # infos across all venues + venue=venue, ) mkt_pairs = resp['symbols'] if not mkt_pairs: @@ -519,8 +551,7 @@ async def bars( end_time = binance_timestamp(end_dt) # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data - bars = await self.mkt_mode_req[self.mkt_mode]( - # bars = await self._api( + bars = await self._api( 'klines', params={ 'symbol': symbol.upper(), @@ -558,30 +589,31 @@ async def bars( dtype=def_iohlcv_fields, ) - async def get_positions( - self, - recv_window: int = 60000 + # TODO: maybe drop? Do we need this if we can simply request it + # over the user stream wss? + # async def get_positions( + # self, + # symbol: str, + # recv_window: int = 60000 - ) -> tuple: - positions = {} - volumes = {} - - for sym in self.watchlist: - log.info(f'doing {sym}...') - params = dict([ - ('symbol', sym), - ('recvWindow', recv_window), - ('timestamp', binance_timestamp(now())) - ]) - resp = await self._api( - 'allOrders', - params=params, - signed=True - ) - log.info(f'done. 
len {len(resp)}') - # await trio.sleep(3) + # ) -> tuple: + + # positions = {} + # volumes = {} + + # params = dict([ + # ('symbol', symbol), + # ('recvWindow', recv_window), + # ('timestamp', binance_timestamp(now())) + # ]) + # resp = await self._api( + # 'allOrders', + # params=params, + # signed=True + # ) + # log.info(f'done. len {len(resp)}') - return positions, volumes + # return positions, volumes async def get_deposits( self, @@ -638,11 +670,11 @@ async def get_open_orders( if symbol is not None: params['symbol'] = symbol - resp = await self.mkt_mode_req[self.mkt_mode]( + resp = await self._api( 'openOrders', params=params, signed=True, - action='get', + method='get', ) # figure out which venue (in FQME terms) we're using # since that normally maps 1-to-1 with the account (right?) @@ -726,14 +758,14 @@ async def submit_limit( # ('closeAll', close_all), ]) - action: str = 'post' + method: str = 'post' # NOTE: modifies only require diff key for user oid: # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade if modify: assert oid params['origClientOrderId'] = oid - action: str = 'put' + method: str = 'put' elif oid: params['newClientOrderId'] = oid @@ -742,11 +774,12 @@ async def submit_limit( 'Submitting ReST order request:\n' f'{pformat(params)}' ) - resp = await self.mkt_mode_req[self.mkt_mode]( + resp = await self._api( 'order', params=params, signed=True, - action=action, + method=method, + venue=self.mkt_mode, ) # ensure our id is tracked by them @@ -780,41 +813,38 @@ async def submit_cancel( 'Submitting ReST order cancel: {oid}\n' f'{pformat(params)}' ) - await self.mkt_mode_req[self.mkt_mode]( + await self._api( 'order', params=params, signed=True, - action='delete' + method='delete' ) async def get_listen_key(self) -> str: - # resp = await self._api( - resp = await self.mkt_mode_req[self.mkt_mode]( + resp = await self._api( # 'userDataStream', # spot 'listenKey', params={}, - action='post', + method='post', signed=True, ) return resp['listenKey'] async def keep_alive_key(self, listen_key: str) -> None: - # await self._fapi( - await self.mkt_mode_req[self.mkt_mode]( + await self._api( # 'userDataStream', 'listenKey', params={'listenKey': listen_key}, - action='put' + method='put' ) async def close_listen_key(self, listen_key: str) -> None: - # await self._fapi( - await self.mkt_mode_req[self.mkt_mode]( + await self._api( # 'userDataStream', 'listenKey', params={'listenKey': listen_key}, - action='delete' + method='delete' ) @acm diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 776708166..78b864139 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -214,7 +214,13 @@ async def open_trade_dialog( ) -> AsyncIterator[dict[str, Any]]: async with open_cached_client('binance') as client: - if not client.api_key: + for key, subconf in client.conf.items(): + if subconf.get('api_key'): + break + + # XXX: if no futes.api_key or spot.api_key has been set we + # always fall back to the paper engine! + else: await ctx.started('paper') return diff --git a/piker/brokers/binance/venues.py b/piker/brokers/binance/venues.py index 2de73185c..d2d0b1a60 100644 --- a/piker/brokers/binance/venues.py +++ b/piker/brokers/binance/venues.py @@ -41,9 +41,9 @@ # NOTE XXX: see api docs which show diff addr? # https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information _spot_ws: str = 'wss://stream.binance.com/ws' +# or this one? .. 
# 'wss://ws-api.binance.com:443/ws-api/v3', -_testnet_spot_ws: str = 'wss://testnet.binance.vision/ws-api/v3' # https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams _futes_ws: str = f'wss://fstream.{_domain}/ws/' @@ -55,6 +55,8 @@ # https://www.binance.com/en/support/faq/how-to-test-my-functions-on-binance-testnet-ab78f9a1b8824cf0a106b4229c76496d _testnet_spot_url: str = 'https://testnet.binance.vision/api' _testnet_spot_ws: str = 'wss://testnet.binance.vision/ws' +# or this one? .. +# 'wss://testnet.binance.vision/ws-api/v3' _testnet_futes_url: str = 'https://testnet.binancefuture.com' _testnet_futes_ws: str = 'wss://stream.binancefuture.com' diff --git a/piker/config.py b/piker/config.py index a0d403d54..3bb026d5a 100644 --- a/piker/config.py +++ b/piker/config.py @@ -173,7 +173,11 @@ def _posixify(name): ) -class NoSignature(Exception): +class ConfigurationError(Exception): + 'Misconfigured settings, likely in a TOML file.' + + +class NoSignature(ConfigurationError): 'No credentials setup for broker backend!' From 676b00592db88163eca9c749932fd1c9b83bce86 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 19 Jun 2023 19:10:43 -0400 Subject: [PATCH 52/73] Don't allow `Client.api()` testnet queries by default, require explicit flag set --- piker/brokers/binance/api.py | 29 +++++++++++++++++++---------- piker/brokers/binance/venues.py | 1 - 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 7c4324df6..dc4d256ca 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -37,7 +37,6 @@ import hashlib from pathlib import Path -from bidict import bidict import trio from pendulum import ( now, @@ -220,14 +219,14 @@ def __init__( # lookup for going from `.mkt_mode: str` to the config # subsection `key: str` - self.venue2configkey: bidict[str, str] = { + self.venue2configkey: dict[str, str] = { 'spot': 'spot', 'margin': 'spot', 'usdtm_futes': 'futes', # 'coinm_futes': 'futes', } self.confkey2venuekeys: dict[str, list[str]] = { - 'spot': ['spot', 'margin'], + 'spot': ['spot'], # 'margin'], 'futes': ['usdtm_futes'], } @@ -314,7 +313,7 @@ async def _api( method: str = 'get', venue: str | None = None, # if None use `.mkt_mode` state signed: bool = False, - testnet: bool = True, + allow_testnet: bool = False, ) -> dict[str, Any]: ''' @@ -358,10 +357,12 @@ async def _api( # Check if we're configured to route order requests to the # venue equivalent's testnet. - use_testnet: bool = False section_name: str = self.venue2configkey[venue_key] if subconf := self.conf.get(section_name): - use_testnet = subconf.get('use_testnet', False) + use_testnet = ( + subconf.get('use_testnet', False) + and allow_testnet + ) if ( use_testnet @@ -404,6 +405,7 @@ async def _cache_pairs( # don't know the routing mode but want to cache market # infos across all venues venue=venue, + allow_testnet=False, # XXX: never use testnet for symbol lookups ) mkt_pairs = resp['symbols'] if not mkt_pairs: @@ -559,7 +561,8 @@ async def bars( 'startTime': start_time, 'endTime': end_time, 'limit': limit - } + }, + allow_testnet=False, ) new_bars: list[tuple] = [] for i, bar in enumerate(bars): @@ -675,6 +678,7 @@ async def get_open_orders( params=params, signed=True, method='get', + allow_testnet=True, ) # figure out which venue (in FQME terms) we're using # since that normally maps 1-to-1 with the account (right?) 
@@ -780,6 +784,7 @@ async def submit_limit( signed=True, method=method, venue=self.mkt_mode, + allow_testnet=True, ) # ensure our id is tracked by them @@ -817,7 +822,8 @@ async def submit_cancel( 'order', params=params, signed=True, - method='delete' + method='delete', + allow_testnet=True, ) async def get_listen_key(self) -> str: @@ -828,6 +834,7 @@ async def get_listen_key(self) -> str: params={}, method='post', signed=True, + allow_testnet=True, ) return resp['listenKey'] @@ -836,7 +843,8 @@ async def keep_alive_key(self, listen_key: str) -> None: # 'userDataStream', 'listenKey', params={'listenKey': listen_key}, - method='put' + method='put', + allow_testnet=True, ) async def close_listen_key(self, listen_key: str) -> None: @@ -844,7 +852,8 @@ async def close_listen_key(self, listen_key: str) -> None: # 'userDataStream', 'listenKey', params={'listenKey': listen_key}, - method='delete' + method='delete', + allow_testnet=True, ) @acm diff --git a/piker/brokers/binance/venues.py b/piker/brokers/binance/venues.py index d2d0b1a60..6c9afc779 100644 --- a/piker/brokers/binance/venues.py +++ b/piker/brokers/binance/venues.py @@ -44,7 +44,6 @@ # or this one? .. # 'wss://ws-api.binance.com:443/ws-api/v3', - # https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams _futes_ws: str = f'wss://fstream.{_domain}/ws/' _auth_futes_ws: str = 'wss://fstream-auth.{_domain}/ws/' From e4c1003abab916a21be93e737b59dfb070e61448 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 19 Jun 2023 19:20:41 -0400 Subject: [PATCH 53/73] Hard code futes venue(s) for now in `brokerd`.. --- piker/brokers/binance/broker.py | 62 +++++++++++++++++++++++---------- 1 file changed, 44 insertions(+), 18 deletions(-) diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 78b864139..270088b25 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -61,7 +61,11 @@ Status, Order, ) -from .venues import Pair +from .venues import ( + Pair, + _futes_ws, + _testnet_futes_ws, +) from .api import Client log = get_logger('piker.brokers.binance') @@ -213,33 +217,41 @@ async def open_trade_dialog( ) -> AsyncIterator[dict[str, Any]]: + # TODO: how do we set this from the EMS such that + # positions are loaded from the correct venue on the user + # stream at startup? (that is in an attempt to support both + # spot and futes markets?) + # - I guess we just want to instead start 2 separate user + # stream tasks right? unless we want another actor pool? + # XXX: see issue: + venue_name: str = 'futes' + venue_mode: str = 'usdtm_futes' + account_name: str = 'usdtm' + use_testnet: bool = False + async with open_cached_client('binance') as client: - for key, subconf in client.conf.items(): - if subconf.get('api_key'): - break + subconf: dict = client.conf[venue_name] + use_testnet = subconf.get('use_testnet', False) # XXX: if no futes.api_key or spot.api_key has been set we # always fall back to the paper engine! - else: + if not subconf.get('api_key'): await ctx.started('paper') return async with ( open_cached_client('binance') as client, ): - client.mkt_mode: str = 'usdtm_futes' + client.mkt_mode: str = venue_mode - # if client. - venue: str = client.mkt_mode + # TODO: map these wss urls depending on spot or futes + # setting passed when this task is spawned? 
+ wss_url: str = _futes_ws if not use_testnet else _testnet_futes_ws wss: NoBsWs async with ( client.manage_listen_key() as listen_key, - open_autorecon_ws( - f'wss://stream.binancefuture.com/ws/{listen_key}', - # f'wss://stream.binance.com:9443/ws/{listen_key}', - ) as wss, - + open_autorecon_ws(f'{wss_url}/ws/{listen_key}') as wss, ): nsid: int = time_ns() await wss.send_msg({ @@ -270,7 +282,7 @@ async def open_trade_dialog( positions: list[BrokerdPosition] = [] for resp_dict in msg['result']: - resp = resp_dict['res'] + resp: dict = resp_dict['res'] req: str = resp_dict['req'] # @account response should be something like: @@ -329,7 +341,9 @@ async def open_trade_dialog( bs_mktid: str = entry['symbol'] entry_size: float = float(entry['positionAmt']) - pair: Pair | None = client._venue2pairs[venue].get(bs_mktid) + pair: Pair | None = client._venue2pairs[ + venue_mode + ].get(bs_mktid) if ( pair and entry_size > 0 @@ -338,7 +352,7 @@ async def open_trade_dialog( ppmsg = BrokerdPosition( broker='binance', - account='binance.usdtm', + account=f'binance.{account_name}', # TODO: maybe we should be passing back # a `MktPair` here? @@ -357,6 +371,13 @@ async def open_trade_dialog( await ctx.started((positions, list(accounts))) + # TODO: package more state tracking into the dialogs API? + # - hmm maybe we could include `OrderDialogs.dids: + # bidict` as part of the interface and then ask for + # a reqid field to be passed at init? + # |-> `OrderDialog(reqid_field='orderId')` kinda thing? + # - also maybe bundle in some kind of dialog to account + # table? dialogs = OrderDialogs() dids: dict[str, int] = bidict() @@ -404,7 +425,8 @@ async def open_trade_dialog( ) tn.start_soon( handle_order_updates, - venue, + venue_mode, + account_name, client, ems_stream, wss, @@ -417,6 +439,7 @@ async def open_trade_dialog( async def handle_order_updates( venue: str, + account_name: str, client: Client, ems_stream: tractor.MsgStream, wss: NoBsWs, @@ -574,6 +597,9 @@ async def handle_order_updates( time_ns=time_ns(), # reqid=reqid, reqid=oid, + + # TODO: i feel like we don't need to make the + # ems and upstream clients aware of this? # account='binance.usdtm', status=status, @@ -622,7 +648,7 @@ async def handle_order_updates( pair: Pair | None = client._venue2pairs[venue].get(bs_mktid) ppmsg = BrokerdPosition( broker='binance', - account='binance.usdtm', + account=f'binance.{account_name}', # TODO: maybe we should be passing back # a `MktPair` here? From 5d930175e4ece1ae4eda03b71bbfabb026ec112d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 20 Jun 2023 12:00:54 -0400 Subject: [PATCH 54/73] kraken: use new `OrderDialogs` type, handle `.spot` Drop the older `dict[str, ChainMap]` prototype we had since the new `OrderDialogs` built-out while adding `binance` order support is more refined and general. Also, handle new and now expect `.spot` venue token in FQMEs since kraken too has futes markets that we'll likely want to support eventually. 
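For reference, the ChainMap-per-dialog behaviour being adopted works like the following usage sketch (same semantics as the `piker/clearing/_util.py` version factored out a couple patches back; the msg contents here are made up):

    from collections import ChainMap

    flows: dict[str, ChainMap] = {}

    def add_msg(oid: str, msg: dict) -> None:
        if cm := flows.get(oid):
            cm.maps.insert(0, msg)  # newest msg shadows older fields
        else:
            flows[oid] = ChainMap(msg)

    add_msg('req1', {'symbol': 'xbtusd.spot', 'price': 29_000.0})
    add_msg('req1', {'price': 29_100.0})  # an edit only touches price

    print(flows['req1']['price'], flows['req1']['symbol'])
    # -> 29100.0 xbtusd.spot (latest price, original symbol)
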
--- piker/brokers/kraken/broker.py | 38 ++++++++++++++++++++-------------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 86d30f41f..3d08f92b8 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -18,7 +18,6 @@ Order api and machinery ''' -from collections import ChainMap, defaultdict from contextlib import ( asynccontextmanager as acm, aclosing, @@ -52,6 +51,9 @@ from piker.accounting._mktinfo import ( MktPair, ) +from piker.clearing import( + OrderDialogs, +) from piker.clearing._messages import ( Order, Status, @@ -124,7 +126,7 @@ async def handle_order_requests( client: Client, ems_order_stream: tractor.MsgStream, token: str, - apiflows: dict[int, ChainMap[dict[str, dict]]], + apiflows: OrderDialogs, ids: bidict[str, int], reqids2txids: dict[int, str], @@ -188,6 +190,7 @@ async def handle_order_requests( try: txid: str = reqids2txids[reqid] except KeyError: + # XXX: not sure if this block ever gets hit now? log.error('TOO FAST EDIT') reqids2txids[reqid] = TooFastEdit(reqid) @@ -221,7 +224,11 @@ async def handle_order_requests( 'type': order.action, } - psym: str = order.symbol.upper() + # XXX strip any . token which should + # ONLY ever be '.spot' rn, until we support + # futes. + bs_fqme: str = order.symbol.rstrip('.spot') + psym: str = bs_fqme.upper() pair: str = f'{psym[:3]}/{psym[3:]}' # XXX: ACK the request **immediately** before sending @@ -260,7 +267,7 @@ async def handle_order_requests( await ws.send_msg(req) # placehold for sanity checking in relay loop - apiflows[reqid].maps.append(msg) + apiflows.add_msg(reqid, msg) case _: account = msg.get('account') @@ -440,10 +447,7 @@ async def open_trade_dialog( acc_name = 'kraken.' + acctid # task local msg dialog tracking - apiflows: defaultdict[ - int, - ChainMap[dict[str, dict]], - ] = defaultdict(ChainMap) + apiflows = OrderDialogs() # 2way map for ems ids to kraken int reqids.. ids: bidict[str, int] = bidict() @@ -706,7 +710,7 @@ async def handle_order_updates( ws: NoBsWs, ws_stream: AsyncIterator, ems_stream: tractor.MsgStream, - apiflows: dict[int, ChainMap[dict[str, dict]]], + apiflows: OrderDialogs, ids: bidict[str, int], reqids2txids: bidict[int, str], table: PpTable, @@ -921,7 +925,7 @@ async def handle_order_updates( ), src='kraken', ) - apiflows[reqid].maps.append(status_msg.to_dict()) + apiflows.add_msg(reqid, status_msg.to_dict()) await ems_stream.send(status_msg) continue @@ -1057,7 +1061,7 @@ async def handle_order_updates( ), ) - apiflows[reqid].maps.append(update_msg) + apiflows.add_msg(reqid, update_msg) await ems_stream.send(resp) # fill msg. @@ -1136,9 +1140,8 @@ async def handle_order_updates( ) continue - # update the msg chain - chain = apiflows[reqid] - chain.maps.append(event) + # update the msg history + apiflows.add_msg(reqid, event) if status == 'error': # any of ``{'add', 'edit', 'cancel'}`` @@ -1148,11 +1151,16 @@ async def handle_order_updates( f'Failed to {action} order {reqid}:\n' f'{errmsg}' ) + + symbol: str = 'N/A' + if chain := apiflows.get(reqid): + symbol: str = chain.get('symbol', 'N/A') + await ems_stream.send(BrokerdError( oid=oid, # XXX: use old reqid in case it changed? 
reqid=reqid, - symbol=chain.get('symbol', 'N/A'), + symbol=symbol, reason=f'Failed {action}:\n{errmsg}', broker_details=event From d82173dd5007b10a27b85983e49d104e66039c3d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 20 Jun 2023 12:29:50 -0400 Subject: [PATCH 55/73] Always use fully expanded FQME throughout `.clearing` Since crypto backends now also may expand an FQME like `xbteur.kraken` -> `xbteur.spot.kraken` (by filling in the venue token), we need to use this identifier when looking up per-market order dialogs or submitting new requests. The simple fix is to simply look up that expanded from from the `Feed.flumes` table which is always keyed by the `MktPair.fqme: str` - the expanded form. --- piker/clearing/_util.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/piker/clearing/_util.py b/piker/clearing/_util.py index 9015ba69b..9eebf1c4d 100644 --- a/piker/clearing/_util.py +++ b/piker/clearing/_util.py @@ -91,6 +91,3 @@ def pop( ''' return self._flows.pop(oid) - - - From a4d16ec6abccf5502e26b525992df765af41c014 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 20 Jun 2023 12:38:39 -0400 Subject: [PATCH 56/73] Fix ems tests: add `.spot` venue token to fqme --- tests/test_ems.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/tests/test_ems.py b/tests/test_ems.py index f9c010f08..c2f5d7a8e 100644 --- a/tests/test_ems.py +++ b/tests/test_ems.py @@ -164,7 +164,9 @@ def test_ems_err_on_bad_broker( async def load_bad_fqme(): try: async with ( - open_test_pikerd() as (_, _, _, _), + open_test_pikerd( + debug_mode=False, + ) as (_, _, _, _), open_ems( 'doggycoin.doggy', @@ -173,8 +175,11 @@ async def load_bad_fqme(): ) as _ ): pytest.fail('EMS is working on non-broker!?') - except ModuleNotFoundError: - pass + + # NOTE: emsd should error on the actor's enabled modules + # import phase, when looking for a backend named `doggy`. 
+ except tractor.RemoteActorError as re: + assert re.type == ModuleNotFoundError run_and_tollerate_cancels(load_bad_fqme) @@ -241,8 +246,9 @@ async def submit_and_check( ''' broker: str = 'kraken' + venue: str = 'spot' mkt_key: str = 'xbtusdt' - fqme: str = f'{mkt_key}.{broker}' + fqme: str = f'{mkt_key}.{venue}.{broker}' startup_pps: dict[ tuple[str, str], # brokername, acctid From 65f2549d9001b8def257d17b1d96d9d1d6632759 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 20 Jun 2023 14:01:31 -0400 Subject: [PATCH 57/73] binance: more explicit var naming in `OHLC` parse loop --- piker/brokers/binance/api.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index dc4d256ca..e6a4cbc9b 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -113,7 +113,7 @@ class OHLC(Struct): https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams ''' - time: int + time: int # epoch in ms open: float high: float @@ -565,9 +565,9 @@ async def bars( allow_testnet=False, ) new_bars: list[tuple] = [] - for i, bar in enumerate(bars): + for i, bar_list in enumerate(bars): - bar = OHLC(*bar) + bar = OHLC(*bar_list) bar.typecast() row = [] From d9708e28c8d50f16ec47e530e3a7e3c48043f092 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 20 Jun 2023 14:33:32 -0400 Subject: [PATCH 58/73] kraken: drop `OHLC.ticks` field and just inject to quote before send --- piker/brokers/kraken/feed.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index 4830914f4..26956a1c3 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -65,7 +65,7 @@ ) -class OHLC(Struct): +class OHLC(Struct, frozen=True): ''' Description of the flattened OHLC quote format. @@ -76,6 +76,8 @@ class OHLC(Struct): chan_id: int # internal kraken id chan_name: str # eg. 
ohlc-1 (name-interval) pair: str # fx pair + + # unpacked from array time: float # Begin time of interval, in seconds since epoch etime: float # End time of interval, in seconds since epoch open: float # Open price of interval @@ -85,8 +87,6 @@ class OHLC(Struct): vwap: float # Volume weighted average price within interval volume: float # Accumulated volume **within interval** count: int # Number of trades within interval - # (sampled) generated tick data - ticks: list[Any] = [] async def stream_messages( @@ -150,14 +150,15 @@ async def process_data_feed_msgs( pair ]: if 'ohlc' in chan_name: + array: list = payload_array[0] ohlc = OHLC( chan_id, chan_name, pair, - *payload_array[0] + *map(float, array[:-1]), + count=array[-1], ) - ohlc.typecast() - yield 'ohlc', ohlc + yield 'ohlc', ohlc.copy() elif 'spread' in chan_name: @@ -430,7 +431,7 @@ async def subscribe(ws: NoBsWs): feed_is_live.set() # keep start of last interval for volume tracking - last_interval_start = ohlc_last.etime + last_interval_start: float = ohlc_last.etime # start streaming topic: str = mkt.bs_fqme @@ -448,24 +449,23 @@ async def subscribe(ws: NoBsWs): # new OHLC sample interval if quote.etime > last_interval_start: - last_interval_start = quote.etime - tick_volume = volume + last_interval_start: float = quote.etime + tick_volume: float = volume else: # this is the tick volume *within the interval* - tick_volume = volume - ohlc_last.volume + tick_volume: float = volume - ohlc_last.volume ohlc_last = quote last = quote.close + quote = normalize(quote) if tick_volume: - quote.ticks.append({ + quote['ticks'] = [{ 'type': 'trade', 'price': last, 'size': tick_volume, - }) - - quote = normalize(quote) + }] case 'l1': # passthrough quote msg From 3fcf44aa52c51b13ef5790dfc6ef3e872083e2bb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 20 Jun 2023 18:22:56 -0400 Subject: [PATCH 59/73] Skip marketstore docker tests, we're gonna drop it.. --- tests/test_docker_services.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_docker_services.py b/tests/test_docker_services.py index 66438e336..084e5e63e 100644 --- a/tests/test_docker_services.py +++ b/tests/test_docker_services.py @@ -12,6 +12,7 @@ from piker.service import elastic +@pytest.mark.skip def test_marketstore_startup_and_version( open_test_pikerd: AsyncContextManager, loglevel: str, @@ -38,7 +39,7 @@ async def main(): ) as ( _, # host _, # port - pikerd_portal, + _, services, ), From f7f76137ca56875d2155581e4ade7addfb05ba14 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 22 Jun 2023 15:28:20 -0400 Subject: [PATCH 60/73] kraken: handle `.spot.kraken` new-style FQMEs After #520 we've moved to better supporting explicit venues for cex backends which is important where a provider offers both spot and derivatives markets (kraken, binance, kucoin) and we need to distinguish which is being traded given a common asset pair (eg. BTC/USDT). So, make this work for `kraken`'s brokerd such that requests and pre-existing live order are (un)packed to/from EMS messaging form. --- piker/brokers/kraken/broker.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 3d08f92b8..7cb596725 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -136,10 +136,8 @@ async def handle_order_requests( and deliver acks or errors. ''' - # XXX: UGH, let's unify this.. with ``msgspec``. 
- msg: dict[str, Any] - order: BrokerdOrder - + # XXX: UGH, let's unify this.. with ``msgspec``!!! + msg: dict | Order async for msg in ems_order_stream: log.info(f'Rx order msg:\n{pformat(msg)}') match msg: @@ -227,7 +225,7 @@ async def handle_order_requests( # XXX strip any . token which should # ONLY ever be '.spot' rn, until we support # futes. - bs_fqme: str = order.symbol.rstrip('.spot') + bs_fqme: str = order.symbol.replace('.spot', '') psym: str = bs_fqme.upper() pair: str = f'{psym[:3]}/{psym[3:]}' @@ -891,7 +889,7 @@ async def handle_order_updates( ids.inverse.get(reqid) is None ): # parse out existing live order - fqme = pair.replace('/', '').lower() + fqme = pair.replace('/', '').lower() + '.spot' price = float(price) size = float(vol) From b1ef5492764a3add6a629e7a3d120e1dcd4a15ba Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 23 Jun 2023 17:33:38 -0400 Subject: [PATCH 61/73] Move `broker_init()` into `brokers._daemon` We might as well start standardizing on `brokerd` init such that it can be used more generally in client code (such as the `.accounting.cli` stuff). Deats of `broker_init()` impl: - loads appropriate py pkg module, - reads any declared `__enable_modules__: listr[str]` which will be passed to `tractor.ActorNursery.start_actor(enabled_modules=)` - loads the `.brokers._daemon._setup_persistent_brokerd As expected the `accounting.cli` tools can now import directly from this new location and use the common daemon fixture definition. --- piker/accounting/cli.py | 146 ++++++++++++++++++--------------------- piker/brokers/_daemon.py | 126 +++++++++++++++++++++++++++------ piker/brokers/_util.py | 2 + 3 files changed, 174 insertions(+), 100 deletions(-) diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 0b18a3eb0..75798f3f4 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -18,11 +18,6 @@ CLI front end for trades ledger and position tracking management. ''' -from typing import ( - AsyncContextManager, -) -from types import ModuleType - from rich.console import Console from rich.markdown import Markdown import tractor @@ -36,63 +31,30 @@ from ..clearing._messages import BrokerdPosition from ..config import load_ledger from ..calc import humanize +from ..brokers._daemon import broker_init ledger = typer.Typer() -def broker_init( - brokername: str, - loglevel: str | None = None, - - **start_actor_kwargs, - -) -> tuple[ - ModuleType, - dict, - AsyncContextManager, -]: - ''' - Given an input broker name, load all named arguments - which can be passed to a daemon + context spawn for - the relevant `brokerd` service endpoint. - - ''' - from ..brokers import get_brokermod - brokermod = get_brokermod(brokername) - modpath = brokermod.__name__ - - start_actor_kwargs['name'] = f'brokerd.{brokername}' - start_actor_kwargs.update( - getattr( - brokermod, - '_spawn_kwargs', - {}, - ) - ) - - # lookup actor-enabled modules declared by the backend offering the - # `brokerd` endpoint(s). - enabled = start_actor_kwargs['enable_modules'] = [modpath] - for submodname in getattr( - brokermod, - '__enable_modules__', - [], - ): - subpath = f'{modpath}.{submodname}' - enabled.append(subpath) - - # TODO XXX: DO WE NEED THIS? 
- # enabled.append('piker.data.feed') - - # non-blocking setup of brokerd service nursery - from ..brokers._daemon import _setup_persistent_brokerd - - return ( - brokermod, - start_actor_kwargs, # to `ActorNursery.start_actor()` - _setup_persistent_brokerd, # deamon service task ep - ) +def unpack_fqan( + fully_qualified_account_name: str, + console: Console | None, +) -> tuple | bool: + try: + brokername, account = fully_qualified_account_name.split('.') + return brokername, account + except ValueError: + if console is not None: + md = Markdown( + f'=> `{fully_qualified_account_name}` <=\n\n' + 'is not a valid ' + '__fully qualified account name?__\n\n' + 'Your account name needs to be of the form ' + '`.`\n' + ) + console.print(md) + return False @ledger.command() @@ -108,19 +70,15 @@ def sync( log = get_logger(loglevel) console = Console() - try: - brokername, account = fully_qualified_account_name.split('.') - except ValueError: - md = Markdown( - f'=> `{fully_qualified_account_name}` <=\n\n' - 'is not a valid ' - '__fully qualified account name?__\n\n' - 'Your account name needs to be of the form ' - '`.`\n' - ) - console.print(md) + pair: tuple[str, str] + if not (pair := unpack_fqan( + fully_qualified_account_name, + console, + )): return + brokername, account = pair + brokermod, start_kwargs, deamon_ep = broker_init( brokername, loglevel=loglevel, @@ -155,18 +113,30 @@ async def main(): ) brokerd_stream: tractor.MsgStream - async with open_brokerd_dialog( - brokermod, - portal, - exec_mode=( - 'paper' if account == 'paper' - else 'live' + async with ( + # engage the brokerd daemon context + portal.open_context( + deamon_ep, + brokername=brokername, + loglevel=loglevel, + ), + + # manually open the brokerd trade dialog EP + # (what the EMS normally does internall) B) + open_brokerd_dialog( + brokermod, + portal, + exec_mode=( + 'paper' + if account == 'paper' + else 'live' + ), + loglevel=loglevel, + ) as ( + brokerd_stream, + pp_msg_table, + accounts, ), - loglevel=loglevel, - ) as ( - brokerd_stream, - pp_msg_table, - accounts, ): try: assert len(accounts) == 1 @@ -253,5 +223,23 @@ async def main(): trio.run(main) +@ledger.command() +def disect( + fully_qualified_account_name: str, + bs_mktid: int, # for ib + pdb: bool = False, + + loglevel: str = typer.Option( + 'error', + "-l", + ), +): + pair: tuple[str, str] + if not (pair := unpack_fqan( + fully_qualified_account_name, + )): + return + + if __name__ == "__main__": ledger() # this is called from ``>> ledger `` diff --git a/piker/brokers/_daemon.py b/piker/brokers/_daemon.py index 368e81164..ecb785f7b 100644 --- a/piker/brokers/_daemon.py +++ b/piker/brokers/_daemon.py @@ -23,7 +23,11 @@ from contextlib import ( asynccontextmanager as acm, ) -from typing import TYPE_CHECKING +from types import ModuleType +from typing import ( + TYPE_CHECKING, + AsyncContextManager, +) import exceptiongroup as eg import tractor @@ -39,7 +43,7 @@ # TODO: move this def to the `.data` subpkg.. # NOTE: keeping this list as small as possible is part of our caps-sec # model and should be treated with utmost care! -_data_mods = [ +_data_mods: str = [ 'piker.brokers.core', 'piker.brokers.data', 'piker.brokers._daemon', @@ -72,9 +76,13 @@ async def _setup_persistent_brokerd( loglevel or tractor.current_actor().loglevel, name=f'{_util.subsys}.{brokername}', ) + # set global for this actor to this new process-wide instance B) _util.log = log + # further, set the log level on any broker broker specific + # logger instance. 
+ from piker.data import feed assert not feed._bus @@ -111,6 +119,79 @@ async def _setup_persistent_brokerd( raise +def broker_init( + brokername: str, + loglevel: str | None = None, + + **start_actor_kwargs, + +) -> tuple[ + ModuleType, + dict, + AsyncContextManager, +]: + ''' + Given an input broker name, load all named arguments + which can be passed for daemon endpoint + context spawn + as required in every `brokerd` (actor) service. + + This includes: + - load the appropriate .py pkg module, + - reads any declared `__enable_modules__: listr[str]` which will be + passed to `tractor.ActorNursery.start_actor(enabled_modules=)` + at actor start time, + - deliver a references to the daemon lifetime fixture, which + for now is always the `_setup_persistent_brokerd()` context defined + above. + + ''' + from ..brokers import get_brokermod + brokermod = get_brokermod(brokername) + modpath: str = brokermod.__name__ + + start_actor_kwargs['name'] = f'brokerd.{brokername}' + start_actor_kwargs.update( + getattr( + brokermod, + '_spawn_kwargs', + {}, + ) + ) + + # XXX TODO: make this not so hacky/monkeypatched.. + # -> we need a sane way to configure the logging level for all + # code running in brokerd. + # if utilmod := getattr(brokermod, '_util', False): + # utilmod.log.setLevel(loglevel.upper()) + + # lookup actor-enabled modules declared by the backend offering the + # `brokerd` endpoint(s). + enabled: list[str] + enabled = start_actor_kwargs['enable_modules'] = [ + __name__, # so that eps from THIS mod can be invoked + modpath, + ] + for submodname in getattr( + brokermod, + '__enable_modules__', + [], + ): + subpath: str = f'{modpath}.{submodname}' + enabled.append(subpath) + + # TODO XXX: DO WE NEED THIS? + # enabled.append('piker.data.feed') + + return ( + brokermod, + start_actor_kwargs, # to `ActorNursery.start_actor()` + + # XXX see impl above; contains all (actor global) + # setup/teardown expected in all `brokerd` actor instances. + _setup_persistent_brokerd, + ) + + async def spawn_brokerd( brokername: str, @@ -120,44 +201,44 @@ async def spawn_brokerd( ) -> bool: - from piker.service import Services from piker.service._util import log # use service mngr log - log.info(f'Spawning {brokername} broker daemon') - brokermod = get_brokermod(brokername) - dname = f'brokerd.{brokername}' + ( + brokermode, + tractor_kwargs, + daemon_fixture_ep, + ) = broker_init( + brokername, + loglevel, + **tractor_kwargs, + ) + brokermod = get_brokermod(brokername) extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {}) tractor_kwargs.update(extra_tractor_kwargs) # ask `pikerd` to spawn a new sub-actor and manage it under its # actor nursery - modpath = brokermod.__name__ - broker_enable = [modpath] - for submodname in getattr( - brokermod, - '__enable_modules__', - [], - ): - subpath = f'{modpath}.{submodname}' - broker_enable.append(subpath) + from piker.service import Services + dname: str = tractor_kwargs.pop('name') # f'brokerd.{brokername}' portal = await Services.actor_n.start_actor( dname, - enable_modules=_data_mods + broker_enable, - loglevel=loglevel, + enable_modules=_data_mods + tractor_kwargs.pop('enable_modules'), debug_mode=Services.debug_mode, **tractor_kwargs ) - # non-blocking setup of brokerd service nursery + # NOTE: the service mngr expects an already spawned actor + its + # portal ref in order to do non-blocking setup of brokerd + # service nursery. 
await Services.start_service_task( dname, portal, # signature of target root-task endpoint - _setup_persistent_brokerd, + daemon_fixture_ep, brokername=brokername, loglevel=loglevel, ) @@ -174,8 +255,11 @@ async def maybe_spawn_brokerd( ) -> tractor.Portal: ''' - Helper to spawn a brokerd service *from* a client - who wishes to use the sub-actor-daemon. + Helper to spawn a brokerd service *from* a client who wishes to + use the sub-actor-daemon but is fine with re-using any existing + and contactable `brokerd`. + + Mas o menos, acts as a cached-actor-getter factory. ''' from piker.service import maybe_spawn_daemon diff --git a/piker/brokers/_util.py b/piker/brokers/_util.py index baf2c7b2e..30e36f2e1 100644 --- a/piker/brokers/_util.py +++ b/piker/brokers/_util.py @@ -32,6 +32,8 @@ subsys: str = 'piker.brokers' # NOTE: level should be reset by any actor that is spawned +# as well as given a (more) explicit name/key such +# as `piker.brokers.binance` matching the subpkg. log = get_logger(subsys) get_console_log = partial( From 3be1d610e091ffe42a86063b20916bbe352d6211 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 24 Jun 2023 17:12:43 -0400 Subject: [PATCH 62/73] ib: expose trade EP as `open_trade_dialog()` Should be the final production backend to switch this over B) Also tidy up the `update_and_audit_msgs()` validator to log vs. raise when `validate: bool` is set; turn it off by default to avoid raises until we figure out wtf is up with ib ledger processing or wtv.. --- piker/brokers/ib/__init__.py | 4 +- piker/brokers/ib/_util.py | 2 +- piker/brokers/ib/api.py | 3 +- piker/brokers/ib/broker.py | 96 ++++++++++++++++++++---------------- 4 files changed, 59 insertions(+), 46 deletions(-) diff --git a/piker/brokers/ib/__init__.py b/piker/brokers/ib/__init__.py index 80bc228f1..1acef9746 100644 --- a/piker/brokers/ib/__init__.py +++ b/piker/brokers/ib/__init__.py @@ -34,12 +34,12 @@ stream_quotes, ) from .broker import ( - trades_dialogue, + open_trade_dialog, ) __all__ = [ 'get_client', - 'trades_dialogue', + 'open_trade_dialog', 'open_history_client', 'open_symbol_search', 'stream_quotes', diff --git a/piker/brokers/ib/_util.py b/piker/brokers/ib/_util.py index f23aa99b4..05417d98b 100644 --- a/piker/brokers/ib/_util.py +++ b/piker/brokers/ib/_util.py @@ -29,7 +29,7 @@ import tractor -from .._util import get_logger +from piker.brokers._util import get_logger if TYPE_CHECKING: from .api import Client diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 171578aa9..fd0d024d6 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -85,8 +85,9 @@ # non-relative for backends so that non-builting backends # can be easily modelled after this style B) from piker import config -from piker.brokers._util import ( +from ._util import ( log, + # only for the ib_sync internal logging get_logger, ) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index d6c361334..21d4baa5c 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -60,6 +60,7 @@ Position, Transaction, open_trade_ledger, + TransactionLedger, iter_by_dt, open_pps, PpTable, @@ -78,10 +79,10 @@ from piker.accounting import ( MktPair, ) +from ._util import log from .api import ( _accounts2clients, con2fqme, - log, get_config, open_client_proxies, Client, @@ -90,6 +91,7 @@ from ._flex_reports import parse_flex_dt + def pack_position( pos: IbPosition @@ -339,7 +341,7 @@ async def update_and_audit_msgs( acctid: str, # no `ib.` prefix is required! 
pps: list[Position], cids2pps: dict[tuple[str, int], BrokerdPosition], - validate: bool = False, + validate: bool = True, ) -> list[BrokerdPosition]: @@ -352,9 +354,9 @@ async def update_and_audit_msgs( # for comparison/audit versus the piker equivalent # breakeven pp calcs. ibppmsg = cids2pps.get((acctid, bs_mktid)) - if ibppmsg: - symbol = ibppmsg.symbol + + symbol: str = ibppmsg.symbol msg = BrokerdPosition( broker='ib', @@ -375,36 +377,41 @@ async def update_and_audit_msgs( ibfmtmsg = pformat(ibppmsg.to_dict()) pikerfmtmsg = pformat(msg.to_dict()) - if validate: - ibsize = ibppmsg.size - pikersize = msg.size - diff = pikersize - ibsize - - # if ib reports a lesser pp it's not as bad since we can - # presume we're at least not more in the shit then we - # thought. - if diff and pikersize: - reverse_split_ratio = pikersize / ibsize - split_ratio = 1/reverse_split_ratio - - if split_ratio >= reverse_split_ratio: - entry = f'split_ratio = {int(split_ratio)}' - else: - entry = f'split_ratio = 1/{int(reverse_split_ratio)}' - - # raise ValueError( - log.error( - f'Pos mismatch in ib vs. the piker ledger!\n' - f'IB:\n{ibfmtmsg}\n\n' - f'PIKER:\n{pikerfmtmsg}\n\n' - 'If you are expecting a (reverse) split in this ' - 'instrument you should probably put the following' - 'in the `pps.toml` section:\n' - f'{entry}\n' - # f'reverse_split_ratio: {reverse_split_ratio}\n' - # f'split_ratio: {split_ratio}\n\n' - ) - msg.size = ibsize + ibsize = ibppmsg.size + pikersize = msg.size + diff = pikersize - ibsize + + # if ib reports a lesser pp it's not as bad since we can + # presume we're at least not more in the shit then we + # thought. + if diff and pikersize: + reverse_split_ratio = pikersize / ibsize + split_ratio = 1/reverse_split_ratio + + if split_ratio >= reverse_split_ratio: + entry = f'split_ratio = {int(split_ratio)}' + else: + entry = f'split_ratio = 1/{int(reverse_split_ratio)}' + + msg.size = ibsize + + logmsg: str = ( + f'Pos mismatch in ib vs. the piker ledger!\n' + f'IB:\n{ibfmtmsg}\n\n' + f'PIKER:\n{pikerfmtmsg}\n\n' + 'If you are expecting a (reverse) split in this ' + 'instrument you should probably put the following' + 'in the `pps.toml` section:\n' + f'{entry}\n' + # f'reverse_split_ratio: {reverse_split_ratio}\n' + # f'split_ratio: {split_ratio}\n\n' + ) + + if validate: + raise ValueError(logmsg) + else: + # await tractor.pause() + log.error(logmsg) if ibppmsg.avg_price != msg.avg_price: # TODO: make this a "propaganda" log level? @@ -432,12 +439,16 @@ async def update_and_audit_msgs( size=p.size, avg_price=p.ppu, ) - if validate and p.size: - # raise ValueError( - log.error( + if p.size: + logmsg: str = ( f'UNEXPECTED POSITION says IB => {msg.symbol}\n' 'Maybe they LIQUIDATED YOU or are missing ledger entries?\n' ) + log.error(logmsg) + + # if validate: + # raise ValueError(logmsg) + msgs.append(msg) return msgs @@ -520,10 +531,8 @@ async def open_trade_event_stream( @tractor.context -async def trades_dialogue( - +async def open_trade_dialog( ctx: tractor.Context, - # loglevel: str = None, ) -> AsyncIterator[dict[str, Any]]: @@ -575,6 +584,7 @@ async def trades_dialogue( # open ledger and pptable wrapper for each # detected account. + ledger: TransactionLedger ledger = ledgers[acctid] = lstack.enter_context( open_trade_ledger( 'ib', @@ -643,13 +653,14 @@ async def trades_dialogue( # TODO: fix this `tractor` BUG! 
# https://github.com/goodboy/tractor/issues/354 - # await tractor.breakpoint() + # await tractor.pp() if trade_entries: # write ledger with all new api_trades # **AFTER** we've updated the `pps.toml` # from the original ledger state! (i.e. this # is currently done on exit) + for tid, entry in trade_entries.items(): ledger.setdefault(tid, {}).update(entry) @@ -670,6 +681,7 @@ async def trades_dialogue( # -> collect all ib-pp reported positions so that we can be # sure know which positions to update from the ledger if # any are missing from the ``pps.toml`` + # await tractor.pp() pos: IbPosition # named tuple subtype for pos in client.positions(): @@ -702,7 +714,7 @@ async def trades_dialogue( acctid, pps.values(), cids2pps, - validate=True, + validate=False, ) all_positions.extend(msg for msg in msgs) From efd52e8ce3b3cd151b4478755483b184c5aca3fd Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 24 Jun 2023 17:23:10 -0400 Subject: [PATCH 63/73] kraken: always insert ticks `list`, only append if vlm --- piker/brokers/kraken/feed.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index 26956a1c3..d0b14f33e 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -24,7 +24,6 @@ ) from datetime import datetime from typing import ( - Any, AsyncGenerator, Callable, Optional, @@ -460,12 +459,16 @@ async def subscribe(ws: NoBsWs): last = quote.close quote = normalize(quote) + ticks = quote.setdefault( + 'ticks', + [], + ) if tick_volume: - quote['ticks'] = [{ + ticks.append({ 'type': 'trade', 'price': last, 'size': tick_volume, - }] + }) case 'l1': # passthrough quote msg From cbe364cb62bf6495e21d55a40f0c564ba1106e7d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 24 Jun 2023 17:24:05 -0400 Subject: [PATCH 64/73] Add explicit `piker.cli` logger name for `pikerd` --- piker/cli/__init__.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/piker/cli/__init__.py b/piker/cli/__init__.py index a812555e7..6972270d0 100644 --- a/piker/cli/__init__.py +++ b/piker/cli/__init__.py @@ -39,7 +39,7 @@ from .. import config -log = get_logger('cli') +log = get_logger('piker.cli') @click.command() @@ -71,16 +71,14 @@ def pikerd( Spawn the piker broker-daemon. ''' - from .. import service - - log = get_console_log(loglevel) + log = get_console_log(loglevel, name='cli') if pdb: log.warning(( "\n" - "!!! You have enabled daemon DEBUG mode !!!\n" - "If a daemon crashes it will likely block" - " the service until resumed from console!\n" + "!!! YOU HAVE ENABLED DAEMON DEBUG MODE !!!\n" + "When a `piker` daemon crashes it will block the " + "task-thread until resumed from console!\n" "\n" )) @@ -91,6 +89,8 @@ def pikerd( int(port) or _default_registry_port, ) + from .. import service + async def main(): service_mngr: service.Services From 032976b1187e79f7f1ce1bb81151987f7b468de0 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 24 Jun 2023 17:31:09 -0400 Subject: [PATCH 65/73] view_mode: add in one missing debug_print block.. 
--- piker/ui/view_mode.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/piker/ui/view_mode.py b/piker/ui/view_mode.py index d785c67a2..82dfbf623 100644 --- a/piker/ui/view_mode.py +++ b/piker/ui/view_mode.py @@ -488,7 +488,8 @@ def overlay_viewlists( upt.rng = r_up upt.y_val = new_major_ymx profiler(msg) - print(msg) + if debug_print: + print(msg) # register curves by a "full" dispersion metric for # later sort order in the overlay (technique From cf1f4bed758e36d46e751a365f8512b4eb28dbae Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 25 Jun 2023 17:21:15 -0400 Subject: [PATCH 66/73] Move `.accounting` related config loaders to subpkg Like you'd think: - `load_ledger()` -> ._ledger - `load_accounrt()` -> ._pos Also fixup the old `load_pps_from_ledger()` and expose it from a new `.accounting.cli.disect` cli cmd for trying to figure out why pp calcs are totally mucked on stupid ib.. --- piker/accounting/_ledger.py | 48 ++++++++++- piker/accounting/_pos.py | 167 +++++++++++++++++++++++++++--------- piker/accounting/cli.py | 43 ++++++++-- piker/config.py | 93 -------------------- piker/log.py | 5 +- 5 files changed, 213 insertions(+), 143 deletions(-) diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 5107f2bbc..04ee04b7e 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -123,6 +123,11 @@ def update_from_t( self, t: Transaction, ) -> None: + ''' + Given an input `Transaction`, cast to `dict` and update + from it's transaction id. + + ''' self.data[t.tid] = t.to_dict() def iter_trans( @@ -259,6 +264,45 @@ def dyn_parse_to_dt( yield tid, data +def load_ledger( + brokername: str, + acctid: str, + +) -> tuple[dict, Path]: + ''' + Load a ledger (TOML) file from user's config directory: + $CONFIG_DIR/accounting/ledgers/trades__.toml + + Return its `dict`-content and file path. + + ''' + import time + try: + import tomllib + except ModuleNotFoundError: + import tomli as tomllib + + ldir: Path = config._config_dir / 'accounting' / 'ledgers' + if not ldir.is_dir(): + ldir.mkdir() + + fname = f'trades_{brokername}_{acctid}.toml' + fpath: Path = ldir / fname + + if not fpath.is_file(): + log.info( + f'Creating new local trades ledger: {fpath}' + ) + fpath.touch() + + with fpath.open(mode='rb') as cf: + start = time.time() + ledger_dict = tomllib.load(cf) + log.debug(f'Ledger load took {time.time() - start}s') + + return ledger_dict, fpath + + @cm def open_trade_ledger( broker: str, @@ -267,7 +311,7 @@ def open_trade_ledger( # default is to sort by detected datetime-ish field tx_sort: Callable = iter_by_dt, -) -> Generator[dict, None, None]: +) -> Generator[TransactionLedger, None, None]: ''' Indempotently create and read in a trade log file from the ``/ledgers/`` directory. @@ -277,7 +321,7 @@ def open_trade_ledger( name as defined in the user's ``brokers.toml`` config. ''' - ledger_dict, fpath = config.load_ledger(broker, account) + ledger_dict, fpath = load_ledger(broker, account) cpy = ledger_dict.copy() ledger = TransactionLedger( ledger_dict=cpy, diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 3af0eeef2..f50040cbc 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -42,6 +42,7 @@ Transaction, iter_by_dt, open_trade_ledger, + TransactionLedger, ) from ._mktinfo import ( MktPair, @@ -49,7 +50,6 @@ unpack_fqme, ) from .. 
import config -from ..brokers import get_brokermod from ..clearing._messages import ( BrokerdPosition, Status, @@ -327,7 +327,8 @@ def calc_ppu( entry: dict[str, Any] for (tid, entry) in self.iter_clears(): clear_size = entry['size'] - clear_price = entry['price'] + clear_price: str | float = entry['price'] + is_clear: bool = not isinstance(clear_price, str) last_accum_size = asize_h[-1] if asize_h else 0 accum_size = last_accum_size + clear_size @@ -340,9 +341,18 @@ def calc_ppu( asize_h.append(0) continue - if accum_size == 0: - ppu_h.append(0) - asize_h.append(0) + # on transfers we normally write some non-valid + # price since withdrawal to another account/wallet + # has nothing to do with inter-asset-market prices. + # TODO: this should be better handled via a `type: 'tx'` + # field as per existing issue surrounding all this: + # https://github.com/pikers/piker/issues/510 + if isinstance(clear_price, str): + # TODO: we can't necessarily have this commit to + # the overall pos size since we also need to + # include other positions contributions to this + # balance or we might end up with a -ve balance for + # the position.. continue # test if the pp somehow went "passed" a net zero size state @@ -375,7 +385,10 @@ def calc_ppu( # abs_clear_size = abs(clear_size) abs_new_size = abs(accum_size) - if abs_diff > 0: + if ( + abs_diff > 0 + and is_clear + ): cost_basis = ( # cost basis for this clear @@ -397,6 +410,12 @@ def calc_ppu( asize_h.append(accum_size) else: + # TODO: for PPU we should probably handle txs out + # (aka withdrawals) similarly by simply not having + # them contrib to the running PPU calc and only + # when the next entry clear comes in (which will + # then have a higher weighting on the PPU). + # on "exit" clears from a given direction, # only the size changes not the price-per-unit # need to be updated since the ppu remains constant @@ -734,48 +753,63 @@ def write_config(self) -> None: ) -def load_pps_from_ledger( - +def load_account( brokername: str, - acctname: str, - - # post normalization filter on ledger entries to be processed - filter_by: list[dict] | None = None, + acctid: str, -) -> tuple[ - dict[str, Transaction], - dict[str, Position], -]: +) -> tuple[dict, Path]: ''' - Open a ledger file by broker name and account and read in and - process any trade records into our normalized ``Transaction`` form - and then update the equivalent ``Pptable`` and deliver the two - bs_mktid-mapped dict-sets of the transactions and pps. 
+ Load a accounting (with positions) file from + $CONFIG_DIR/accounting/account...toml + + Where normally $CONFIG_DIR = ~/.config/piker/ + and we implicitly create a accounting subdir which should + normally be linked to a git repo managed by the user B) ''' - with ( - open_trade_ledger(brokername, acctname) as ledger, - open_pps(brokername, acctname) as table, - ): - if not ledger: - # null case, no ledger file with content - return {} + legacy_fn: str = f'pps.{brokername}.{acctid}.toml' + fn: str = f'account.{brokername}.{acctid}.toml' - mod = get_brokermod(brokername) - src_records: dict[str, Transaction] = mod.norm_trade_records(ledger) + dirpath: Path = config._config_dir / 'accounting' + if not dirpath.is_dir(): + dirpath.mkdir() - if filter_by: - records = {} - bs_mktids = set(filter_by) - for tid, r in src_records.items(): - if r.bs_mktid in bs_mktids: - records[tid] = r - else: - records = src_records + conf, path = config.load( + path=dirpath / fn, + decode=tomlkit.parse, + touch_if_dne=True, + ) - updated = table.update_from_trans(records) + if not conf: + legacypath = dirpath / legacy_fn + log.warning( + f'Your account file is using the legacy `pps.` prefix..\n' + f'Rewriting contents to new name -> {path}\n' + 'Please delete the old file!\n' + f'|-> {legacypath}\n' + ) + if legacypath.is_file(): + legacy_config, _ = config.load( + path=legacypath, + + # TODO: move to tomlkit: + # - needs to be fixed to support bidict? + # https://github.com/sdispater/tomlkit/issues/289 + # - we need to use or fork's fix to do multiline array + # indenting. + decode=tomlkit.parse, + ) + conf.update(legacy_config) + + # XXX: override the presumably previously non-existant + # file with legacy's contents. + config.write( + conf, + path=path, + fail_empty=False, + ) - return records, updated + return conf, path @cm @@ -792,7 +826,7 @@ def open_pps( ''' conf: dict conf_path: Path - conf, conf_path = config.load_account(brokername, acctid) + conf, conf_path = load_account(brokername, acctid) if brokername in conf: log.warning( @@ -927,3 +961,56 @@ def open_pps( finally: if write_on_exit: table.write_config() + + +def load_pps_from_ledger( + + brokername: str, + acctname: str, + + # post normalization filter on ledger entries to be processed + filter_by_ids: list[str] | None = None, + +) -> tuple[ + dict[str, Transaction], + PpTable, +]: + ''' + Open a ledger file by broker name and account and read in and + process any trade records into our normalized ``Transaction`` form + and then update the equivalent ``Pptable`` and deliver the two + bs_mktid-mapped dict-sets of the transactions and pps. 
+ + ''' + ledger: TransactionLedger + table: PpTable + with ( + open_trade_ledger(brokername, acctname) as ledger, + open_pps(brokername, acctname) as table, + ): + if not ledger: + # null case, no ledger file with content + return {} + + from ..brokers import get_brokermod + mod = get_brokermod(brokername) + src_records: dict[str, Transaction] = mod.norm_trade_records( + ledger + ) + + if not filter_by_ids: + # records = src_records + records = ledger + + else: + records = {} + bs_mktids = set(map(str, filter_by_ids)) + + # for tid, recdict in ledger.items(): + for tid, r in src_records.items(): + if r.bs_mktid in bs_mktids: + records[tid] = r.to_dict() + + # updated = table.update_from_trans(records) + + return records, table diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 75798f3f4..c184614ce 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -18,6 +18,7 @@ CLI front end for trades ledger and position tracking management. ''' +from __future__ import annotations from rich.console import Console from rich.markdown import Markdown import tractor @@ -29,9 +30,18 @@ open_piker_runtime, ) from ..clearing._messages import BrokerdPosition -from ..config import load_ledger from ..calc import humanize from ..brokers._daemon import broker_init +from ._ledger import ( + load_ledger, + # open_trade_ledger, + TransactionLedger, +) +from ._pos import ( + PpTable, + load_pps_from_ledger, + # load_account, +) ledger = typer.Typer() @@ -39,7 +49,7 @@ def unpack_fqan( fully_qualified_account_name: str, - console: Console | None, + console: Console | None = None, ) -> tuple | bool: try: brokername, account = fully_qualified_account_name.split('.') @@ -225,7 +235,8 @@ async def main(): @ledger.command() def disect( - fully_qualified_account_name: str, + # "fully_qualified_account_name" + fqan: str, bs_mktid: int, # for ib pdb: bool = False, @@ -235,10 +246,28 @@ def disect( ), ): pair: tuple[str, str] - if not (pair := unpack_fqan( - fully_qualified_account_name, - )): - return + if not (pair := unpack_fqan(fqan)): + raise ValueError('{fqan} malformed!?') + + brokername, account = pair + + ledger: TransactionLedger + table: PpTable + records, table = load_pps_from_ledger( + brokername, + account, + # filter_by_id = {568549458}, + filter_by_ids={bs_mktid}, + ) + breakpoint() + # tractor.pause_from_sync() + # with open_trade_ledger( + # brokername, + # account, + # ) as ledger: + # for tid, rec in ledger.items(): + # bs_mktid: str = rec['bs_mktid'] + if __name__ == "__main__": diff --git a/piker/config.py b/piker/config.py index 3bb026d5a..80f7b1d1a 100644 --- a/piker/config.py +++ b/piker/config.py @@ -22,7 +22,6 @@ import sys import os import shutil -import time from typing import ( Callable, MutableMapping, @@ -310,98 +309,6 @@ def load( return config, path -def load_account( - brokername: str, - acctid: str, - -) -> tuple[dict, Path]: - ''' - Load a accounting (with positions) file from - $CONFIG_DIR/accounting/account...toml - - Where normally $CONFIG_DIR = ~/.config/piker/ - and we implicitly create a accounting subdir which should - normally be linked to a git repo managed by the user B) - - ''' - legacy_fn: str = f'pps.{brokername}.{acctid}.toml' - fn: str = f'account.{brokername}.{acctid}.toml' - - dirpath: Path = _config_dir / 'accounting' - if not dirpath.is_dir(): - dirpath.mkdir() - - config, path = load( - path=dirpath / fn, - decode=tomlkit.parse, - touch_if_dne=True, - ) - - if not config: - legacypath = dirpath / legacy_fn - log.warning( - f'Your 
account file is using the legacy `pps.` prefix..\n' - f'Rewriting contents to new name -> {path}\n' - 'Please delete the old file!\n' - f'|-> {legacypath}\n' - ) - if legacypath.is_file(): - legacy_config, _ = load( - path=legacypath, - - # TODO: move to tomlkit: - # - needs to be fixed to support bidict? - # https://github.com/sdispater/tomlkit/issues/289 - # - we need to use or fork's fix to do multiline array - # indenting. - decode=tomlkit.parse, - ) - config.update(legacy_config) - - # XXX: override the presumably previously non-existant - # file with legacy's contents. - write( - config, - path=path, - fail_empty=False, - ) - - return config, path - - -def load_ledger( - brokername: str, - acctid: str, - -) -> tuple[dict, Path]: - ''' - Load a ledger (TOML) file from user's config directory: - $CONFIG_DIR/accounting/ledgers/trades__.toml - - Return its `dict`-content and file path. - - ''' - ldir: Path = _config_dir / 'accounting' / 'ledgers' - if not ldir.is_dir(): - ldir.mkdir() - - fname = f'trades_{brokername}_{acctid}.toml' - fpath: Path = ldir / fname - - if not fpath.is_file(): - log.info( - f'Creating new local trades ledger: {fpath}' - ) - fpath.touch() - - with fpath.open(mode='rb') as cf: - start = time.time() - ledger_dict = tomllib.load(cf) - log.debug(f'Ledger load took {time.time() - start}s') - - return ledger_dict, fpath - - def write( config: dict, # toml config as dict diff --git a/piker/log.py b/piker/log.py index a36beec02..56776e1e8 100644 --- a/piker/log.py +++ b/piker/log.py @@ -40,7 +40,10 @@ def get_logger( Return the package log or a sub-log for `name` if provided. ''' - return tractor.log.get_logger(name=name, _root_name=_proj_name) + return tractor.log.get_logger( + name=name, + _root_name=_proj_name, + ) def get_console_log( From 2d291bd2c3574ec7626440d98df1fb835a00fa6c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 25 Jun 2023 17:24:08 -0400 Subject: [PATCH 67/73] ib: expose `.broker.norm_trade_records()` from pkg --- piker/brokers/ib/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/piker/brokers/ib/__init__.py b/piker/brokers/ib/__init__.py index 1acef9746..07ed8af58 100644 --- a/piker/brokers/ib/__init__.py +++ b/piker/brokers/ib/__init__.py @@ -35,10 +35,12 @@ ) from .broker import ( open_trade_dialog, + norm_trade_records, ) __all__ = [ 'get_client', + 'norm_trade_records', 'open_trade_dialog', 'open_history_client', 'open_symbol_search', From 249b091c2f5c0ccd59938ff3f7fed4b60cb0def8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 26 Jun 2023 13:30:56 -0400 Subject: [PATCH 68/73] binance: better bad account in order request error msg --- piker/brokers/binance/broker.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 270088b25..067a21636 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -198,8 +198,11 @@ async def handle_order_requests( account = msg.get('account') if account not in {'binance.spot', 'binance.futes'}: log.error( - 'This is a binance account, \ - only a `binance.spot/.futes` selection is valid' + 'Order request does not have a valid binance account name?\n' + 'Only one of\n' + '- `binance.spot` or,\n' + '- `binance.usdtm`\n' + 'is currently valid!' 
) await ems_order_stream.send( BrokerdError( @@ -277,7 +280,7 @@ async def open_trade_dialog( # TODO: load other market wide data / statistics: # - OI: https://binance-docs.github.io/apidocs/futures/en/#open-interest # - OI stats: https://binance-docs.github.io/apidocs/futures/en/#open-interest-statistics - accounts: bidict[str, str] = bidict() + accounts: bidict[str, str] = bidict({'binance.usdtm': None}) balances: dict[Asset, float] = {} positions: list[BrokerdPosition] = [] @@ -292,6 +295,8 @@ async def open_trade_dialog( # 'canWithdraw': True, # 'feeTier': 0} if 'account' in req: + # NOTE: fill in the hash-like key/alias binance + # provides for the account. alias: str = resp['accountAlias'] accounts['binance.usdtm'] = alias @@ -369,7 +374,10 @@ async def open_trade_dialog( f'{pformat(entry)}\n' ) - await ctx.started((positions, list(accounts))) + await ctx.started(( + positions, + list(accounts) + )) # TODO: package more state tracking into the dialogs API? # - hmm maybe we could include `OrderDialogs.dids: From 85fa87fe6fe2c2314445d74ffd978096213e2eaf Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 26 Jun 2023 13:38:57 -0400 Subject: [PATCH 69/73] Update the `_emsd_main()` doc task tree layout --- piker/clearing/_client.py | 4 +-- piker/clearing/_ems.py | 54 +++++++++++++++++++++------------------ 2 files changed, 31 insertions(+), 27 deletions(-) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index 436b4f8e1..9977f95d7 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -216,8 +216,8 @@ async def open_ems( loglevel: str = 'error', ) -> tuple[ - OrderClient, - tractor.MsgStream, + OrderClient, # client + tractor.MsgStream, # order ctl stream dict[ # brokername, acctid tuple[str, str], diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index 1bb57ae79..c18631a66 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -1529,30 +1529,34 @@ async def _emsd_main( received in a stream from that client actor and then responses are streamed back up to the original calling task in the same client. - The primary ``emsd`` task trees are: + The primary ``emsd`` task tree is: - ``_setup_persistent_emsd()``: - is the ``emsd`` actor's primary root task which sets up an - actor-global ``Router`` instance and starts a relay loop task - which lives until the backend broker is shutdown or the ems is - terminated. - | - - (maybe) ``translate_and_relay_brokerd_events()``: - accept normalized trades responses from brokerd, process and - relay to ems client(s); this is a effectively a "trade event - reponse" proxy-broker. - - - ``_emsd_main()``: - attaches a brokerd real-time quote feed and trades dialogue with - brokderd trading api for every connecting client. - | - - ``clear_dark_triggers()``: - run (dark order) conditions on inputs and trigger brokerd "live" - order submissions. - | - - ``process_client_order_cmds()``: - accepts order cmds from requesting clients, registers dark orders and - alerts with clearing loop. 
+ is the ``emsd`` actor's primary *service-fixture* task which + is opened by the `pikerd` service manager and sets up + a process-global (actor-local) ``Router`` instance and opens + a service nursery which lives until the backend broker is + shutdown or the ems is terminated; all tasks are + *dynamically* started (and persisted) within this service + nursery when the below endpoint context is opened: + | + - ``_emsd_main()``: + attaches a real-time quote feed and trades dialogue with + a `brokerd` actor which connects to the backend broker's + trading api for every connecting client. + | + - ``clear_dark_triggers()``: + run (dark order) conditions on inputs and trigger brokerd + "live" order submissions. + | + - ``process_client_order_cmds()``: + accepts order cmds from requesting clients, registers + dark orders and alerts with above (dark) clearing loop. + | + - (maybe) ``translate_and_relay_brokerd_events()``: + accept normalized trades responses from brokerd, process and + relay to ems client(s); this is a effectively a "trade event + reponse" proxy-broker. ''' global _router @@ -1560,9 +1564,9 @@ async def _emsd_main( broker, _, _, _ = unpack_fqme(fqme) - # TODO: would be nice if in tractor we can require either a ctx arg, - # or a named arg with ctx in it and a type annotation of - # tractor.Context instead of strictly requiring a ctx arg. + # TODO: would be nice if in tractor we can require either a ctx + # arg, or a named arg with ctx in it and a type annotation of + # `tractor.Context` instead of strictly requiring a ctx arg. ems_ctx = ctx # spawn one task per broker feed From 49e67d5f361320a3b931d25791d493b71de857fd Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 26 Jun 2023 13:39:32 -0400 Subject: [PATCH 70/73] Always add a paper (account) entry to order mode init Allows for tracking paper engine orders despite the ems not necessarily being opened by the current order mode instance (UI) in "paper" execution mode; useful for tracking bots/strats running against the same EMS daemon. --- piker/ui/order_mode.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 0b3a18970..9debfc582 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -587,7 +587,7 @@ def on_cancel( ) -> None: - msg = self.client._sent_orders.pop(uuid, None) + msg: Order = self.client._sent_orders.pop(uuid, None) if msg is not None: self.lines.remove_line(uuid=uuid) @@ -715,7 +715,8 @@ async def open_order_mode( loglevel: str = 'info' ) -> None: - '''Activate chart-trader order mode loop: + ''' + Activate chart-trader order mode loop: - connect to emsd - load existing positions @@ -769,15 +770,20 @@ async def open_order_mode( accounts_def: bidict[str, str | None] = config.load_accounts( providers=[mkt.broker], ) + # await tractor.pause() # XXX: ``brokerd`` delivers a set of account names that it # allows use of but the user also can define the accounts they'd # like to use, in order, in their `brokers.toml` file. - accounts = {} + accounts: dict[str, str] = {} for name in brokerd_accounts: # ensure name is in ``brokers.toml`` accounts[name] = accounts_def[name] + # always add a paper entry so that paper cleared + # order dialogs can be tracked in the order mode UIs. + accounts['paper'] = 'paper' + # first account listed is the one we select at startup # (aka order based selection). 
pp_account = next( From cdf9105d0de18acd7706f0d7078f5afaf2b10328 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 26 Jun 2023 15:21:30 -0400 Subject: [PATCH 71/73] Export `Flume` and `Feed` from `piker.data` --- piker/data/__init__.py | 4 ++++ piker/data/feed.py | 7 +++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/piker/data/__init__.py b/piker/data/__init__.py index cd0a11833..087928ec7 100644 --- a/piker/data/__init__.py +++ b/piker/data/__init__.py @@ -35,11 +35,15 @@ def_ohlcv_fields, ) from .feed import ( + Feed, open_feed, ) +from .flows import Flume __all__ = [ + 'Flume', + 'Feed', 'open_feed', 'ShmArray', 'iterticks', diff --git a/piker/data/feed.py b/piker/data/feed.py index ea7f360b9..fcd193da0 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -642,9 +642,12 @@ class Feed(Struct): ''' mods: dict[str, ModuleType] = {} portals: dict[ModuleType, tractor.Portal] = {} - flumes: dict[str, Flume] = {} + flumes: dict[ + str, # FQME + Flume, + ] = {} streams: dict[ - str, + str, # broker name trio.abc.ReceiveChannel[dict[str, Any]], ] = {} From e7e7919a43a14bbb63c115f57d1f2ade0dbef4ba Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 26 Jun 2023 15:22:05 -0400 Subject: [PATCH 72/73] Ensure paper engine logger is `piker.clearing` instance.. --- piker/clearing/_paper_engine.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 34e7ec58e..ac5f3d3fd 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -55,6 +55,7 @@ from ..accounting import unpack_fqme from ._util import ( log, # sub-sys logger + get_console_log, ) from ._messages import ( BrokerdCancel, @@ -536,7 +537,8 @@ async def open_trade_dialog( ) -> None: - tractor.log.get_console_log(loglevel) + # enable piker.clearing console log for *this* subactor + get_console_log(loglevel) ppt: PpTable ledger: TransactionLedger From 4a8eafabb8b814264c3d9d2537e53d93287285d1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 26 Jun 2023 16:00:16 -0400 Subject: [PATCH 73/73] Never key error on bad flow pops.. --- piker/clearing/_util.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/piker/clearing/_util.py b/piker/clearing/_util.py index 9eebf1c4d..d3c0fb8ef 100644 --- a/piker/clearing/_util.py +++ b/piker/clearing/_util.py @@ -90,4 +90,7 @@ def pop( for the given order id. ''' - return self._flows.pop(oid) + if (flow := self._flows.pop(oid, None)) is None: + log.warning(f'No flow found for oid: {oid}') + + return flow
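
For completeness, a caller-side sketch (illustrative only, assuming a
surrounding `OrderDialogs` instance `dialogs` and an ems `oid`) of how
the now non-raising `.pop()` is meant to be guarded, mirroring the
existing `.get()` usage in the kraken relay loop:

    # guard instead of catching a `KeyError` at the call site
    if (flow := dialogs.pop(oid)) is None:
        # nothing was ever tracked for this oid, nothing to tear down
        return

    symbol: str = flow.get('symbol', 'N/A')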