Compare commits

No commits in common. "kraken_fill_bugs" and "310_plus" have entirely different histories.

kraken_fill_bugs ... 310_plus
@@ -50,8 +50,3 @@ prefer_data_account = [
paper = "XX0000000"
margin = "X0000000"
ira = "X0000000"

[deribit]
key_id = 'XXXXXXXX'
key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'
@@ -3,12 +3,11 @@
version: "3.5"

services:
  ib_gw_paper:
  ib-gateway:
    # other image tags available:
    # https://github.com/waytrade/ib-gateway-docker#supported-tags
    # image: waytrade/ib-gateway:981.3j
    image: waytrade/ib-gateway:1012.2i
    restart: always  # restart whenever there's a crash or a user click
    image: waytrade/ib-gateway:981.3j
    restart: always
    network_mode: 'host'

    volumes:
@@ -40,12 +39,14 @@ services:
    # this compose file which looks something like:
    # TWS_USERID='myuser'
    # TWS_PASSWORD='guest'
    # TRADING_MODE=paper (or live)
    # VNC_SERVER_PASSWORD='diggity'

    environment:
      TWS_USERID: ${TWS_USERID}
      TWS_PASSWORD: ${TWS_PASSWORD}
      TRADING_MODE: 'paper'
      VNC_SERVER_PASSWORD: 'doggy'
      VNC_SERVER_PORT: '3003'
      TRADING_MODE: ${TRADING_MODE:-paper}
      VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-}
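      # NOTE: the ``${VAR:-default}`` form above is standard compose
      # variable substitution: the value comes from the environment or
      # the ``.env`` file when set, otherwise the fallback after ``:-``
      # is used (e.g. 'paper' for TRADING_MODE).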
    # ports:
    #   - target: 4002
@@ -61,40 +62,3 @@ services:
      # - "127.0.0.1:4001:4001"
      # - "127.0.0.1:4002:4002"
      # - "127.0.0.1:5900:5900"

  ib_gw_live:
    image: waytrade/ib-gateway:1012.2i
    restart: always
    network_mode: 'host'

    volumes:
      - type: bind
        source: ./jts_live.ini
        target: /root/jts/jts.ini
        # don't let ibc clobber this file for
        # the main reason of not having a stupid
        # timezone set..
        read_only: true

      # force our own ibc config
      - type: bind
        source: ./ibc.ini
        target: /root/ibc/config.ini

      # force our noop script - socat isn't needed in host mode.
      - type: bind
        source: ./fork_ports_delayed.sh
        target: /root/scripts/fork_ports_delayed.sh

      # force our noop script - socat isn't needed in host mode.
      - type: bind
        source: ./run_x11_vnc.sh
        target: /root/scripts/run_x11_vnc.sh
        read_only: true

    # NOTE: to fill these out, define an `.env` file in the same dir as
    # this compose file which looks something like:
    environment:
      TRADING_MODE: 'live'
      VNC_SERVER_PASSWORD: 'doggy'
      VNC_SERVER_PORT: '3004'
@@ -188,7 +188,7 @@ AcceptNonBrokerageAccountWarning=yes
#
# The default value is 60.

LoginDialogDisplayTimeout=20
LoginDialogDisplayTimeout = 60


@@ -292,7 +292,7 @@ ExistingSessionDetectedAction=primary
# be set dynamically at run-time: most users will never need it,
# so don't use it unless you know you need it.

; OverrideTwsApiPort=4002
OverrideTwsApiPort=4002


# Read-only Login
@@ -1,33 +0,0 @@
[IBGateway]
ApiOnly=true
LocalServerPort=4001
# NOTE: must be set if using IBC's "reject" mode
TrustedIPs=127.0.0.1
; RemoteHostOrderRouting=ndc1.ibllc.com
; WriteDebug=true
; RemotePortOrderRouting=4001
; useRemoteSettings=false
; tradingMode=p
; Steps=8
; colorPalletName=dark

# window geo, this may be useful for sending `xdotool` commands?
; MainWindow.Width=1986
; screenHeight=3960


[Logon]
Locale=en
# most markets are oriented around this zone
# so might as well hard code it.
TimeZone=America/New_York
UseSSL=true
displayedproxymsg=1
os_titlebar=true
s3store=true
useRemoteSettings=false

[Communication]
ctciAutoEncrypt=true
Region=usr
; Peer=cdc1.ibllc.com:4001
@@ -1,35 +1,16 @@
#!/bin/sh
# start vnc server and listen for connections
# on the port specified in `$VNC_SERVER_PORT`

# start VNC server
x11vnc \
    -listen 127.0.0.1 \
    -allow 127.0.0.1 \
    -rfbport "${VNC_SERVER_PORT}" \
    -ncache_cr \
    -listen localhost \
    -display :1 \
    -forever \
    -shared \
    -logappend /var/log/x11vnc.log \
    -bg \
    -nowf \
    -noxdamage \
    -noxfixes \
    -no6 \
    -noipv6 \

    # -nowcr \
    # TODO: can't use this because of ``asyncvnc`` issue:
    -autoport 3003 \
    # can't use this because of ``asyncvnc`` issue:
    # https://github.com/barneygale/asyncvnc/issues/1
    # -passwd 'ibcansmbz'

    # XXX: optional graphics caching flags that seem to rekt the overlay
    # of the 2 gw windows? When running a single gateway
    # this seems to maybe optimize some memory usage?
    # -ncache_cr \
    # -ncache \

    # NOTE: this will prevent logs from going to the console.
    # -logappend /var/log/x11vnc.log \

    # where to start allocating ports
    # -autoport "${VNC_SERVER_PORT}" \
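# (any VNC client pointed at the configured port should then show the
# gateway UI; e.g., assuming a TigerVNC install: ``vncviewer localhost:3003``)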
@@ -22,10 +22,10 @@ from typing import Optional, Union, Callable, Any
from contextlib import asynccontextmanager as acm
from collections import defaultdict

from msgspec import Struct
import tractor
from pydantic import BaseModel
import trio
from trio_typing import TaskStatus
import tractor

from .log import get_logger, get_console_log
from .brokers import get_brokermod
@@ -47,13 +47,16 @@ _root_modules = [
]


class Services(Struct):
class Services(BaseModel):

    actor_n: tractor._supervise.ActorNursery
    service_n: trio.Nursery
    debug_mode: bool  # tractor sub-actor debug mode flag
    service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}

    class Config:
        arbitrary_types_allowed = True
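        # ``arbitrary_types_allowed`` lets pydantic accept the plain
        # runtime objects above (a tractor nursery, a trio nursery, a
        # cancel-scope/portal mapping) which aren't validatable field
        # types.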

    async def start_service_task(
        self,
        name: str,
@@ -217,7 +220,7 @@ async def open_piker_runtime(
            # TODO: eventually we should be able to avoid
            # having the root have more than permissions to
            # spawn other specialized daemons I think?
            enable_modules=_root_modules + enable_modules,
            enable_modules=_root_modules,
        ) as _,
    ):
        yield tractor.current_actor()
@@ -33,13 +33,14 @@ import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor
from pydantic.dataclasses import dataclass
from pydantic import BaseModel
import wsproto

from .._cacheables import open_cached_client
from ._util import resproc, SymbolNotFound
from ..log import get_logger, get_console_log
from ..data import ShmArray
from ..data.types import Struct
from ..data._web_bs import open_autorecon_ws, NoBsWs

log = get_logger(__name__)
@@ -78,14 +79,12 @@ _show_wap_in_history = False


# https://binance-docs.github.io/apidocs/spot/en/#exchange-information
class Pair(Struct, frozen=True):
class Pair(BaseModel):
    symbol: str
    status: str

    baseAsset: str
    baseAssetPrecision: int
    cancelReplaceAllowed: bool
    allowTrailingStop: bool
    quoteAsset: str
    quotePrecision: int
    quoteAssetPrecision: int
@@ -105,14 +104,14 @@ class Pair(Struct, frozen=True):
    permissions: list[str]


class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.
@dataclass
class OHLC:
    """Description of the flattened OHLC quote format.

    For schema details see:
    https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams

    '''
    """
    time: int

    open: float
@@ -261,7 +260,6 @@ class Client:
        for i, bar in enumerate(bars):

            bar = OHLC(*bar)
            bar.typecast()

            row = []
            for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
@@ -289,7 +287,7 @@ async def get_client() -> Client:


# validation type
class AggTrade(Struct):
class AggTrade(BaseModel):
    e: str  # Event type
    E: int  # Event time
    s: str  # Symbol
@@ -343,9 +341,7 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:

        elif msg.get('e') == 'aggTrade':

            # NOTE: this is purely for a definition, ``msgspec.Struct``
            # does not runtime-validate until you decode/encode.
            # see: https://jcristharif.com/msgspec/structs.html#type-validation
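            # (illustratively: decoding with
            # ``msgspec.json.decode(raw, type=AggTrade)`` is what would
            # actually enforce these field types; plain construction via
            # ``AggTrade(**msg)`` does not check them.)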
            # validate
            msg = AggTrade(**msg)

            # TODO: type out and require this quote format
@@ -356,8 +352,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
                'brokerd_ts': time.time(),
                'ticks': [{
                    'type': 'trade',
                    'price': float(msg.p),
                    'size': float(msg.q),
                    'price': msg.p,
                    'size': msg.q,
                    'broker_ts': msg.T,
                }],
            }
@@ -452,7 +448,7 @@ async def stream_quotes(
            d = cache[sym.upper()]
            syminfo = Pair(**d)  # validation

            si = sym_infos[sym] = syminfo.to_dict()
            si = sym_infos[sym] = syminfo.dict()

            # XXX: after manually inspecting the response format we
            # just directly pick out the info we need
@@ -39,148 +39,6 @@ _config_dir = click.get_app_dir('piker')
| _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json') | ||||
| 
 | ||||
| 
 | ||||
| OK = '\033[92m' | ||||
| WARNING = '\033[93m' | ||||
| FAIL = '\033[91m' | ||||
| ENDC = '\033[0m' | ||||
| 
 | ||||
| 
 | ||||
| def print_ok(s: str, **kwargs): | ||||
|     print(OK + s + ENDC, **kwargs) | ||||
| 
 | ||||
| 
 | ||||
| def print_error(s: str, **kwargs): | ||||
|     print(FAIL + s + ENDC, **kwargs) | ||||
| 
 | ||||
| 
 | ||||
def get_method(client, meth_name: str):
    print(f'checking client for method \'{meth_name}\'...', end='', flush=True)
    method = getattr(client, meth_name, None)
    assert method
    print_ok('found!')
    return method


async def run_method(client, meth_name: str, **kwargs):
    method = get_method(client, meth_name)
    print('running...', end='', flush=True)
    result = await method(**kwargs)
    print_ok(f'done! result: {type(result)}')
    return result
| 
 | ||||
| async def run_test(broker_name: str): | ||||
|     brokermod = get_brokermod(broker_name) | ||||
|     total = 0 | ||||
|     passed = 0 | ||||
|     failed = 0 | ||||
| 
 | ||||
    print('getting client...', end='', flush=True)
    if not hasattr(brokermod, 'get_client'):
        print_error('fail! no \'get_client\' context manager found.')
        return

    async with brokermod.get_client(is_brokercheck=True) as client:
        print_ok('done! inside client context.')
| 
 | ||||
|         # check for methods present on brokermod | ||||
|         method_list = [ | ||||
|             'backfill_bars', | ||||
|             'get_client', | ||||
|             'trades_dialogue', | ||||
|             'open_history_client', | ||||
|             'open_symbol_search', | ||||
|             'stream_quotes', | ||||
| 
 | ||||
|         ] | ||||
| 
 | ||||
|         for method in method_list: | ||||
|             print( | ||||
|                 f'checking brokermod for method \'{method}\'...', | ||||
|                 end='', flush=True) | ||||
|             if not hasattr(brokermod, method): | ||||
|                 print_error(f'fail! method \'{method}\' not found.') | ||||
|                 failed += 1 | ||||
|             else: | ||||
|                 print_ok('done!') | ||||
|                 passed += 1 | ||||
| 
 | ||||
|             total += 1 | ||||
| 
 | ||||
        # check for methods present on brokermod.Client and their
        # results
| 
 | ||||
|         # for private methods only check is present | ||||
|         method_list = [ | ||||
|             'get_balances', | ||||
|             'get_assets', | ||||
|             'get_trades', | ||||
|             'get_xfers', | ||||
|             'submit_limit', | ||||
|             'submit_cancel', | ||||
|             'search_symbols', | ||||
|         ] | ||||
| 
 | ||||
|         for method_name in method_list: | ||||
|             try: | ||||
|                 get_method(client, method_name) | ||||
|                 passed += 1 | ||||
| 
 | ||||
|             except AssertionError: | ||||
|                 print_error(f'fail! method \'{method_name}\' not found.') | ||||
|                 failed += 1 | ||||
| 
 | ||||
|             total += 1 | ||||
| 
 | ||||
| 
 | ||||
        # run the remaining ``Client`` methods and sanity check their
        # results
| 
 | ||||
|         syms = await run_method(client, 'symbol_info') | ||||
|         total += 1 | ||||
| 
 | ||||
        if len(syms) == 0:
            raise Exception('Empty Symbol list?')
| 
 | ||||
|         passed += 1 | ||||
| 
 | ||||
|         first_sym = tuple(syms.keys())[0] | ||||
| 
 | ||||
|         method_list = [ | ||||
|             ('cache_symbols', {}), | ||||
|             ('search_symbols', {'pattern': first_sym[:-1]}), | ||||
|             ('bars', {'symbol': first_sym}) | ||||
|         ] | ||||
| 
 | ||||
|         for method_name, method_kwargs in method_list: | ||||
|             try: | ||||
|                 await run_method(client, method_name, **method_kwargs) | ||||
|                 passed += 1 | ||||
| 
 | ||||
|             except AssertionError: | ||||
|                 print_error(f'fail! method \'{method_name}\' not found.') | ||||
|                 failed += 1 | ||||
| 
 | ||||
|             total += 1 | ||||
| 
 | ||||
|         print(f'total: {total}, passed: {passed}, failed: {failed}') | ||||
| 
 | ||||
| 
 | ||||
@cli.command()
@click.argument('broker', nargs=1, required=True)
@click.pass_obj
def brokercheck(config, broker):
    '''
    Test broker apis for completeness.

    '''
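    # example usage (assuming a configured backend name):
    #
    #   piker brokercheck binance
    #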
    async def bcheck_main():
        async with maybe_spawn_brokerd(broker) as portal:
            await portal.run(run_test, broker)
            await portal.cancel_actor()

    trio.run(bcheck_main)
| 
 | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
| @click.option('--keys', '-k', multiple=True, | ||||
|               help='Return results only for these keys') | ||||
@@ -335,8 +193,6 @@ def contracts(ctx, loglevel, broker, symbol, ids):
|     brokermod = get_brokermod(broker) | ||||
|     get_console_log(loglevel) | ||||

    contracts = trio.run(partial(core.contracts, brokermod, symbol))
|     if not ids: | ||||
|         # just print out expiry dates which can be used with | ||||
@@ -1,70 +0,0 @@
``deribit`` backend
-------------------
pretty good liquidity crypto derivatives venue; uses a custom json rpc over ws
for client methods, then `cryptofeed` for data streams.

status
******
- supports option charts
- no order support yet


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [deribit]
    key_id = 'XXXXXXXX'
    key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'

To obtain an api id and secret you need to create an account, which can be a
real market account over at:

    - deribit.com  (requires KYC for deposit address)

Or a testnet account over at:

    - test.deribit.com

For testnet, once the account is created, here is how you deposit fake crypto
to try it out:

1) Go to Wallet:

.. figure:: assets/0_wallet.png
    :align: center
    :target: assets/0_wallet.png
    :alt: wallet page

2) Then click on the ellipsis menu and select deposit

.. figure:: assets/1_wallet_select_deposit.png
    :align: center
    :target: assets/1_wallet_select_deposit.png
    :alt: wallet deposit page

3) This will take you to the deposit address page

.. figure:: assets/2_gen_deposit_addr.png
    :align: center
    :target: assets/2_gen_deposit_addr.png
    :alt: generate deposit address page

4) After clicking generate you should see the address, copy it and go to the
`coin faucet <https://test.deribit.com/dericoin/BTC/deposit>`_ and send fake
coins to that address.

.. figure:: assets/3_deposit_address.png
    :align: center
    :target: assets/3_deposit_address.png
    :alt: generated address

5) Back in the deposit address page you should see the deposit in your history

.. figure:: assets/4_wallet_deposit_history.png
    :align: center
    :target: assets/4_wallet_deposit_history.png
    :alt: wallet deposit history
@@ -1,65 +0,0 @@
| # piker: trading gear for hackers | ||||
| # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Deribit backend. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| from piker.log import get_logger | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| from .api import ( | ||||
|     get_client, | ||||
| ) | ||||
| from .feed import ( | ||||
|     open_history_client, | ||||
|     open_symbol_search, | ||||
|     stream_quotes, | ||||
|     backfill_bars | ||||
| ) | ||||
| # from .broker import ( | ||||
|     # trades_dialogue, | ||||
|     # norm_trade_records, | ||||
| # ) | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'get_client', | ||||
| #    'trades_dialogue', | ||||
|     'open_history_client', | ||||
|     'open_symbol_search', | ||||
|     'stream_quotes', | ||||
| #    'norm_trade_records', | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| # tractor RPC enable arg | ||||
| __enable_modules__: list[str] = [ | ||||
|     'api', | ||||
|     'feed', | ||||
| #   'broker', | ||||
| ] | ||||
| 
 | ||||
| # passed to ``tractor.ActorNursery.start_actor()`` | ||||
| _spawn_kwargs = { | ||||
|     'infect_asyncio': True, | ||||
| } | ||||
| 
 | ||||
| # annotation to let backend agnostic code | ||||
| # know if ``brokerd`` should be spawned with | ||||
| # ``tractor``'s aio mode. | ||||
| _infect_asyncio: bool = True | ||||
@@ -1,667 +0,0 @@
| # piker: trading gear for hackers | ||||
| # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Deribit backend. | ||||
| 
 | ||||
| ''' | ||||
| import json | ||||
| import time | ||||
| import asyncio | ||||
| 
 | ||||
| from contextlib import asynccontextmanager as acm, AsyncExitStack | ||||
| from functools import partial | ||||
| from datetime import datetime | ||||
| from typing import Any, Optional, Iterable, Callable | ||||
| 
 | ||||
| import pendulum | ||||
| import asks | ||||
| import trio | ||||
| from trio_typing import Nursery, TaskStatus | ||||
| from fuzzywuzzy import process as fuzzy | ||||
| import numpy as np | ||||
| 
 | ||||
| from piker.data.types import Struct | ||||
| from piker.data._web_bs import ( | ||||
|     NoBsWs, | ||||
|     open_autorecon_ws, | ||||
|     open_jsonrpc_session | ||||
| ) | ||||
| 
 | ||||
| from .._util import resproc | ||||
| 
 | ||||
| from piker import config | ||||
| from piker.log import get_logger | ||||
| 
 | ||||
| from tractor.trionics import ( | ||||
|     broadcast_receiver, | ||||
|     BroadcastReceiver, | ||||
|     maybe_open_context | ||||
| ) | ||||
| from tractor import to_asyncio | ||||
| 
 | ||||
| from cryptofeed import FeedHandler | ||||
| 
 | ||||
from cryptofeed.defines import (
    DERIBIT,
    L1_BOOK, TRADES,
    OPTION, CALL, PUT,
    FILLS, ORDER_INFO,
)
| from cryptofeed.symbols import Symbol | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| _spawn_kwargs = { | ||||
|     'infect_asyncio': True, | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| _url = 'https://www.deribit.com' | ||||
| _ws_url = 'wss://www.deribit.com/ws/api/v2' | ||||
| _testnet_ws_url = 'wss://test.deribit.com/ws/api/v2' | ||||
| 
 | ||||
| 
 | ||||
| # Broker specific ohlc schema (rest) | ||||
| _ohlc_dtype = [ | ||||
|     ('index', int), | ||||
|     ('time', int), | ||||
|     ('open', float), | ||||
|     ('high', float), | ||||
|     ('low', float), | ||||
|     ('close', float), | ||||
|     ('volume', float), | ||||
|     ('bar_wap', float),  # will be zeroed by sampler if not filled | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
class JSONRPCResult(Struct):
    # NOTE: fields without defaults must precede those with them
    id: int
    usIn: int
    usOut: int
    usDiff: int
    testnet: bool
    jsonrpc: str = '2.0'
    result: Optional[dict] = None
    error: Optional[dict] = None
| 
 | ||||
| 
 | ||||
| class KLinesResult(Struct): | ||||
|     close: list[float] | ||||
|     cost: list[float] | ||||
|     high: list[float] | ||||
|     low: list[float] | ||||
|     open: list[float] | ||||
|     status: str | ||||
|     ticks: list[int] | ||||
|     volume: list[float] | ||||
| 
 | ||||
class Trade(Struct):
    trade_seq: int
    trade_id: str
    timestamp: int
    tick_direction: int
    price: float
    mark_price: float
    iv: float
    instrument_name: str
    index_price: float
    direction: str
    amount: float
    # combo(-order) fields are optional
    combo_trade_id: Optional[int] = 0
    combo_id: Optional[str] = ''
| 
 | ||||
| class LastTradesResult(Struct): | ||||
|     trades: list[Trade] | ||||
|     has_more: bool | ||||
| 
 | ||||
| 
 | ||||
# convert a (UTC) datetime obj to unixtime in milliseconds
def deribit_timestamp(when):
    # NOTE: ``.timestamp()`` already includes the microsecond component
    # as its fractional part so it isn't added in separately.
    return int(when.timestamp() * 1000)
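# e.g. (hypothetical value) a UTC datetime for 2022-08-31T18:54:46.500
# maps to 1661972086500.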
| 
 | ||||
| 
 | ||||
| def str_to_cb_sym(name: str) -> Symbol: | ||||
|     base, strike_price, expiry_date, option_type = name.split('-') | ||||
| 
 | ||||
|     quote = base | ||||
| 
 | ||||
|     if option_type == 'put': | ||||
|         option_type = PUT  | ||||
|     elif option_type  == 'call': | ||||
|         option_type = CALL | ||||
|     else: | ||||
|         raise Exception("Couldn\'t parse option type") | ||||
| 
 | ||||
|     return Symbol( | ||||
|         base, quote, | ||||
|         type=OPTION, | ||||
|         strike_price=strike_price, | ||||
|         option_type=option_type, | ||||
|         expiry_date=expiry_date, | ||||
|         expiry_normalize=False) | ||||
| 
 | ||||
| 
 | ||||
| def piker_sym_to_cb_sym(name: str) -> Symbol: | ||||
|     base, expiry_date, strike_price, option_type = tuple( | ||||
|         name.upper().split('-')) | ||||
| 
 | ||||
|     quote = base | ||||
| 
 | ||||
|     if option_type == 'P': | ||||
|         option_type = PUT  | ||||
|     elif option_type  == 'C': | ||||
|         option_type = CALL | ||||
|     else: | ||||
|         raise Exception("Couldn\'t parse option type") | ||||
| 
 | ||||
|     return Symbol( | ||||
|         base, quote, | ||||
|         type=OPTION, | ||||
|         strike_price=strike_price, | ||||
|         option_type=option_type, | ||||
|         expiry_date=expiry_date.upper()) | ||||
| 
 | ||||
| 
 | ||||
def cb_sym_to_deribit_inst(sym: Symbol):
    # cryptofeed normalized month codes
    cb_norm = ['F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z']

    # deribit specific month names
    months = [
        'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
        'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC',
    ]

    exp = sym.expiry_date

    # YYMDD
    # 01234
    year, month, day = (
        exp[:2], months[cb_norm.index(exp[2:3])], exp[3:])

    otype = 'C' if sym.option_type == CALL else 'P'

    return f'{sym.base}-{day}{month}{year}-{sym.strike_price}-{otype}'
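# e.g. (hypothetical values) an ``expiry_date`` of '22Z30' parses as
# year='22', month code 'Z' -> 'DEC', day='30'; a BTC 50000 call thus
# renders as 'BTC-30DEC22-50000-C'.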
| 
 | ||||
| 
 | ||||
| def get_config() -> dict[str, Any]: | ||||
| 
 | ||||
|     conf, path = config.load() | ||||
| 
 | ||||
|     section = conf.get('deribit') | ||||
| 
 | ||||
    # TODO: document why we send this; basically it's the logging
    # config expected by cryptofeed
    conf['log'] = {}
    conf['log']['disabled'] = True
| 
 | ||||
|     if section is None: | ||||
|         log.warning(f'No config section found for deribit in {path}') | ||||
| 
 | ||||
|     return conf  | ||||
| 
 | ||||
| 
 | ||||
| class Client: | ||||
| 
 | ||||
|     def __init__(self, json_rpc: Callable) -> None: | ||||
|         self._pairs: dict[str, Any] = None | ||||
| 
 | ||||
|         config = get_config().get('deribit', {}) | ||||
| 
 | ||||
|         if ('key_id' in config) and ('key_secret' in config): | ||||
|             self._key_id = config['key_id'] | ||||
|             self._key_secret = config['key_secret'] | ||||
| 
 | ||||
|         else: | ||||
|             self._key_id = None | ||||
|             self._key_secret = None | ||||
| 
 | ||||
|         self.json_rpc = json_rpc | ||||
| 
 | ||||
|     @property | ||||
|     def currencies(self): | ||||
|         return ['btc', 'eth', 'sol', 'usd'] | ||||
| 
 | ||||
|     async def get_balances(self, kind: str = 'option') -> dict[str, float]: | ||||
|         """Return the set of positions for this account | ||||
|         by symbol. | ||||
|         """ | ||||
|         balances = {} | ||||
| 
 | ||||
|         for currency in self.currencies: | ||||
|             resp = await self.json_rpc( | ||||
|                 'private/get_positions', params={ | ||||
|                     'currency': currency.upper(), | ||||
|                     'kind': kind}) | ||||
| 
 | ||||
|             balances[currency] = resp.result | ||||
| 
 | ||||
|         return balances | ||||
| 
 | ||||
|     async def get_assets(self) -> dict[str, float]: | ||||
|         """Return the set of asset balances for this account | ||||
|         by symbol. | ||||
|         """ | ||||
|         balances = {} | ||||
| 
 | ||||
|         for currency in self.currencies: | ||||
|             resp = await self.json_rpc( | ||||
|                 'private/get_account_summary', params={ | ||||
|                     'currency': currency.upper()}) | ||||
| 
 | ||||
|             balances[currency] = resp.result['balance'] | ||||
| 
 | ||||
|         return balances | ||||
| 
 | ||||
|     async def submit_limit( | ||||
|         self, | ||||
|         symbol: str, | ||||
|         price: float, | ||||
|         action: str, | ||||
|         size: float | ||||
|     ) -> dict: | ||||
|         """Place an order | ||||
|         """ | ||||
|         params = { | ||||
|             'instrument_name': symbol.upper(), | ||||
|             'amount': size, | ||||
|             'type': 'limit', | ||||
|             'price': price, | ||||
|         } | ||||
|         resp = await self.json_rpc( | ||||
|             f'private/{action}', params) | ||||
| 
 | ||||
|         return resp.result | ||||
| 
 | ||||
|     async def submit_cancel(self, oid: str): | ||||
|         """Send cancel request for order id | ||||
|         """ | ||||
|         resp = await self.json_rpc( | ||||
|             'private/cancel', {'order_id': oid}) | ||||
|         return resp.result | ||||
| 
 | ||||
|     async def symbol_info( | ||||
|         self, | ||||
|         instrument: Optional[str] = None, | ||||
|         currency: str = 'btc',  # BTC, ETH, SOL, USDC | ||||
|         kind: str = 'option', | ||||
|         expired: bool = False | ||||
|     ) -> dict[str, Any]: | ||||
|         """Get symbol info for the exchange. | ||||
| 
 | ||||
|         """ | ||||
|         if self._pairs: | ||||
|             return self._pairs | ||||
| 
 | ||||
|         # will retrieve all symbols by default | ||||
|         params = { | ||||
|             'currency': currency.upper(), | ||||
|             'kind': kind, | ||||
|             'expired': str(expired).lower() | ||||
|         } | ||||
| 
 | ||||
|         resp = await self.json_rpc('public/get_instruments', params) | ||||
|         results = resp.result | ||||
| 
 | ||||
|         instruments = { | ||||
|             item['instrument_name'].lower(): item | ||||
|             for item in results | ||||
|         } | ||||
| 
 | ||||
|         if instrument is not None: | ||||
|             return instruments[instrument] | ||||
|         else: | ||||
|             return instruments | ||||
| 
 | ||||
|     async def cache_symbols( | ||||
|         self, | ||||
|     ) -> dict: | ||||
|         if not self._pairs: | ||||
|             self._pairs = await self.symbol_info() | ||||
| 
 | ||||
|         return self._pairs | ||||
| 
 | ||||
|     async def search_symbols( | ||||
|         self, | ||||
|         pattern: str, | ||||
|         limit: int = 30, | ||||
|     ) -> dict[str, Any]: | ||||
|         data = await self.symbol_info() | ||||
| 
 | ||||
|         matches = fuzzy.extractBests( | ||||
|             pattern, | ||||
|             data, | ||||
|             score_cutoff=35, | ||||
|             limit=limit | ||||
|         ) | ||||
|         # repack in dict form | ||||
|         return {item[0]['instrument_name'].lower(): item[0] | ||||
|                 for item in matches} | ||||
| 
 | ||||
|     async def bars( | ||||
|         self, | ||||
|         symbol: str, | ||||
|         start_dt: Optional[datetime] = None, | ||||
|         end_dt: Optional[datetime] = None, | ||||
|         limit: int = 1000, | ||||
|         as_np: bool = True, | ||||
|     ) -> dict: | ||||
|         instrument = symbol | ||||
| 
 | ||||
|         if end_dt is None: | ||||
|             end_dt = pendulum.now('UTC') | ||||
| 
 | ||||
|         if start_dt is None: | ||||
|             start_dt = end_dt.start_of( | ||||
|                 'minute').subtract(minutes=limit) | ||||
| 
 | ||||
|         start_time = deribit_timestamp(start_dt) | ||||
|         end_time = deribit_timestamp(end_dt) | ||||
| 
 | ||||
|         # https://docs.deribit.com/#public-get_tradingview_chart_data | ||||
|         resp = await self.json_rpc( | ||||
|             'public/get_tradingview_chart_data', | ||||
|             params={ | ||||
|                 'instrument_name': instrument.upper(), | ||||
|                 'start_timestamp': start_time, | ||||
|                 'end_timestamp': end_time, | ||||
|                 'resolution': '1' | ||||
|             }) | ||||
| 
 | ||||
        result = KLinesResult(**resp.result)
        new_bars = []
        for i in range(len(result.close)):
            row = [
                (start_time + (i * (60 * 1000))) / 1000.0,  # time
                result.open[i],
                result.high[i],
                result.low[i],
                result.close[i],
                result.volume[i],
                0
            ]

            new_bars.append((i,) + tuple(row))

        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else new_bars
        return array
| 
 | ||||
|     async def last_trades( | ||||
|         self, | ||||
|         instrument: str, | ||||
|         count: int = 10 | ||||
|     ): | ||||
|         resp = await self.json_rpc( | ||||
|             'public/get_last_trades_by_instrument', | ||||
|             params={ | ||||
|                 'instrument_name': instrument, | ||||
|                 'count': count | ||||
|             }) | ||||
| 
 | ||||
|         return LastTradesResult(**resp.result) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_client( | ||||
|     is_brokercheck: bool = False | ||||
| ) -> Client: | ||||
| 
 | ||||
|     async with ( | ||||
|         trio.open_nursery() as n, | ||||
|         open_jsonrpc_session( | ||||
|             _testnet_ws_url, dtype=JSONRPCResult) as json_rpc | ||||
|     ): | ||||
|         client = Client(json_rpc) | ||||
| 
 | ||||
|         _refresh_token: Optional[str] = None | ||||
|         _access_token: Optional[str] = None | ||||
| 
 | ||||
        async def _auth_loop(
            task_status: TaskStatus = trio.TASK_STATUS_IGNORED
        ):
            """Background task that acquires a first access token and
            then keeps refreshing it until the nursery is cancelled.

            https://docs.deribit.com/?python#authentication-2
            """
|             renew_time = 10 | ||||
|             access_scope = 'trade:read_write' | ||||
|             _expiry_time = time.time() | ||||
|             got_access = False | ||||
|             nonlocal _refresh_token | ||||
|             nonlocal _access_token | ||||
| 
 | ||||
            while True:
                if _expiry_time - time.time() < renew_time:
                    # we are close to (or past) token expiry

                    if _refresh_token is not None:
                        # with a refresh token on hand there's no need
                        # to resend the secret
|                         params = { | ||||
|                             'grant_type': 'refresh_token', | ||||
|                             'refresh_token': _refresh_token, | ||||
|                             'scope': access_scope | ||||
|                         } | ||||
| 
 | ||||
|                     else: | ||||
|                         # we don't have refresh token, send secret to initialize | ||||
|                         params = { | ||||
|                             'grant_type': 'client_credentials', | ||||
|                             'client_id': client._key_id, | ||||
|                             'client_secret': client._key_secret, | ||||
|                             'scope': access_scope | ||||
|                         } | ||||
| 
 | ||||
|                     resp = await json_rpc('public/auth', params) | ||||
|                     result = resp.result | ||||
| 
 | ||||
|                     _expiry_time = time.time() + result['expires_in'] | ||||
|                     _refresh_token = result['refresh_token'] | ||||
| 
 | ||||
|                     if 'access_token' in result: | ||||
|                         _access_token = result['access_token'] | ||||
| 
 | ||||
|                     if not got_access: | ||||
|                         # first time this loop runs we must indicate task is | ||||
|                         # started, we have auth | ||||
|                         got_access = True | ||||
|                         task_status.started() | ||||
| 
 | ||||
|                 else: | ||||
|                     await trio.sleep(renew_time / 2) | ||||
| 
 | ||||
|         # if we have client creds launch auth loop | ||||
|         if client._key_id is not None: | ||||
|             await n.start(_auth_loop) | ||||
| 
 | ||||
|         await client.cache_symbols() | ||||
|         yield client | ||||
|         n.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_feed_handler(): | ||||
|     fh = FeedHandler(config=get_config()) | ||||
|     yield fh | ||||
|     await to_asyncio.run_task(fh.stop_async) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_feed_handler() -> trio.abc.ReceiveStream: | ||||
|     async with maybe_open_context( | ||||
|         acm_func=open_feed_handler, | ||||
|         key='feedhandler', | ||||
|     ) as (cache_hit, fh): | ||||
|         yield fh | ||||
| 
 | ||||
| 
 | ||||
| async def aio_price_feed_relay( | ||||
|     fh: FeedHandler, | ||||
|     instrument: Symbol, | ||||
|     from_trio: asyncio.Queue, | ||||
|     to_trio: trio.abc.SendChannel, | ||||
| ) -> None: | ||||
|     async def _trade(data: dict, receipt_timestamp): | ||||
|         to_trio.send_nowait(('trade', { | ||||
|             'symbol': cb_sym_to_deribit_inst( | ||||
|                 str_to_cb_sym(data.symbol)).lower(), | ||||
|             'last': data, | ||||
|             'broker_ts': time.time(), | ||||
|             'data': data.to_dict(), | ||||
|             'receipt': receipt_timestamp | ||||
|         })) | ||||
| 
 | ||||
|     async def _l1(data: dict, receipt_timestamp): | ||||
|         to_trio.send_nowait(('l1', { | ||||
|             'symbol': cb_sym_to_deribit_inst( | ||||
|                 str_to_cb_sym(data.symbol)).lower(), | ||||
|             'ticks': [ | ||||
|                 {'type': 'bid', | ||||
|                     'price': float(data.bid_price), 'size': float(data.bid_size)}, | ||||
|                 {'type': 'bsize', | ||||
|                     'price': float(data.bid_price), 'size': float(data.bid_size)}, | ||||
|                 {'type': 'ask', | ||||
|                     'price': float(data.ask_price), 'size': float(data.ask_size)}, | ||||
|                 {'type': 'asize', | ||||
|                     'price': float(data.ask_price), 'size': float(data.ask_size)} | ||||
|             ] | ||||
|         })) | ||||
| 
 | ||||
|     fh.add_feed( | ||||
|         DERIBIT, | ||||
|         channels=[TRADES, L1_BOOK], | ||||
|         symbols=[piker_sym_to_cb_sym(instrument)], | ||||
|         callbacks={ | ||||
|             TRADES: _trade, | ||||
|             L1_BOOK: _l1 | ||||
|         }) | ||||
| 
 | ||||
|     if not fh.running: | ||||
|         fh.run( | ||||
|             start_loop=False, | ||||
|             install_signal_handlers=False) | ||||
| 
 | ||||
|     # sync with trio | ||||
|     to_trio.send_nowait(None) | ||||
| 
 | ||||
|     await asyncio.sleep(float('inf')) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_price_feed( | ||||
|     instrument: str | ||||
| ) -> trio.abc.ReceiveStream: | ||||
|     async with maybe_open_feed_handler() as fh: | ||||
|         async with to_asyncio.open_channel_from( | ||||
|             partial( | ||||
|                 aio_price_feed_relay, | ||||
|                 fh, | ||||
|                 instrument | ||||
|             ) | ||||
|         ) as (first, chan): | ||||
|             yield chan | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_price_feed( | ||||
|     instrument: str | ||||
| ) -> trio.abc.ReceiveStream: | ||||
| 
 | ||||
|     # TODO: add a predicate to maybe_open_context | ||||
|     async with maybe_open_context( | ||||
|         acm_func=open_price_feed, | ||||
|         kwargs={ | ||||
|             'instrument': instrument | ||||
|         }, | ||||
|         key=f'{instrument}-price', | ||||
|     ) as (cache_hit, feed): | ||||
|         if cache_hit: | ||||
|             yield broadcast_receiver(feed, 10) | ||||
|         else: | ||||
|             yield feed | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| async def aio_order_feed_relay( | ||||
|     fh: FeedHandler, | ||||
|     instrument: Symbol, | ||||
|     from_trio: asyncio.Queue, | ||||
|     to_trio: trio.abc.SendChannel, | ||||
| ) -> None: | ||||
|     async def _fill(data: dict, receipt_timestamp): | ||||
|         breakpoint() | ||||
| 
 | ||||
|     async def _order_info(data: dict, receipt_timestamp): | ||||
|         breakpoint() | ||||
| 
 | ||||
|     fh.add_feed( | ||||
|         DERIBIT, | ||||
|         channels=[FILLS, ORDER_INFO], | ||||
|         symbols=[instrument.upper()], | ||||
|         callbacks={ | ||||
|             FILLS: _fill, | ||||
|             ORDER_INFO: _order_info, | ||||
|         }) | ||||
| 
 | ||||
|     if not fh.running: | ||||
|         fh.run( | ||||
|             start_loop=False, | ||||
|             install_signal_handlers=False) | ||||
| 
 | ||||
|     # sync with trio | ||||
|     to_trio.send_nowait(None) | ||||
| 
 | ||||
|     await asyncio.sleep(float('inf')) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_order_feed( | ||||
|     instrument: list[str] | ||||
| ) -> trio.abc.ReceiveStream: | ||||
|     async with maybe_open_feed_handler() as fh: | ||||
|         async with to_asyncio.open_channel_from( | ||||
|             partial( | ||||
|                 aio_order_feed_relay, | ||||
|                 fh, | ||||
|                 instrument | ||||
|             ) | ||||
|         ) as (first, chan): | ||||
|             yield chan | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_order_feed( | ||||
|     instrument: str | ||||
| ) -> trio.abc.ReceiveStream: | ||||
| 
 | ||||
    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_order_feed,
        kwargs={
            'instrument': instrument
        },
        key=f'{instrument}-order',
|     ) as (cache_hit, feed): | ||||
|         if cache_hit: | ||||
|             yield broadcast_receiver(feed, 10) | ||||
|         else: | ||||
|             yield feed | ||||
										
(5 binary image files removed, not shown: 169 KiB, 106 KiB, 59 KiB, 70 KiB, 132 KiB)
@@ -1,200 +0,0 @@
| # piker: trading gear for hackers | ||||
| # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Deribit backend. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from datetime import datetime | ||||
| from typing import Any, Optional, Callable | ||||
| import time | ||||
| 
 | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| import pendulum | ||||
| from fuzzywuzzy import process as fuzzy | ||||
| import numpy as np | ||||
| import tractor | ||||
| 
 | ||||
| from piker._cacheables import open_cached_client | ||||
| from piker.log import get_logger, get_console_log | ||||
| from piker.data import ShmArray | ||||
| from piker.brokers._util import ( | ||||
|     BrokerError, | ||||
|     DataUnavailable, | ||||
| ) | ||||
| 
 | ||||
| from cryptofeed import FeedHandler | ||||
| 
 | ||||
| from cryptofeed.defines import ( | ||||
|     DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT | ||||
| ) | ||||
| from cryptofeed.symbols import Symbol | ||||
| 
 | ||||
| from .api import ( | ||||
|     Client, Trade, | ||||
|     get_config, | ||||
|     str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst, | ||||
|     maybe_open_price_feed | ||||
| ) | ||||
| 
 | ||||
| _spawn_kwargs = { | ||||
|     'infect_asyncio': True, | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_history_client( | ||||
|     instrument: str, | ||||
| ) -> tuple[Callable, int]: | ||||
| 
 | ||||
|     # TODO implement history getter for the new storage layer. | ||||
|     async with open_cached_client('deribit') as client: | ||||
| 
 | ||||
|         async def get_ohlc( | ||||
|             end_dt: Optional[datetime] = None, | ||||
|             start_dt: Optional[datetime] = None, | ||||
| 
 | ||||
|         ) -> tuple[ | ||||
|             np.ndarray, | ||||
|             datetime,  # start | ||||
|             datetime,  # end | ||||
|         ]: | ||||
| 
 | ||||
|             array = await client.bars( | ||||
|                 instrument, | ||||
|                 start_dt=start_dt, | ||||
|                 end_dt=end_dt, | ||||
|             ) | ||||
|             if len(array) == 0: | ||||
|                 raise DataUnavailable | ||||
| 
 | ||||
|             start_dt = pendulum.from_timestamp(array[0]['time']) | ||||
|             end_dt = pendulum.from_timestamp(array[-1]['time']) | ||||
| 
 | ||||
|             return array, start_dt, end_dt | ||||
| 
 | ||||
|         yield get_ohlc, {'erlangs': 3, 'rate': 3} | ||||
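        # ('erlangs' and 'rate' are, presumably, the concurrency and
        # request-rate limits consumed by the history/backfill layer)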
| 
 | ||||
| 
 | ||||
| async def backfill_bars( | ||||
|     symbol: str, | ||||
|     shm: ShmArray,  # type: ignore # noqa | ||||
|     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED, | ||||
| ) -> None: | ||||
|     """Fill historical bars into shared mem / storage afap. | ||||
|     """ | ||||
|     instrument = symbol | ||||
|     with trio.CancelScope() as cs: | ||||
|         async with open_cached_client('deribit') as client: | ||||
|             bars = await client.bars(instrument) | ||||
|             shm.push(bars) | ||||
|             task_status.started(cs) | ||||
| 
 | ||||
| 
 | ||||
| async def stream_quotes( | ||||
| 
 | ||||
|     send_chan: trio.abc.SendChannel, | ||||
|     symbols: list[str], | ||||
|     feed_is_live: trio.Event, | ||||
|     loglevel: str = None, | ||||
| 
 | ||||
|     # startup sync | ||||
|     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> None: | ||||
|     # XXX: required to propagate ``tractor`` loglevel to piker logging | ||||
|     get_console_log(loglevel or tractor.current_actor().loglevel) | ||||
| 
 | ||||
|     sym = symbols[0] | ||||
| 
 | ||||
|     async with ( | ||||
|         open_cached_client('deribit') as client, | ||||
|         send_chan as send_chan | ||||
|     ): | ||||
| 
 | ||||
|         init_msgs = { | ||||
|             # pass back token, and bool, signalling if we're the writer | ||||
|             # and that history has been written | ||||
|             sym: { | ||||
|                 'symbol_info': { | ||||
|                     'asset_type': 'option', | ||||
|                     'price_tick_size': 0.0005 | ||||
|                 }, | ||||
|                 'shm_write_opts': {'sum_tick_vml': False}, | ||||
|                 'fqsn': sym, | ||||
|             }, | ||||
|         } | ||||
| 
 | ||||
|         nsym = piker_sym_to_cb_sym(sym) | ||||
| 
 | ||||
|         async with maybe_open_price_feed(sym) as stream: | ||||
| 
 | ||||
|             cache = await client.cache_symbols() | ||||
| 
 | ||||
|             last_trades = (await client.last_trades( | ||||
|                 cb_sym_to_deribit_inst(nsym), count=1)).trades | ||||
| 
 | ||||
|             if len(last_trades) == 0: | ||||
|                 last_trade = None | ||||
|                 async for typ, quote in stream: | ||||
|                     if typ == 'trade': | ||||
|                         last_trade = Trade(**(quote['data'])) | ||||
|                         break | ||||
| 
 | ||||
|             else: | ||||
|                 last_trade = Trade(**(last_trades[0])) | ||||
| 
 | ||||
|             first_quote = { | ||||
|                 'symbol': sym, | ||||
|                 'last': last_trade.price, | ||||
|                 'brokerd_ts': last_trade.timestamp, | ||||
|                 'ticks': [{ | ||||
|                     'type': 'trade', | ||||
|                     'price': last_trade.price, | ||||
|                     'size': last_trade.amount, | ||||
|                     'broker_ts': last_trade.timestamp | ||||
|                 }] | ||||
|             } | ||||
|             task_status.started((init_msgs,  first_quote)) | ||||
| 
 | ||||
|             feed_is_live.set() | ||||
| 
 | ||||
|             async for typ, quote in stream: | ||||
|                 topic = quote['symbol'] | ||||
|                 await send_chan.send({topic: quote}) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_symbol_search( | ||||
|     ctx: tractor.Context, | ||||
| ) -> Client: | ||||
|     async with open_cached_client('deribit') as client: | ||||
| 
 | ||||
|         # load all symbols locally for fast search | ||||
|         cache = await client.cache_symbols() | ||||
|         await ctx.started() | ||||
| 
 | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             async for pattern in stream: | ||||
|                 # repack in dict form | ||||
|                 await stream.send( | ||||
|                     await client.search_symbols(pattern)) | ||||
@@ -1,134 +0,0 @@
| ``ib`` backend | ||||
| -------------- | ||||
| more or less the "everything broker" for traditional and international | ||||
| markets. they are the "go to" provider for automatic retail trading | ||||
| and we interface to their APIs using the `ib_insync` project. | ||||
| 
 | ||||
| status | ||||
| ****** | ||||
| current support is *production grade* and both real-time data and order | ||||
| management should be correct and fast. this backend is used by core devs | ||||
| for live trading. | ||||
| 
 | ||||
| currently there is not yet full support for: | ||||
| - options charting and trading | ||||
| - paxos based crypto rt feeds and trading | ||||
| 
 | ||||
| 
 | ||||
| config | ||||
| ****** | ||||
| In order to get order mode support your ``brokers.toml`` | ||||
| needs to have something like the following: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|    [ib] | ||||
|    hosts = [ | ||||
|     "127.0.0.1", | ||||
|    ] | ||||
|    # TODO: when we eventually spawn gateways in our | ||||
|    # container, we can just dynamically allocate these | ||||
|    # using IBC. | ||||
|    ports = [ | ||||
|        4002, | ||||
|        4003, | ||||
|        4006, | ||||
|        4001, | ||||
|        7497, | ||||
|    ] | ||||
| 
 | ||||
   # XXX: for a paper account the flex web query service
   # is not supported so you have to manually download
   # an XML report and put it in a location that can be
   # accessed by the ``brokerd.ib`` backend code for parsing.
|    flex_token = '1111111111111111' | ||||
|    flex_trades_query_id = '6969696'  # live accounts only? | ||||
| 
 | ||||
|    # 3rd party web-api token | ||||
|    # (XXX: not sure if this works yet) | ||||
|    trade_log_token = '111111111111111' | ||||
| 
 | ||||
|    # when clients are being scanned this determines | ||||
|    # which clients are preferred to be used for data feeds | ||||
|    # based on account names which are detected as active | ||||
|    # on each client. | ||||
|    prefer_data_account = [ | ||||
|        # this has to be first in order to make data work with dual paper + live | ||||
|        'main', | ||||
|        'algopaper', | ||||
|    ] | ||||
| 
 | ||||
|    [ib.accounts] | ||||
|    main = 'U69696969' | ||||
|    algopaper = 'DU9696969' | ||||
| 
 | ||||
| 
 | ||||
| If everything works correctly you should see any current positions | ||||
| loaded in the pps pane on chart load and you should also be able to | ||||
| check your trade records in the file:: | ||||
| 
 | ||||
|     <pikerk_conf_dir>/ledgers/trades_ib_algopaper.toml | ||||
| 
 | ||||
| 
 | ||||
| An example ledger file will have entries written verbatim from the | ||||
| trade events schema: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|     ["0000e1a7.630f5e5a.01.01"] | ||||
|     secType = "FUT" | ||||
|     conId = 515416577 | ||||
|     symbol = "MNQ" | ||||
|     lastTradeDateOrContractMonth = "20221216" | ||||
|     strike = 0.0 | ||||
|     right = "" | ||||
|     multiplier = "2" | ||||
|     exchange = "GLOBEX" | ||||
|     primaryExchange = "" | ||||
|     currency = "USD" | ||||
|     localSymbol = "MNQZ2" | ||||
|     tradingClass = "MNQ" | ||||
|     includeExpired = false | ||||
|     secIdType = "" | ||||
|     secId = "" | ||||
|     comboLegsDescrip = "" | ||||
|     comboLegs = [] | ||||
|     execId = "0000e1a7.630f5e5a.01.01" | ||||
|     time = 1661972086.0 | ||||
|     acctNumber = "DU69696969" | ||||
|     side = "BOT" | ||||
|     shares = 1.0 | ||||
|     price = 12372.75 | ||||
|     permId = 441472655 | ||||
|     clientId = 6116 | ||||
|     orderId = 985 | ||||
|     liquidation = 0 | ||||
|     cumQty = 1.0 | ||||
|     avgPrice = 12372.75 | ||||
|     orderRef = "" | ||||
|     evRule = "" | ||||
|     evMultiplier = 0.0 | ||||
|     modelCode = "" | ||||
|     lastLiquidity = 1 | ||||
|     broker_time = 1661972086.0 | ||||
|     name = "ib" | ||||
|     commission = 0.57 | ||||
|     realizedPNL = 243.41 | ||||
|     yield_ = 0.0 | ||||
|     yieldRedemptionDate = 0 | ||||
|     listingExchange = "GLOBEX" | ||||
|     date = "2022-08-31T18:54:46+00:00" | ||||
| 
 | ||||
| 
 | ||||
| your ``pps.toml`` file will have position entries like the following: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|     [ib.algopaper."mnq.globex.20221216"] | ||||
|     size = -1.0 | ||||
|     ppu = 12423.630576923071 | ||||
|     bsuid = 515416577 | ||||
|     expiry = "2022-12-16T00:00:00+00:00" | ||||
|     clears = [ | ||||
|      { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" }, | ||||
|     ] | ||||
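| 
| both files are plain TOML so they're easy to sanity check by hand. a | ||||
| minimal sketch (assuming python 3.11+ for the stdlib ``tomllib`` and | ||||
| that your cwd is ``<pikerk_conf_dir>``, per the example path above): | ||||
| 
| .. code:: python | ||||
| 
|    # load the ledger and print a few fields from each fill; the path | ||||
|    # and field names are taken from the example entry above. | ||||
|    import tomllib | ||||
| 
|    with open('ledgers/trades_ib_algopaper.toml', 'rb') as f: | ||||
|        ledger = tomllib.load(f) | ||||
| 
|    # each top level table key is an execution id | ||||
|    for execid, fill in ledger.items(): | ||||
|        print(execid, fill['symbol'], fill['side'], fill['shares'], fill['price']) | ||||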
|  | @ -20,10 +20,15 @@ Interactive Brokers API backend. | |||
| Sub-modules within break into the core functionalities: | ||||
| 
 | ||||
| - ``broker.py`` for orders / trading endpoints | ||||
| - ``feed.py`` for real-time data feed endpoints | ||||
| - ``api.py`` for the core API machinery which is ``trio``-ized | ||||
| - ``data.py`` for real-time data feed endpoints | ||||
| 
 | ||||
| - ``client.py`` for the core API machinery which is ``trio``-ized, | ||||
|   wrapping around ``ib_insync``. | ||||
| 
 | ||||
| - ``report.py`` for the hackery to build manual pp calcs | ||||
|   to avoid ib's absolute bullshit FIFO-style position | ||||
|   tracking. | ||||
| 
 | ||||
| """ | ||||
| from .api import ( | ||||
|     get_client, | ||||
|  | @ -33,10 +38,7 @@ from .feed import ( | |||
|     open_symbol_search, | ||||
|     stream_quotes, | ||||
| ) | ||||
| from .broker import ( | ||||
|     trades_dialogue, | ||||
|     norm_trade_records, | ||||
| ) | ||||
| from .broker import trades_dialogue | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'get_client', | ||||
|  |  | |||
|  | @ -29,7 +29,6 @@ import itertools | |||
| from math import isnan | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Union, | ||||
| ) | ||||
| import asyncio | ||||
|  | @ -39,28 +38,16 @@ import time | |||
| from types import SimpleNamespace | ||||
| 
 | ||||
| 
 | ||||
| from bidict import bidict | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import to_asyncio | ||||
| import ib_insync as ibis | ||||
| from ib_insync.contract import ( | ||||
|     Contract, | ||||
|     ContractDetails, | ||||
|     Option, | ||||
| ) | ||||
| from ib_insync.wrapper import RequestError | ||||
| from ib_insync.contract import Contract, ContractDetails | ||||
| from ib_insync.order import Order | ||||
| from ib_insync.ticker import Ticker | ||||
| from ib_insync.objects import ( | ||||
|     Position, | ||||
|     Fill, | ||||
|     Execution, | ||||
|     CommissionReport, | ||||
| ) | ||||
| from ib_insync.wrapper import ( | ||||
|     Wrapper, | ||||
|     RequestError, | ||||
| ) | ||||
| from ib_insync.objects import Position | ||||
| import ib_insync as ibis | ||||
| from ib_insync.wrapper import Wrapper | ||||
| from ib_insync.client import Client as ib_Client | ||||
| import numpy as np | ||||
| 
 | ||||
|  | @ -168,93 +155,60 @@ class NonShittyIB(ibis.IB): | |||
|         self.client.apiEnd += self.disconnectedEvent | ||||
| 
 | ||||
| 
 | ||||
| _futes_venues = ( | ||||
|     'GLOBEX', | ||||
|     'NYMEX', | ||||
|     'CME', | ||||
|     'CMECRYPTO', | ||||
|     'COMEX', | ||||
|     'CMDTY',  # special name case.. | ||||
| ) | ||||
| 
 | ||||
| _adhoc_futes_set = { | ||||
| 
 | ||||
|     # equities | ||||
|     'nq.globex', | ||||
|     'mnq.globex',  # micro | ||||
| 
 | ||||
|     'es.globex', | ||||
|     'mes.globex',  # micro | ||||
| 
 | ||||
|     # crypto$ | ||||
|     'brr.cmecrypto', | ||||
|     'ethusdrr.cmecrypto', | ||||
| 
 | ||||
|     # agriculture | ||||
|     'he.nymex',  # lean hogs | ||||
|     'le.nymex',  # live cattle (geezers) | ||||
|     'gf.nymex',  # feeder cattle (younguns) | ||||
| 
 | ||||
|     # raw | ||||
|     'lb.nymex',  # random len lumber | ||||
| 
 | ||||
|     # metals | ||||
|     'xauusd.cmdty',  # gold spot | ||||
|     'gc.nymex', | ||||
|     'mgc.nymex',  # micro | ||||
| 
 | ||||
|     # oil & gas | ||||
|     'cl.nymex', | ||||
| 
 | ||||
|     'xagusd.cmdty',  # silver spot | ||||
|     'ni.nymex',  # silver futes | ||||
|     'qi.comex',  # mini-silver futes | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| # taken from list here: | ||||
| # https://www.interactivebrokers.com/en/trading/products-spot-currencies.php | ||||
| _adhoc_fiat_set = set(( | ||||
|     'USD, AED, AUD, CAD,' | ||||
|     'CHF, CNH, CZK, DKK,' | ||||
|     'EUR, GBP, HKD, HUF,' | ||||
|     'ILS, JPY, MXN, NOK,' | ||||
|     'NZD, PLN, RUB, SAR,' | ||||
|     'SEK, SGD, TRY, ZAR' | ||||
|     # strip spaces and split on ',' so each entry is a clean code | ||||
|     ).replace(' ', '').split(',') | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # map of symbols to contract ids | ||||
| _adhoc_symbol_map = { | ||||
| _adhoc_cmdty_data_map = { | ||||
|     # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 | ||||
| 
 | ||||
|     # NOTE: some cmdtys/metals don't have trade data like gold/usd: | ||||
|     # https://groups.io/g/twsapi/message/44174 | ||||
|     'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}), | ||||
| } | ||||
| for qsn in _adhoc_futes_set: | ||||
|     sym, venue = qsn.split('.') | ||||
|     assert venue.upper() in _futes_venues, f'{venue}' | ||||
|     _adhoc_symbol_map[sym.upper()] = ( | ||||
|         {'exchange': venue}, | ||||
|         {}, | ||||
|     ) | ||||
| 
 | ||||
| _futes_venues = ( | ||||
|     'GLOBEX', | ||||
|     'NYMEX', | ||||
|     'CME', | ||||
|     'CMECRYPTO', | ||||
| ) | ||||
| 
 | ||||
| _adhoc_futes_set = { | ||||
| 
 | ||||
|     # equities | ||||
|     'nq.globex', | ||||
|     'mnq.globex', | ||||
| 
 | ||||
|     'es.globex', | ||||
|     'mes.globex', | ||||
| 
 | ||||
|     # crypto$ | ||||
|     'brr.cmecrypto', | ||||
|     'ethusdrr.cmecrypto', | ||||
| 
 | ||||
|     # agriculture | ||||
|     'he.globex',  # lean hogs | ||||
|     'le.globex',  # live cattle (geezers) | ||||
|     'gf.globex',  # feeder cattle (younguns) | ||||
| 
 | ||||
|     # raw | ||||
|     'lb.globex',  # random len lumber | ||||
| 
 | ||||
|     # metals | ||||
|     'xauusd.cmdty',  # gold spot | ||||
|     'gc.nymex', | ||||
|     'mgc.nymex', | ||||
| 
 | ||||
|     'xagusd.cmdty',  # silver spot | ||||
|     'ni.nymex',  # silver futes | ||||
|     'qi.comex',  # mini-silver futes | ||||
| } | ||||
| 
 | ||||
| # exchanges we don't support at the moment due to not knowing | ||||
| # how to do symbol-contract lookup correctly, likely due | ||||
| # to not having the data feeds subscribed. | ||||
| _exch_skip_list = { | ||||
| 
 | ||||
|     'ASX',  # aussie stocks | ||||
|     'MEXI',  # mexican stocks | ||||
| 
 | ||||
|     # no idea | ||||
|     'VALUE', | ||||
|     'FUNDSERV', | ||||
|     'SWB2', | ||||
|     'PSE', | ||||
|     'VALUE',  # no idea | ||||
| } | ||||
| 
 | ||||
| # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 | ||||
|  | @ -307,29 +261,27 @@ class Client: | |||
| 
 | ||||
|         # NOTE: the ib.client here is "throttled" to 45 rps by default | ||||
| 
 | ||||
|     async def trades(self) -> dict[str, Any]: | ||||
|         ''' | ||||
|         Return a list of trade-fills from the current session as ``dict``s. | ||||
|     async def trades( | ||||
|         self, | ||||
|         # api_only: bool = False, | ||||
| 
 | ||||
|         ''' | ||||
|         fills: list[Fill] = self.ib.fills() | ||||
|         norm_fills: list[dict] = [] | ||||
|     ) -> dict[str, Any]: | ||||
| 
 | ||||
|         # orders = await self.ib.reqCompletedOrdersAsync( | ||||
|         #     apiOnly=api_only | ||||
|         # ) | ||||
|         fills = await self.ib.reqExecutionsAsync() | ||||
|         norm_fills = [] | ||||
|         for fill in fills: | ||||
|             fill = fill._asdict()  # namedtuple | ||||
|             for key, val in fill.items(): | ||||
|                 match val: | ||||
|                     case Contract() | Execution() | CommissionReport(): | ||||
|             for key, val in fill.copy().items(): | ||||
|                 if isinstance(val, Contract): | ||||
|                     fill[key] = asdict(val) | ||||
| 
 | ||||
|             norm_fills.append(fill) | ||||
| 
 | ||||
|         return norm_fills | ||||
| 
 | ||||
|     async def orders(self) -> list[Order]: | ||||
|         return await self.ib.reqAllOpenOrdersAsync( | ||||
|             apiOnly=False, | ||||
|         ) | ||||
| 
 | ||||
|     async def bars( | ||||
|         self, | ||||
|         fqsn: str, | ||||
|  | @ -357,7 +309,7 @@ class Client: | |||
| 
 | ||||
|         _enters += 1 | ||||
| 
 | ||||
|         contract = (await self.find_contracts(fqsn))[0] | ||||
|         contract = await self.find_contract(fqsn) | ||||
|         bars_kwargs.update(getattr(contract, 'bars_kwargs', {})) | ||||
| 
 | ||||
|         # _min = min(2000*100, count) | ||||
|  | @ -412,15 +364,7 @@ class Client: | |||
|                 futs.append(self.ib.reqContractDetailsAsync(con)) | ||||
| 
 | ||||
|         # batch request all details | ||||
|         try: | ||||
|         results = await asyncio.gather(*futs) | ||||
|         except RequestError as err: | ||||
|             msg = err.message | ||||
|             if ( | ||||
|                 'No security definition' in msg | ||||
|             ): | ||||
|                 log.warning(f'{msg}: {contracts}') | ||||
|                 return {} | ||||
| 
 | ||||
|         # one set per future result | ||||
|         details = {} | ||||
|  | @ -429,11 +373,20 @@ class Client: | |||
|             # XXX: if there is more than one entry in the details list | ||||
|             # then the contract is so called "ambiguous". | ||||
|             for d in details_set: | ||||
|                 con = d.contract | ||||
| 
 | ||||
|                 # nested dataclass we probably don't need and that won't | ||||
|                 # IPC serialize.. | ||||
|                 key = '.'.join([ | ||||
|                     con.symbol, | ||||
|                     con.primaryExchange or con.exchange, | ||||
|                 ]) | ||||
|                 expiry = con.lastTradeDateOrContractMonth | ||||
|                 if expiry: | ||||
|                     key += f'.{expiry}' | ||||
| 
 | ||||
|                 # nested dataclass we probably don't need and that | ||||
|                 # won't IPC serialize.. | ||||
|                 d.secIdList = '' | ||||
|                 key, calc_price = con2fqsn(d.contract) | ||||
| 
 | ||||
|                 details[key] = d | ||||
| 
 | ||||
|         return details | ||||
|  | @ -463,7 +416,7 @@ class Client: | |||
|         self, | ||||
|         pattern: str, | ||||
|         # how many contracts to search "up to" | ||||
|         upto: int = 6, | ||||
|         upto: int = 3, | ||||
|         asdicts: bool = True, | ||||
| 
 | ||||
|     ) -> dict[str, ContractDetails]: | ||||
|  | @ -474,6 +427,7 @@ class Client: | |||
|             pattern, | ||||
|             upto=upto, | ||||
|         ) | ||||
| 
 | ||||
|         for key, deats in results.copy().items(): | ||||
| 
 | ||||
|             tract = deats.contract | ||||
|  | @ -483,44 +437,21 @@ class Client: | |||
|             if sectype == 'IND': | ||||
|                 results[f'{sym}.IND'] = tract | ||||
|                 results.pop(key) | ||||
|                 # exch = tract.exchange | ||||
| 
 | ||||
|                 # XXX: add back one of these to get the weird deadlock | ||||
|                 # on the debugger from root without the latest | ||||
|                 # maybe_wait_for_debugger() fix in the `open_context()` | ||||
|                 # exit. | ||||
|                 # assert 0 | ||||
|                 # if con.exchange not in _exch_skip_list: | ||||
| 
 | ||||
|                 exch = tract.exchange | ||||
|                 if exch not in _exch_skip_list: | ||||
| 
 | ||||
|                 if exch in _futes_venues: | ||||
|                     # try get all possible contracts for symbol as per, | ||||
|                     # https://interactivebrokers.github.io/tws-api/basic_contracts.html#fut | ||||
|                     con = ibis.Future( | ||||
|                         symbol=sym, | ||||
|                         exchange=exch, | ||||
|                     ) | ||||
|                     # TODO: make this work, think it's something to do | ||||
|                     # with the qualify flag. | ||||
|                     # cons = await self.find_contracts( | ||||
|                     #     contract=con, | ||||
|                     #     err_on_qualify=False, | ||||
|                     # ) | ||||
|                     # if cons: | ||||
|                     try: | ||||
|                         all_deats = await self.con_deats([con]) | ||||
|                         results |= all_deats | ||||
| 
 | ||||
|             # forex pairs | ||||
|             elif sectype == 'CASH': | ||||
|                 dst, src = tract.localSymbol.split('.') | ||||
|                 pair_key = "/".join([dst, src]) | ||||
|                 exch = tract.exchange.lower() | ||||
|                 results[f'{pair_key}.{exch}'] = tract | ||||
|                 results.pop(key) | ||||
| 
 | ||||
|                 # XXX: again seems to trigger the weird tractor | ||||
|                 # bug with the debugger.. | ||||
|                 # assert 0 | ||||
|                     except RequestError as err: | ||||
|                         log.warning(err.message) | ||||
| 
 | ||||
|         return results | ||||
| 
 | ||||
|  | @ -552,19 +483,13 @@ class Client: | |||
| 
 | ||||
|         return con | ||||
| 
 | ||||
|     async def get_con( | ||||
|         self, | ||||
|         conid: int, | ||||
|     ) -> Contract: | ||||
|         return await self.ib.qualifyContractsAsync( | ||||
|             ibis.Contract(conId=conid) | ||||
|         ) | ||||
| 
 | ||||
|     def parse_patt2fqsn( | ||||
|     async def find_contract( | ||||
|         self, | ||||
|         pattern: str, | ||||
|         currency: str = 'USD', | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> tuple[str, str, str, str]: | ||||
|     ) -> Contract: | ||||
| 
 | ||||
|         # TODO: we can't use this currently because | ||||
|         # ``wrapper.startTicker()`` currently caches ticker instances | ||||
|  | @ -577,30 +502,12 @@ class Client: | |||
|         # XXX UPDATE: we can probably do the tick/trades scraping | ||||
|         # inside our eventkit handler instead to bypass this entirely? | ||||
| 
 | ||||
|         currency = '' | ||||
| 
 | ||||
|         # fqsn parsing stage | ||||
|         # ------------------ | ||||
|         if '.ib' in pattern: | ||||
|             from ..data._source import unpack_fqsn | ||||
|             _, symbol, expiry = unpack_fqsn(pattern) | ||||
| 
 | ||||
|             broker, symbol, expiry = unpack_fqsn(pattern) | ||||
|         else: | ||||
|             symbol = pattern | ||||
|             expiry = '' | ||||
| 
 | ||||
|         # another hack for forex pairs lul. | ||||
|         if ( | ||||
|             '.idealpro' in symbol | ||||
|             # or '/' in symbol | ||||
|         ): | ||||
|             exch = 'IDEALPRO' | ||||
|             symbol = symbol.removesuffix('.idealpro') | ||||
|             if '/' in symbol: | ||||
|                 symbol, currency = symbol.split('/') | ||||
| 
 | ||||
|         else: | ||||
|             # TODO: yes, a cache.. | ||||
|         # try: | ||||
|         #     # give the cache a go | ||||
|         #     return self._contracts[symbol] | ||||
|  | @ -611,80 +518,45 @@ class Client: | |||
|             symbol, _, expiry = symbol.rpartition('.') | ||||
| 
 | ||||
|         # use heuristics to figure out contract "type" | ||||
|             symbol, exch = symbol.upper().rsplit('.', maxsplit=1) | ||||
|         sym, exch = symbol.upper().rsplit('.', maxsplit=1) | ||||
| 
 | ||||
|         return symbol, currency, exch, expiry | ||||
| 
 | ||||
|     async def find_contracts( | ||||
|         self, | ||||
|         pattern: Optional[str] = None, | ||||
|         contract: Optional[Contract] = None, | ||||
|         qualify: bool = True, | ||||
|         err_on_qualify: bool = True, | ||||
| 
 | ||||
|     ) -> Contract: | ||||
| 
 | ||||
|         if pattern is not None: | ||||
|             symbol, currency, exch, expiry = self.parse_patt2fqsn( | ||||
|                 pattern, | ||||
|             ) | ||||
|             sectype = '' | ||||
| 
 | ||||
|         else: | ||||
|             assert contract | ||||
|             symbol = contract.symbol | ||||
|             sectype = contract.secType | ||||
|             exch = contract.exchange or contract.primaryExchange | ||||
|             expiry = contract.lastTradeDateOrContractMonth | ||||
|             currency = contract.currency | ||||
| 
 | ||||
|         # contract searching stage | ||||
|         # ------------------------ | ||||
|         qualify: bool = True | ||||
| 
 | ||||
|         # futes | ||||
|         if exch in _futes_venues: | ||||
|             if expiry: | ||||
|                 # get the "front" contract | ||||
|                 con = await self.get_fute( | ||||
|                     symbol=symbol, | ||||
|                 contract = await self.get_fute( | ||||
|                     symbol=sym, | ||||
|                     exchange=exch, | ||||
|                     expiry=expiry, | ||||
|                 ) | ||||
| 
 | ||||
|             else: | ||||
|                 # get the "front" contract | ||||
|                 con = await self.get_fute( | ||||
|                     symbol=symbol, | ||||
|                 contract = await self.get_fute( | ||||
|                     symbol=sym, | ||||
|                     exchange=exch, | ||||
|                     front=True, | ||||
|                 ) | ||||
| 
 | ||||
|         elif ( | ||||
|             exch in ('IDEALPRO') | ||||
|             or sectype == 'CASH' | ||||
|         ): | ||||
|             # if '/' in symbol: | ||||
|             #     currency = '' | ||||
|             #     symbol, currency = symbol.split('/') | ||||
|             qualify = False | ||||
| 
 | ||||
|         elif exch in ('FOREX'): | ||||
|             currency = '' | ||||
|             symbol, currency = sym.split('/') | ||||
|             con = ibis.Forex( | ||||
|                 pair=''.join((symbol, currency)), | ||||
|                 symbol=symbol, | ||||
|                 currency=currency, | ||||
|             ) | ||||
|             con.bars_kwargs = {'whatToShow': 'MIDPOINT'} | ||||
| 
 | ||||
|         # commodities | ||||
|         elif exch == 'CMDTY':  # eg. XAUUSD.CMDTY | ||||
|             con_kwargs, bars_kwargs = _adhoc_symbol_map[symbol] | ||||
|             con_kwargs, bars_kwargs = _adhoc_cmdty_data_map[sym] | ||||
|             con = ibis.Commodity(**con_kwargs) | ||||
|             con.bars_kwargs = bars_kwargs | ||||
| 
 | ||||
|         # crypto$ | ||||
|         elif exch == 'PAXOS':  # btc.paxos | ||||
|             con = ibis.Crypto( | ||||
|                 symbol=symbol, | ||||
|                 currency=currency, | ||||
|             ) | ||||
| 
 | ||||
|         # stonks | ||||
|         else: | ||||
|             # TODO: metadata system for all these exchange rules.. | ||||
|  | @ -697,50 +569,33 @@ class Client: | |||
|                 exch = 'SMART' | ||||
| 
 | ||||
|             else: | ||||
|                 # XXX: order is super important here since | ||||
|                 # a primary == 'SMART' won't ever work. | ||||
|                 primaryExchange = exch | ||||
|                 exch = 'SMART' | ||||
|                 primaryExchange = exch | ||||
| 
 | ||||
|             con = ibis.Stock( | ||||
|                 symbol=symbol, | ||||
|                 symbol=sym, | ||||
|                 exchange=exch, | ||||
|                 primaryExchange=primaryExchange, | ||||
|                 currency=currency, | ||||
|             ) | ||||
|             exch = 'SMART' if not exch else exch | ||||
| 
 | ||||
|         contracts = [con] | ||||
|         if qualify: | ||||
|         try: | ||||
|                 contracts = await self.ib.qualifyContractsAsync(con) | ||||
|             except RequestError as err: | ||||
|                 msg = err.message | ||||
|                 if ( | ||||
|                     'No security definition' in msg | ||||
|                     and not err_on_qualify | ||||
|                 ): | ||||
|                     log.warning( | ||||
|                         f'Could not find def for {con}') | ||||
|                     return None | ||||
| 
 | ||||
|             exch = 'SMART' if not exch else exch | ||||
|             if qualify: | ||||
|                 contract = (await self.ib.qualifyContractsAsync(con))[0] | ||||
|             else: | ||||
|                     raise | ||||
|             if not contracts: | ||||
|                 assert contract | ||||
| 
 | ||||
|         except IndexError: | ||||
|             raise ValueError(f"No contract could be found {con}") | ||||
| 
 | ||||
|         # pack all contracts into cache | ||||
|         for tract in contracts: | ||||
|             exch: str = tract.primaryExchange or tract.exchange or exch | ||||
|             pattern = f'{symbol}.{exch}' | ||||
|             expiry = tract.lastTradeDateOrContractMonth | ||||
|             # add an entry with expiry suffix if available | ||||
|             if expiry: | ||||
|                 pattern += f'.{expiry}' | ||||
|         self._contracts[pattern] = contract | ||||
| 
 | ||||
|             self._contracts[pattern.lower()] = tract | ||||
|         # add an additional entry with expiry suffix if available | ||||
|         conexp = contract.lastTradeDateOrContractMonth | ||||
|         if conexp: | ||||
|             self._contracts[pattern + f'.{conexp}'] = contract | ||||
| 
 | ||||
|         return contracts | ||||
|         return contract | ||||
| 
 | ||||
|     async def get_head_time( | ||||
|         self, | ||||
|  | @ -759,10 +614,9 @@ class Client: | |||
|     async def get_sym_details( | ||||
|         self, | ||||
|         symbol: str, | ||||
| 
 | ||||
|     ) -> tuple[Contract, Ticker, ContractDetails]: | ||||
| 
 | ||||
|         contract = (await self.find_contracts(symbol))[0] | ||||
|         contract = await self.find_contract(symbol) | ||||
|         ticker: Ticker = self.ib.reqMktData( | ||||
|             contract, | ||||
|             snapshot=True, | ||||
|  | @ -950,73 +804,6 @@ class Client: | |||
|         return self.ib.positions(account=account) | ||||
| 
 | ||||
| 
 | ||||
| def con2fqsn( | ||||
|     con: Contract, | ||||
|     _cache: dict[int, (str, bool)] = {} | ||||
| 
 | ||||
| ) -> tuple[str, bool]: | ||||
|     ''' | ||||
|     Convert contracts to fqsn-style strings to be used both in symbol-search | ||||
|     matching and as feed tokens passed to the front end data feed layer. | ||||
| 
 | ||||
|     Previously seen contracts are cached by id. | ||||
| 
 | ||||
|     ''' | ||||
|     # should be real volume for this contract by default | ||||
|     calc_price = False | ||||
|     if con.conId: | ||||
|         try: | ||||
|             return _cache[con.conId] | ||||
|         except KeyError: | ||||
|             pass | ||||
| 
 | ||||
|     suffix = con.primaryExchange or con.exchange | ||||
|     symbol = con.symbol | ||||
|     expiry = con.lastTradeDateOrContractMonth or '' | ||||
| 
 | ||||
|     match con: | ||||
|         case Option(): | ||||
|             # TODO: option symbol parsing and sane display: | ||||
|             symbol = con.localSymbol.replace(' ', '') | ||||
| 
 | ||||
|         case ibis.Commodity(): | ||||
|             # commodities and forex don't have an exchange name and | ||||
|             # no real volume so we have to calculate the price | ||||
|             suffix = con.secType | ||||
| 
 | ||||
|             # no real volume on this tract | ||||
|             calc_price = True | ||||
| 
 | ||||
|         case ibis.Forex() | ibis.Contract(secType='CASH'): | ||||
|             dst, src = con.localSymbol.split('.') | ||||
|             symbol = ''.join([dst, src]) | ||||
|             suffix = con.exchange | ||||
| 
 | ||||
|             # no real volume on forex feeds.. | ||||
|             calc_price = True | ||||
| 
 | ||||
|     if not suffix: | ||||
|         entry = _adhoc_symbol_map.get( | ||||
|             con.symbol or con.localSymbol | ||||
|         ) | ||||
|         if entry: | ||||
|             meta, kwargs = entry | ||||
|             cid = meta.get('conId') | ||||
|             if cid: | ||||
|                 assert con.conId == meta['conId'] | ||||
|             suffix = meta['exchange'] | ||||
| 
 | ||||
|     # append a `.<suffix>` to the returned symbol | ||||
|     # key for derivatives that normally is the expiry | ||||
|     # date key. | ||||
|     if expiry: | ||||
|         suffix += f'.{expiry}' | ||||
| 
 | ||||
|     fqsn_key = '.'.join((symbol, suffix)).lower() | ||||
|     _cache[con.conId] = fqsn_key, calc_price | ||||
|     return fqsn_key, calc_price | ||||
| 
 | ||||
| 
 | ||||
| # per-actor API ep caching | ||||
| _client_cache: dict[tuple[str, int], Client] = {} | ||||
| _scan_ignore: set[tuple[str, int]] = set() | ||||
|  | @ -1024,23 +811,10 @@ _scan_ignore: set[tuple[str, int]] = set() | |||
| 
 | ||||
| def get_config() -> dict[str, Any]: | ||||
| 
 | ||||
|     conf, path = config.load('brokers') | ||||
|     conf, path = config.load() | ||||
| 
 | ||||
|     section = conf.get('ib') | ||||
| 
 | ||||
|     accounts = section.get('accounts') | ||||
|     if not accounts: | ||||
|         raise ValueError( | ||||
|             'brokers.toml -> `ib.accounts` must be defined\n' | ||||
|             f'location: {path}' | ||||
|         ) | ||||
| 
 | ||||
|     names = list(accounts.keys()) | ||||
|     accts = section['accounts'] = bidict(accounts) | ||||
|     log.info( | ||||
|         f'brokers.toml defines {len(accts)} accounts: ' | ||||
|         f'{pformat(names)}' | ||||
|     ) | ||||
| 
 | ||||
|     if section is None: | ||||
|         log.warning(f'No config section found for ib in {path}') | ||||
|         return {} | ||||
|  | @ -1134,12 +908,6 @@ async def load_aio_clients( | |||
|                     # careful. | ||||
|                     timeout=connect_timeout, | ||||
|                 ) | ||||
|                 # create and cache client | ||||
|                 client = Client(ib) | ||||
| 
 | ||||
|                 # update all actor-global caches | ||||
|                 log.info(f"Caching client for {sockaddr}") | ||||
|                 _client_cache[sockaddr] = client | ||||
|                 break | ||||
| 
 | ||||
|             except ( | ||||
|  | @ -1163,9 +931,21 @@ async def load_aio_clients( | |||
|                 log.warning( | ||||
|                     f'Failed to connect on {port} for {i} time, retrying...') | ||||
| 
 | ||||
|         # create and cache client | ||||
|         client = Client(ib) | ||||
| 
 | ||||
|         # Pre-collect all accounts available for this | ||||
|         # connection and map account names to this client | ||||
|         # instance. | ||||
|         pps = ib.positions() | ||||
|         if pps: | ||||
|             for pp in pps: | ||||
|                 accounts_found[ | ||||
|                     accounts_def.inverse[pp.account] | ||||
|                 ] = client | ||||
| 
 | ||||
|         # if there are accounts without positions we should still | ||||
|         # register them for this client | ||||
|         for value in ib.accountValues(): | ||||
|             acct_number = value.account | ||||
| 
 | ||||
|  | @ -1186,6 +966,10 @@ async def load_aio_clients( | |||
|             f'{pformat(accounts_found)}' | ||||
|         ) | ||||
| 
 | ||||
|         # update all actor-global caches | ||||
|         log.info(f"Caching client for {sockaddr}") | ||||
|         _client_cache[sockaddr] = client | ||||
| 
 | ||||
|         # XXX: why aren't we just updating this directly above | ||||
|         # instead of using the intermediary `accounts_found`? | ||||
|         _accounts2clients.update(accounts_found) | ||||
|  | @ -1206,7 +990,7 @@ async def load_aio_clients( | |||
|         for acct, client in _accounts2clients.items(): | ||||
|             log.info(f'Disconnecting {acct}@{client}') | ||||
|             client.ib.disconnect() | ||||
|             _client_cache.pop((host, port), None) | ||||
|             _client_cache.pop((host, port)) | ||||
| 
 | ||||
| 
 | ||||
| async def load_clients_for_trio( | ||||
|  | @ -1235,6 +1019,9 @@ async def load_clients_for_trio( | |||
|             await asyncio.sleep(float('inf')) | ||||
| 
 | ||||
| 
 | ||||
| _proxies: dict[str, MethodProxy] = {} | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_client_proxies() -> tuple[ | ||||
|     dict[str, MethodProxy], | ||||
|  | @ -1242,6 +1029,7 @@ async def open_client_proxies() -> tuple[ | |||
| ]: | ||||
|     async with ( | ||||
|         tractor.trionics.maybe_open_context( | ||||
|             # acm_func=open_client_proxies, | ||||
|             acm_func=tractor.to_asyncio.open_channel_from, | ||||
|             kwargs={'target': load_clients_for_trio}, | ||||
| 
 | ||||
|  | @ -1256,14 +1044,13 @@ async def open_client_proxies() -> tuple[ | |||
|         if cache_hit: | ||||
|             log.info(f'Re-using cached clients: {clients}') | ||||
| 
 | ||||
|         proxies = {} | ||||
|         for acct_name, client in clients.items(): | ||||
|             proxy = await stack.enter_async_context( | ||||
|                 open_client_proxy(client), | ||||
|             ) | ||||
|             proxies[acct_name] = proxy | ||||
|             _proxies[acct_name] = proxy | ||||
| 
 | ||||
|         yield proxies, clients | ||||
|         yield _proxies, clients | ||||
| 
 | ||||
| 
 | ||||
| def get_preferred_data_client( | ||||
|  | @ -1412,13 +1199,11 @@ async def open_client_proxy( | |||
|     event_table = {} | ||||
| 
 | ||||
|     async with ( | ||||
| 
 | ||||
|         to_asyncio.open_channel_from( | ||||
|             open_aio_client_method_relay, | ||||
|             client=client, | ||||
|             event_consumers=event_table, | ||||
|         ) as (first, chan), | ||||
| 
 | ||||
|         trio.open_nursery() as relay_n, | ||||
|     ): | ||||
| 
 | ||||
|  |  | |||
										
											
												File diff suppressed because it is too large
											
										
									
								
							|  | @ -41,8 +41,7 @@ from trio_typing import TaskStatus | |||
| from piker.data._sharedmem import ShmArray | ||||
| from .._util import SymbolNotFound, NoData | ||||
| from .api import ( | ||||
|     # _adhoc_futes_set, | ||||
|     con2fqsn, | ||||
|     _adhoc_futes_set, | ||||
|     log, | ||||
|     load_aio_clients, | ||||
|     ibis, | ||||
|  | @ -208,6 +207,8 @@ async def get_bars( | |||
| 
 | ||||
|         except RequestError as err: | ||||
|             msg = err.message | ||||
|             # why do we always need to rebind this? | ||||
|             # _err = err | ||||
| 
 | ||||
|             if 'No market data permissions for' in msg: | ||||
|                 # TODO: signalling for no permissions searches | ||||
|  | @ -216,8 +217,8 @@ async def get_bars( | |||
|                 ) | ||||
| 
 | ||||
|             elif ( | ||||
|                 err.code == 162 and | ||||
|                 'HMDS query returned no data' in err.message | ||||
|                 err.code == 162 | ||||
|                 and 'HMDS query returned no data' in err.message | ||||
|             ): | ||||
|                 # XXX: this is now done in the storage mgmt layer | ||||
|                 # and we shouldn't implicitly decrement the frame dt | ||||
|  | @ -236,14 +237,6 @@ async def get_bars( | |||
|                     frame_size=2000, | ||||
|                 ) | ||||
| 
 | ||||
|             # elif ( | ||||
|             #     err.code == 162 and | ||||
|             #     'Trading TWS session is connected from a different IP | ||||
|             #     address' in err.message | ||||
|             # ): | ||||
|             #     log.warning("ignoring ip address warning") | ||||
|             #     continue | ||||
| 
 | ||||
|             elif _pacing in msg: | ||||
| 
 | ||||
|                 log.warning( | ||||
|  | @ -301,13 +294,7 @@ async def get_bars( | |||
|                 else: | ||||
| 
 | ||||
|                     log.warning('Sending CONNECTION RESET') | ||||
|                     res = await data_reset_hack(reset_type='connection') | ||||
|                     if not res: | ||||
|                         log.warning( | ||||
|                             'NO VNC DETECTED!\n' | ||||
|                             'Manually press ctrl-alt-f on your IB java app' | ||||
|                         ) | ||||
|                         # break | ||||
|                     await data_reset_hack(reset_type='connection') | ||||
| 
 | ||||
|                     with trio.move_on_after(timeout) as cs: | ||||
|                         for name, ev in [ | ||||
|  | @ -426,7 +413,6 @@ asset_type_map = { | |||
|     'WAR': 'warrant', | ||||
|     'IOPT': 'warrant', | ||||
|     'BAG': 'bag', | ||||
|     'CRYPTO': 'crypto',  # bc it's diff then fiat? | ||||
|     # 'NEWS': 'news', | ||||
| } | ||||
| 
 | ||||
|  | @ -567,17 +553,38 @@ async def open_aio_quote_stream( | |||
| 
 | ||||
| 
 | ||||
| # TODO: cython/mypyc/numba this! | ||||
| # or we can at least cache a majority of the values | ||||
| # except for the ones we expect to change?.. | ||||
| def normalize( | ||||
|     ticker: Ticker, | ||||
|     calc_price: bool = False | ||||
| 
 | ||||
| ) -> dict: | ||||
| 
 | ||||
|     # should be real volume for this contract by default | ||||
|     calc_price = False | ||||
| 
 | ||||
|     # check for special contract types | ||||
|     con = ticker.contract | ||||
|     fqsn, calc_price = con2fqsn(con) | ||||
|     if type(con) in ( | ||||
|         ibis.Commodity, | ||||
|         ibis.Forex, | ||||
|     ): | ||||
|         # commodities and forex don't have an exchange name and | ||||
|         # no real volume so we have to calculate the price | ||||
|         suffix = con.secType | ||||
|         # no real volume on this tract | ||||
|         calc_price = True | ||||
| 
 | ||||
|     else: | ||||
|         suffix = con.primaryExchange | ||||
|         if not suffix: | ||||
|             suffix = con.exchange | ||||
| 
 | ||||
|         # append a `.<suffix>` to the returned symbol | ||||
|         # key for derivatives that normally is the expiry | ||||
|         # date key. | ||||
|         expiry = con.lastTradeDateOrContractMonth | ||||
|         if expiry: | ||||
|             suffix += f'.{expiry}' | ||||
| 
 | ||||
|     # convert named tuples to dicts so we send usable keys | ||||
|     new_ticks = [] | ||||
|  | @ -609,7 +616,9 @@ def normalize( | |||
| 
 | ||||
|     # generate fqsn with possible specialized suffix | ||||
|     # for derivatives, note the lowercase. | ||||
|     data['symbol'] = data['fqsn'] = fqsn | ||||
|     data['symbol'] = data['fqsn'] = '.'.join( | ||||
|         (con.symbol, suffix) | ||||
|     ).lower() | ||||
| 
 | ||||
|     # convert named tuples to dicts for transport | ||||
|     tbts = data.get('tickByTicks') | ||||
|  | @ -674,13 +683,6 @@ async def stream_quotes( | |||
|             # TODO: more consistent field translation | ||||
|             atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']] | ||||
| 
 | ||||
|             if atype in { | ||||
|                 'forex', | ||||
|                 'index', | ||||
|                 'commodity', | ||||
|             }: | ||||
|                 syminfo['no_vlm'] = True | ||||
| 
 | ||||
|             # for stocks it seems TWS reports too small a tick size | ||||
|             # such that you can't submit orders with that granularity? | ||||
|             min_tick = 0.01 if atype == 'stock' else 0 | ||||
|  | @ -707,9 +709,9 @@ async def stream_quotes( | |||
|                 }, | ||||
| 
 | ||||
|             } | ||||
|             return init_msgs, syminfo | ||||
|             return init_msgs | ||||
| 
 | ||||
|         init_msgs, syminfo = mk_init_msgs() | ||||
|         init_msgs = mk_init_msgs() | ||||
| 
 | ||||
|         # TODO: we should instead spawn a task that waits on a feed to start | ||||
|         # and let it wait indefinitely..instead of this hard coded stuff. | ||||
|  | @ -718,14 +720,7 @@ async def stream_quotes( | |||
| 
 | ||||
|         # it might be outside regular trading hours so see if we can at | ||||
|         # least grab history. | ||||
|         if ( | ||||
|             isnan(first_ticker.last) | ||||
|             and type(first_ticker.contract) not in ( | ||||
|                 ibis.Commodity, | ||||
|                 ibis.Forex, | ||||
|                 ibis.Crypto, | ||||
|             ) | ||||
|         ): | ||||
|         if isnan(first_ticker.last): | ||||
|             task_status.started((init_msgs, first_quote)) | ||||
| 
 | ||||
|             # it's not really live but this will unblock | ||||
|  | @ -748,16 +743,10 @@ async def stream_quotes( | |||
|             task_status.started((init_msgs, first_quote)) | ||||
| 
 | ||||
|             async with aclosing(stream): | ||||
|                 if syminfo.get('no_vlm', False): | ||||
| 
 | ||||
|                     # generally speaking these feeds don't | ||||
|                     # include vlm data. | ||||
|                     atype = syminfo['asset_type'] | ||||
|                     log.info( | ||||
|                         f'Non-vlm asset {sym}@{atype}, skipping quote poll...' | ||||
|                     ) | ||||
| 
 | ||||
|                 else: | ||||
|                 if type(first_ticker.contract) not in ( | ||||
|                     ibis.Commodity, | ||||
|                     ibis.Forex | ||||
|                 ): | ||||
|                     # wait for real volume on feed (trading might be closed) | ||||
|                     while True: | ||||
|                         ticker = await stream.receive() | ||||
|  | @ -816,9 +805,6 @@ async def data_reset_hack( | |||
|           successful. | ||||
|         - other OS support? | ||||
|         - integration with ``ib-gw`` run in docker + Xorg? | ||||
|         - is it possible to offer a local server that can be accessed by | ||||
|           a client? Would sure be handy for running native java blobs | ||||
|           that need to be wrangled. | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
|  | @ -849,10 +835,7 @@ async def data_reset_hack( | |||
|             client.mouse.click() | ||||
|             client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked | ||||
| 
 | ||||
|     try: | ||||
|     await tractor.to_asyncio.run_task(vnc_click_hack) | ||||
|     except OSError: | ||||
|         return False | ||||
| 
 | ||||
|     # we don't really need the ``xdotool`` approach any more B) | ||||
|     return True | ||||
|  | @ -867,30 +850,14 @@ async def open_symbol_search( | |||
|     # TODO: load user defined symbol set locally for fast search? | ||||
|     await ctx.started({}) | ||||
| 
 | ||||
|     async with ( | ||||
|         open_client_proxies() as (proxies, clients), | ||||
|         open_data_client() as data_proxy, | ||||
|     ): | ||||
|     async with open_data_client() as proxy: | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             # select a non-history client for symbol search to lighten | ||||
|             # the load in the main data node. | ||||
|             proxy = data_proxy | ||||
|             for name, proxy in proxies.items(): | ||||
|                 if proxy is data_proxy: | ||||
|                     continue | ||||
|                 break | ||||
| 
 | ||||
|             ib_client = proxy._aio_ns.ib | ||||
|             log.info(f'Using {ib_client} for symbol search') | ||||
| 
 | ||||
|             last = time.time() | ||||
|             async for pattern in stream: | ||||
|                 log.info(f'received {pattern}') | ||||
|                 now = time.time() | ||||
| 
 | ||||
|                 # this causes tractor hang... | ||||
|                 # assert 0 | ||||
|             async for pattern in stream: | ||||
|                 log.debug(f'received {pattern}') | ||||
|                 now = time.time() | ||||
| 
 | ||||
|                 assert pattern, 'IB cannot accept blank search pattern' | ||||
| 
 | ||||
|  | @ -919,7 +886,7 @@ async def open_symbol_search( | |||
| 
 | ||||
|                     continue | ||||
| 
 | ||||
|                 log.info(f'searching for {pattern}') | ||||
|                 log.debug(f'searching for {pattern}') | ||||
| 
 | ||||
|                 last = time.time() | ||||
| 
 | ||||
|  | @ -930,8 +897,6 @@ async def open_symbol_search( | |||
|                 async def stash_results(target: Awaitable[list]): | ||||
|                     stock_results.extend(await target) | ||||
| 
 | ||||
|                 for i in range(10): | ||||
|                     with trio.move_on_after(3) as cs: | ||||
|                 async with trio.open_nursery() as sn: | ||||
|                     sn.start_soon( | ||||
|                         stash_results, | ||||
|  | @ -944,26 +909,17 @@ async def open_symbol_search( | |||
|                     # trigger async request | ||||
|                     await trio.sleep(0) | ||||
| 
 | ||||
|                     if cs.cancelled_caught: | ||||
|                         log.warning( | ||||
|                             f'Search timeout? {proxy._aio_ns.ib.client}' | ||||
|                     # match against our ad-hoc set immediately | ||||
|                     adhoc_matches = fuzzy.extractBests( | ||||
|                         pattern, | ||||
|                         list(_adhoc_futes_set), | ||||
|                         score_cutoff=90, | ||||
|                     ) | ||||
|                         continue | ||||
|                     else: | ||||
|                         break | ||||
| 
 | ||||
|                     # # match against our ad-hoc set immediately | ||||
|                     # adhoc_matches = fuzzy.extractBests( | ||||
|                     #     pattern, | ||||
|                     #     list(_adhoc_futes_set), | ||||
|                     #     score_cutoff=90, | ||||
|                     # ) | ||||
|                     # log.info(f'fuzzy matched adhocs: {adhoc_matches}') | ||||
|                     # adhoc_match_results = {} | ||||
|                     # if adhoc_matches: | ||||
|                     #     # TODO: do we need to pull contract details? | ||||
|                     #     adhoc_match_results = {i[0]: {} for i in | ||||
|                     #     adhoc_matches} | ||||
|                     log.info(f'fuzzy matched adhocs: {adhoc_matches}') | ||||
|                     adhoc_match_results = {} | ||||
|                     if adhoc_matches: | ||||
|                         # TODO: do we need to pull contract details? | ||||
|                         adhoc_match_results = {i[0]: {} for i in adhoc_matches} | ||||
| 
 | ||||
|                 log.debug(f'fuzzy matching stocks {stock_results}') | ||||
|                 stock_matches = fuzzy.extractBests( | ||||
|  | @ -972,8 +928,7 @@ async def open_symbol_search( | |||
|                     score_cutoff=50, | ||||
|                 ) | ||||
| 
 | ||||
|                 # matches = adhoc_match_results | { | ||||
|                 matches = { | ||||
|                 matches = adhoc_match_results | { | ||||
|                     item[0]: {} for item in stock_matches | ||||
|                 } | ||||
|                 # TODO: we used to deliver contract details | ||||
|  |  | |||
										
											
												File diff suppressed because it is too large
											
										
									
								
							|  | @ -1,64 +0,0 @@ | |||
| ``kraken`` backend | ||||
| ------------------ | ||||
| though they don't have the most liquidity of all the cexes, they sure | ||||
| are accommodating to those of us who appreciate a little ``xmr``. | ||||
| 
 | ||||
| status | ||||
| ****** | ||||
| current support is *production grade* and both real-time data and order | ||||
| management should be correct and fast. this backend is used by core devs | ||||
| for live trading. | ||||
| 
 | ||||
| 
 | ||||
| config | ||||
| ****** | ||||
| In order to get order mode support, your ``brokers.toml`` | ||||
| needs to have something like the following: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|    [kraken] | ||||
|    accounts.spot = 'spot' | ||||
|    key_descr = "spot" | ||||
|    api_key = "69696969696969696696969696969696969696969696969696969696" | ||||
|    secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696" | ||||
| 
 | ||||
| 
 | ||||
| If everything works correctly, you should see any current positions | ||||
| loaded in the pps pane on chart load, and you should also be able to | ||||
| check your trade records in the file:: | ||||
| 
 | ||||
|     <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml | ||||
| 
 | ||||
| 
 | ||||
| An example ledger file will have entries written verbatim from the | ||||
| trade events schema: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|     [TFJBKK-SMBZS-VJ4UWS] | ||||
|     ordertxid = "SMBZSA-7CNQU-3HWLNJ" | ||||
|     postxid = "SMBZSE-M7IF5-CFI7LT" | ||||
|     pair = "XXMRZEUR" | ||||
|     time = 1655691993.4133966 | ||||
|     type = "buy" | ||||
|     ordertype = "limit" | ||||
|     price = "103.97000000" | ||||
|     cost = "499.99999977" | ||||
|     fee = "0.80000000" | ||||
|     vol = "4.80907954" | ||||
|     margin = "0.00000000" | ||||
|     misc = "" | ||||
| 
 | ||||
| 
 | ||||
| your ``pps.toml`` file will have position entries like the following: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|    [kraken.spot."xmreur.kraken"] | ||||
|    size = 4.80907954 | ||||
|    ppu = 103.97000000 | ||||
|    bsuid = "XXMRZEUR" | ||||
|    clears = [ | ||||
|     { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" }, | ||||
|    ] | ||||
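| 
| as a quick consistency check, the top-level ``size`` should equal the | ||||
| sum of the ``clears`` sizes. a minimal sketch (assuming python 3.11+ | ||||
| for the stdlib ``tomllib`` and a hypothetical local copy of the file): | ||||
| 
| .. code:: python | ||||
| 
|    # the table name comes from the example entry above. | ||||
|    import tomllib | ||||
| 
|    with open('pps.toml', 'rb') as f: | ||||
|        pps = tomllib.load(f) | ||||
| 
|    pp = pps['kraken']['spot']['xmreur.kraken'] | ||||
|    assert pp['size'] == sum(clear['size'] for clear in pp['clears']) | ||||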
|  | @ -1,61 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Kraken backend. | ||||
| 
 | ||||
| Sub-modules within break into the core functionalities: | ||||
| 
 | ||||
| - ``broker.py`` for orders / trading endpoints | ||||
| - ``feed.py`` for real-time data feed endpoints | ||||
| - ``api.py`` for the core API machinery which is ``trio``-ized, | ||||
|   wrapping the kraken REST API. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| from piker.log import get_logger | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| from .api import ( | ||||
|     get_client, | ||||
| ) | ||||
| from .feed import ( | ||||
|     open_history_client, | ||||
|     open_symbol_search, | ||||
|     stream_quotes, | ||||
| ) | ||||
| from .broker import ( | ||||
|     trades_dialogue, | ||||
|     norm_trade_records, | ||||
| ) | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'get_client', | ||||
|     'trades_dialogue', | ||||
|     'open_history_client', | ||||
|     'open_symbol_search', | ||||
|     'stream_quotes', | ||||
|     'norm_trade_records', | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| # tractor RPC enable arg | ||||
| __enable_modules__: list[str] = [ | ||||
|     'api', | ||||
|     'feed', | ||||
|     'broker', | ||||
| ] | ||||
|  | @ -1,540 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Kraken web API wrapping. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from datetime import datetime | ||||
| import itertools | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Union, | ||||
| ) | ||||
| import time | ||||
| 
 | ||||
| from bidict import bidict | ||||
| import pendulum | ||||
| import asks | ||||
| from fuzzywuzzy import process as fuzzy | ||||
| import numpy as np | ||||
| import urllib.parse | ||||
| import hashlib | ||||
| import hmac | ||||
| import base64 | ||||
| import trio | ||||
| 
 | ||||
| from piker import config | ||||
| from piker.brokers._util import ( | ||||
|     resproc, | ||||
|     SymbolNotFound, | ||||
|     BrokerError, | ||||
|     DataThrottle, | ||||
| ) | ||||
| from piker.pp import Transaction | ||||
| from . import log | ||||
| 
 | ||||
| # <uri>/<version>/ | ||||
| _url = 'https://api.kraken.com/0' | ||||
| 
 | ||||
| 
 | ||||
| # Broker specific ohlc schema which includes a vwap field | ||||
| _ohlc_dtype = [ | ||||
|     ('index', int), | ||||
|     ('time', int), | ||||
|     ('open', float), | ||||
|     ('high', float), | ||||
|     ('low', float), | ||||
|     ('close', float), | ||||
|     ('volume', float), | ||||
|     ('count', int), | ||||
|     ('bar_wap', float), | ||||
| ] | ||||
| 
 | ||||
| # UI components allow this to be declared such that additional | ||||
| # (historical) fields can be exposed. | ||||
| ohlc_dtype = np.dtype(_ohlc_dtype) | ||||
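| 
| # a hypothetical usage sketch (not in the original file): allocate a | ||||
| # bar buffer with this schema and address columns by field name, | ||||
| # | ||||
| #   bars = np.zeros(100, dtype=ohlc_dtype) | ||||
| #   bars['bar_wap']  # the broker specific vwap column | ||||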
| 
 | ||||
| _show_wap_in_history = True | ||||
| _symbol_info_translation: dict[str, str] = { | ||||
|     'tick_decimals': 'pair_decimals', | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| def get_config() -> dict[str, Any]: | ||||
| 
 | ||||
|     conf, path = config.load() | ||||
|     section = conf.get('kraken') | ||||
| 
 | ||||
|     if section is None: | ||||
|         log.warning(f'No config section found for kraken in {path}') | ||||
|         return {} | ||||
| 
 | ||||
|     return section | ||||
| 
 | ||||
| 
 | ||||
| def get_kraken_signature( | ||||
|     urlpath: str, | ||||
|     data: dict[str, Any], | ||||
|     secret: str | ||||
| ) -> str: | ||||
|     postdata = urllib.parse.urlencode(data) | ||||
|     encoded = (str(data['nonce']) + postdata).encode() | ||||
|     message = urlpath.encode() + hashlib.sha256(encoded).digest() | ||||
| 
 | ||||
|     mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512) | ||||
|     sigdigest = base64.b64encode(mac.digest()) | ||||
|     return sigdigest.decode() | ||||
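| 
| # a hypothetical usage sketch (not in the original file): sign a | ||||
| # private endpoint request the same way ``endpoint()`` below does, | ||||
| # i.e. HMAC-SHA512 over ``path + SHA256(nonce + urlencoded data)`` | ||||
| # keyed with the base64-decoded secret, | ||||
| # | ||||
| #   data = {'nonce': str(int(1000*time.time()))} | ||||
| #   sig = get_kraken_signature('/0/private/Balance', data, secret) | ||||
| #   headers = {'API-Key': api_key, 'API-Sign': sig} | ||||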
| 
 | ||||
| 
 | ||||
| class InvalidKey(ValueError): | ||||
|     ''' | ||||
|     EAPI:Invalid key | ||||
|     This error is returned when the API key used for the call is | ||||
|     either expired or disabled, please review the API key in your | ||||
|     Settings -> API tab of account management or generate a new one | ||||
|     and update your application. | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| 
 | ||||
| class Client: | ||||
| 
 | ||||
|     # global symbol normalization table | ||||
|     _ntable: dict[str, str] = {} | ||||
|     _atable: bidict[str, str] = bidict() | ||||
| 
 | ||||
|     def __init__( | ||||
|         self, | ||||
|         config: dict[str, str], | ||||
|         name: str = '', | ||||
|         api_key: str = '', | ||||
|         secret: str = '' | ||||
|     ) -> None: | ||||
|         self._sesh = asks.Session(connections=4) | ||||
|         self._sesh.base_location = _url | ||||
|         self._sesh.headers.update({ | ||||
|             'User-Agent': | ||||
|                 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)' | ||||
|         }) | ||||
|         self.conf: dict[str, str] = config | ||||
|         self._pairs: list[str] = [] | ||||
|         self._name = name | ||||
|         self._api_key = api_key | ||||
|         self._secret = secret | ||||
| 
 | ||||
|     @property | ||||
|     def pairs(self) -> dict[str, Any]: | ||||
|         if self._pairs is None: | ||||
|             raise RuntimeError( | ||||
|                 "Make sure to run `cache_symbols()` on startup!" | ||||
|             ) | ||||
|             # retrieve and cache all symbols | ||||
| 
 | ||||
|         return self._pairs | ||||
| 
 | ||||
|     async def _public( | ||||
|         self, | ||||
|         method: str, | ||||
|         data: dict, | ||||
|     ) -> dict[str, Any]: | ||||
|         resp = await self._sesh.post( | ||||
|             path=f'/public/{method}', | ||||
|             json=data, | ||||
|             timeout=float('inf') | ||||
|         ) | ||||
|         return resproc(resp, log) | ||||
| 
 | ||||
|     async def _private( | ||||
|         self, | ||||
|         method: str, | ||||
|         data: dict, | ||||
|         uri_path: str | ||||
|     ) -> dict[str, Any]: | ||||
|         headers = { | ||||
|             'Content-Type': | ||||
|                 'application/x-www-form-urlencoded', | ||||
|             'API-Key': | ||||
|                 self._api_key, | ||||
|             'API-Sign': | ||||
|                 get_kraken_signature(uri_path, data, self._secret) | ||||
|         } | ||||
|         resp = await self._sesh.post( | ||||
|             path=f'/private/{method}', | ||||
|             data=data, | ||||
|             headers=headers, | ||||
|             timeout=float('inf') | ||||
|         ) | ||||
|         return resproc(resp, log) | ||||
| 
 | ||||
|     async def endpoint( | ||||
|         self, | ||||
|         method: str, | ||||
|         data: dict[str, Any] | ||||
| 
 | ||||
|     ) -> dict[str, Any]: | ||||
|         uri_path = f'/0/private/{method}' | ||||
|         data['nonce'] = str(int(1000*time.time())) | ||||
|         return await self._private(method, data, uri_path) | ||||
| 
 | ||||
|     async def get_balances( | ||||
|         self, | ||||
|     ) -> dict[str, float]: | ||||
|         ''' | ||||
|         Return the set of asset balances for this account | ||||
|         by symbol. | ||||
| 
 | ||||
|         ''' | ||||
|         resp = await self.endpoint( | ||||
|             'Balance', | ||||
|             {}, | ||||
|         ) | ||||
|         by_bsuid = resp['result'] | ||||
|         return { | ||||
|             self._atable[sym].lower(): float(bal) | ||||
|             for sym, bal in by_bsuid.items() | ||||
|         } | ||||
| 
 | ||||
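|     # eg. the raw 'Balance' result keyed by kraken's internal asset | ||||
|     # ids is remapped to lowercased altnames (sketch): | ||||
|     # | ||||
|     #   {'XXBT': '0.1234'}  ->  {'xbt': 0.1234} | ||||
| 
 | ||||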
|     async def get_assets(self) -> dict[str, dict]: | ||||
|         resp = await self._public('Assets', {}) | ||||
|         return resp['result'] | ||||
| 
 | ||||
|     async def cache_assets(self) -> None: | ||||
|         assets = self.assets = await self.get_assets() | ||||
|         for bsuid, info in assets.items(): | ||||
|             self._atable[bsuid] = info['altname'] | ||||
| 
 | ||||
|     async def get_trades( | ||||
|         self, | ||||
|         fetch_limit: int = 10, | ||||
| 
 | ||||
|     ) -> dict[str, Any]: | ||||
|         ''' | ||||
|         Get the trades (aka cleared orders) history from the rest endpoint: | ||||
|         https://docs.kraken.com/rest/#operation/getTradeHistory | ||||
| 
 | ||||
|         ''' | ||||
|         ofs = 0 | ||||
|         trades_by_id: dict[str, Any] = {} | ||||
| 
 | ||||
|         for i in itertools.count(): | ||||
|             if i >= fetch_limit: | ||||
|                 break | ||||
| 
 | ||||
|             # increment 'ofs' pagination offset | ||||
|             ofs = i*50 | ||||
| 
 | ||||
|             resp = await self.endpoint( | ||||
|                 'TradesHistory', | ||||
|                 {'ofs': ofs}, | ||||
|             ) | ||||
|             by_id = resp['result']['trades'] | ||||
|             trades_by_id.update(by_id) | ||||
| 
 | ||||
|             # we can get up to 50 results per query | ||||
|             if ( | ||||
|                 len(by_id) < 50 | ||||
|             ): | ||||
|                 err = resp.get('error') | ||||
|                 if err: | ||||
|                     raise BrokerError(err) | ||||
| 
 | ||||
|                 # we received fewer than the max (50) trade | ||||
|                 # results which means there's no more history | ||||
|                 # to page through: catch the end of the trades. | ||||
|                 count = resp['result']['count'] | ||||
|                 break | ||||
| 
 | ||||
|         # sanity check on update | ||||
|         assert count == len(trades_by_id) | ||||
|         return trades_by_id | ||||
| 
 | ||||
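|     # eg. paginated 'TradesHistory' resp schema (sketch, see | ||||
|     # https://docs.kraken.com/rest/#operation/getTradeHistory): | ||||
|     # | ||||
|     #   {'result': { | ||||
|     #       'trades': {<txid>: {...}, ...},  # <= 50 entries per page | ||||
|     #       'count': <total>, | ||||
|     #   }} | ||||
| 
 | ||||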
|     async def get_xfers( | ||||
|         self, | ||||
|         asset: str, | ||||
|         src_asset: str = '', | ||||
| 
 | ||||
|     ) -> dict[str, Transaction]: | ||||
|         ''' | ||||
|         Get asset balance transfer transactions. | ||||
| 
 | ||||
|         Currently only withdrawals are supported. | ||||
| 
 | ||||
|         ''' | ||||
|         xfers: list[dict] = (await self.endpoint( | ||||
|             'WithdrawStatus', | ||||
|             {'asset': asset}, | ||||
|         ))['result'] | ||||
| 
 | ||||
|         # eg. resp schema: | ||||
|         # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset': | ||||
|         #     'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid': | ||||
|         #     'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44', | ||||
|         #     'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z', | ||||
|         #     'amount': '0.00300726', 'fee': '0.00001000', 'time': | ||||
|         #     1658347714, 'status': 'Success'}]} | ||||
| 
 | ||||
|         trans: dict[str, Transaction] = {} | ||||
|         for entry in xfers: | ||||
|             # look up the normalized name | ||||
|             asset = self._atable[entry['asset']].lower() | ||||
| 
 | ||||
|             # XXX: this is in the asset units (likely) so it isn't | ||||
|             # quite the same as a commissions cost necessarily.. | ||||
|             cost = float(entry['fee']) | ||||
| 
 | ||||
|             tran = Transaction( | ||||
|                 fqsn=asset + '.kraken', | ||||
|                 tid=entry['txid'], | ||||
|                 dt=pendulum.from_timestamp(entry['time']), | ||||
|                 bsuid=f'{asset}{src_asset}', | ||||
|                 size=-1*( | ||||
|                     float(entry['amount']) | ||||
|                     + | ||||
|                     cost | ||||
|                 ), | ||||
|                 # since this will be treated as a "sell" it | ||||
|                 # shouldn't be needed to compute the breakeven price. | ||||
|                 price='NaN', | ||||
| 
 | ||||
|                 # XXX: see note above | ||||
|                 cost=0, | ||||
|             ) | ||||
|             trans[tran.tid] = tran | ||||
| 
 | ||||
|         return trans | ||||
| 
 | ||||
|     async def submit_limit( | ||||
|         self, | ||||
|         symbol: str, | ||||
|         price: float, | ||||
|         action: str, | ||||
|         size: float, | ||||
|         reqid: Optional[str] = None, | ||||
|         validate: bool = False  # set True to test the call without a real submission | ||||
| 
 | ||||
|     ) -> dict: | ||||
|         ''' | ||||
|         Place an order (or edit an existing one via ``reqid``) and | ||||
|         return the endpoint response. | ||||
| 
 | ||||
|         ''' | ||||
|         # Build the data dict with the keys common to both endpoints | ||||
|         data = { | ||||
|             "pair": symbol, | ||||
|             "price": str(price), | ||||
|             "validate": validate | ||||
|         } | ||||
|         if reqid is None: | ||||
|             # Build order data for kraken api | ||||
|             data |= { | ||||
|                 "ordertype": "limit", | ||||
|                 "type": action, | ||||
|                 "volume": str(size), | ||||
|             } | ||||
|             return await self.endpoint('AddOrder', data) | ||||
| 
 | ||||
|         else: | ||||
|             # Edit order data for kraken api | ||||
|             data["txid"] = reqid | ||||
|             return await self.endpoint('EditOrder', data) | ||||
| 
 | ||||
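|     # eg. a "validate only" submission which kraken checks but | ||||
|     # doesn't actually execute (sketch): | ||||
|     # | ||||
|     #   await client.submit_limit( | ||||
|     #       symbol='XBTUSD', | ||||
|     #       price=10e3, | ||||
|     #       action='buy', | ||||
|     #       size=0.1, | ||||
|     #       validate=True, | ||||
|     #   ) | ||||
| 
 | ||||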
|     async def submit_cancel( | ||||
|         self, | ||||
|         reqid: str, | ||||
|     ) -> dict: | ||||
|         ''' | ||||
|         Send cancel request for order id ``reqid``. | ||||
| 
 | ||||
|         ''' | ||||
|         # txid is a transaction id given by kraken | ||||
|         return await self.endpoint('CancelOrder', {"txid": reqid}) | ||||
| 
 | ||||
|     async def symbol_info( | ||||
|         self, | ||||
|         pair: Optional[str] = None, | ||||
| 
 | ||||
|     ) -> dict[str, dict[str, str]]: | ||||
| 
 | ||||
|         if pair is not None: | ||||
|             pairs = {'pair': pair} | ||||
|         else: | ||||
|             pairs = None  # get all pairs | ||||
| 
 | ||||
|         resp = await self._public('AssetPairs', pairs) | ||||
|         err = resp['error'] | ||||
|         if err: | ||||
|             symbolname = pairs['pair'] if pair else None | ||||
|             raise SymbolNotFound(f'{symbolname}.kraken') | ||||
| 
 | ||||
|         pairs = resp['result'] | ||||
| 
 | ||||
|         if pair is not None: | ||||
|             _, data = next(iter(pairs.items())) | ||||
|             return data | ||||
|         else: | ||||
|             return pairs | ||||
| 
 | ||||
|     async def cache_symbols( | ||||
|         self, | ||||
|     ) -> dict: | ||||
|         if not self._pairs: | ||||
|             self._pairs = await self.symbol_info() | ||||
| 
 | ||||
|             ntable = {} | ||||
|             for restapikey, info in self._pairs.items(): | ||||
|                 ntable[restapikey] = ntable[info['wsname']] = info['altname'] | ||||
| 
 | ||||
|             self._ntable.update(ntable) | ||||
| 
 | ||||
|         return self._pairs | ||||
| 
 | ||||
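|     # eg. after caching, every rest and ws key maps to the altname | ||||
|     # (sketch): | ||||
|     # | ||||
|     #   _ntable['XXBTZUSD'] == _ntable['XBT/USD'] == 'XBTUSD' | ||||
| 
 | ||||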
|     async def search_symbols( | ||||
|         self, | ||||
|         pattern: str, | ||||
|         limit: Optional[int] = None, | ||||
|     ) -> dict[str, Any]: | ||||
|         if self._pairs: | ||||
|             data = self._pairs | ||||
|         else: | ||||
|             data = await self.symbol_info() | ||||
| 
 | ||||
|         matches = fuzzy.extractBests( | ||||
|             pattern, | ||||
|             data, | ||||
|             score_cutoff=50, | ||||
|         ) | ||||
|         # repack in dict form | ||||
|         return {item[0]['altname']: item[0] for item in matches} | ||||
| 
 | ||||
|     async def bars( | ||||
|         self, | ||||
|         symbol: str = 'XBTUSD', | ||||
| 
 | ||||
|         # UTC 2017-07-02 12:53:20 | ||||
|         since: Optional[Union[int, datetime]] = None, | ||||
|         count: int = 720,  # <- max allowed per query | ||||
|         as_np: bool = True, | ||||
| 
 | ||||
|     ) -> dict: | ||||
| 
 | ||||
|         if since is None: | ||||
|             since = pendulum.now('UTC').start_of('minute').subtract( | ||||
|                 minutes=count).timestamp() | ||||
| 
 | ||||
|         elif isinstance(since, int): | ||||
|             since = pendulum.from_timestamp(since).timestamp() | ||||
| 
 | ||||
|         else:  # presumably a pendulum datetime | ||||
|             since = since.timestamp() | ||||
| 
 | ||||
|         # UTC 2017-07-02 12:53:20 is oldest seconds value | ||||
|         since = str(max(1499000000, int(since))) | ||||
|         json = await self._public( | ||||
|             'OHLC', | ||||
|             data={ | ||||
|                 'pair': symbol, | ||||
|                 'since': since, | ||||
|             }, | ||||
|         ) | ||||
|         try: | ||||
|             res = json['result'] | ||||
|             res.pop('last') | ||||
|             bars = next(iter(res.values())) | ||||
| 
 | ||||
|             new_bars = [] | ||||
| 
 | ||||
|             first = bars[0] | ||||
|             last_nz_vwap = first[-3] | ||||
|             if last_nz_vwap == 0: | ||||
|                 # use close if vwap is zero | ||||
|                 last_nz_vwap = first[-4] | ||||
| 
 | ||||
|             # convert all fields to native types | ||||
|             for i, bar in enumerate(bars): | ||||
|                 # normalize weird zero-ed vwap values..cmon kraken.. | ||||
|                 # indicates vwap didn't change since last bar | ||||
|                 vwap = float(bar.pop(-3)) | ||||
|                 if vwap != 0: | ||||
|                     last_nz_vwap = vwap | ||||
|                 else: | ||||
|                     vwap = last_nz_vwap | ||||
| 
 | ||||
|                 # re-insert vwap as the last of the fields | ||||
|                 bar.append(vwap) | ||||
| 
 | ||||
|                 new_bars.append( | ||||
|                     (i,) + tuple( | ||||
|                         ftype(bar[j]) for j, (name, ftype) in enumerate( | ||||
|                             _ohlc_dtype[1:] | ||||
|                         ) | ||||
|                     ) | ||||
|                 ) | ||||
|             array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars | ||||
|             return array | ||||
|         except KeyError: | ||||
|             errmsg = json['error'][0] | ||||
| 
 | ||||
|             if 'not found' in errmsg: | ||||
|                 raise SymbolNotFound(errmsg + f': {symbol}') | ||||
| 
 | ||||
|             elif 'Too many requests' in errmsg: | ||||
|                 raise DataThrottle(f'{symbol}') | ||||
| 
 | ||||
|             else: | ||||
|                 raise BrokerError(errmsg) | ||||
| 
 | ||||
|     @classmethod | ||||
|     def normalize_symbol( | ||||
|         cls, | ||||
|         ticker: str | ||||
|     ) -> str: | ||||
|         ''' | ||||
|         Normalize symbol names to a 3x3 pair from the global | ||||
|         definition map which we build out from the data retrieved from | ||||
|         the 'AssetPairs' endpoint, see methods above. | ||||
| 
 | ||||
|         ''' | ||||
|         ticker = cls._ntable[ticker] | ||||
|         symlen = len(ticker) | ||||
|         if symlen != 6: | ||||
|             raise ValueError(f'Unhandled symbol: {ticker}') | ||||
| 
 | ||||
|         return ticker.lower() | ||||
| 
 | ||||
| 
 | ||||
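| # eg. a normalization round trip once ``cache_symbols()`` has been | ||||
| # run (sketch): | ||||
| # | ||||
| #   Client.normalize_symbol('XBT/USD')  # -> 'xbtusd' | ||||
| 
 | ||||
| 
 | ||||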
| @acm | ||||
| async def get_client() -> Client: | ||||
| 
 | ||||
|     conf = get_config() | ||||
|     if conf: | ||||
|         client = Client( | ||||
|             conf, | ||||
|             name=conf['key_descr'], | ||||
|             api_key=conf['api_key'], | ||||
|             secret=conf['secret'] | ||||
|         ) | ||||
|     else: | ||||
|         client = Client({}) | ||||
| 
 | ||||
|     # at startup, load all symbols and asset info in | ||||
|     # batch requests. | ||||
|     async with trio.open_nursery() as nurse: | ||||
|         nurse.start_soon(client.cache_assets) | ||||
|         await client.cache_symbols() | ||||
| 
 | ||||
|     yield client | ||||
										
											
												File diff suppressed because it is too large
							|  | @ -1,499 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Real-time and historical data feed endpoints. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from datetime import datetime | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Callable, | ||||
| ) | ||||
| import time | ||||
| 
 | ||||
| from async_generator import aclosing | ||||
| from fuzzywuzzy import process as fuzzy | ||||
| import numpy as np | ||||
| import pendulum | ||||
| from trio_typing import TaskStatus | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| from piker._cacheables import open_cached_client | ||||
| from piker.brokers._util import ( | ||||
|     BrokerError, | ||||
|     DataThrottle, | ||||
|     DataUnavailable, | ||||
| ) | ||||
| from piker.log import get_console_log | ||||
| from piker.data import ShmArray | ||||
| from piker.data.types import Struct | ||||
| from piker.data._web_bs import open_autorecon_ws, NoBsWs | ||||
| from . import log | ||||
| from .api import ( | ||||
|     Client, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # https://www.kraken.com/features/api#get-tradable-pairs | ||||
| class Pair(Struct): | ||||
|     altname: str  # alternate pair name | ||||
|     wsname: str  # WebSocket pair name (if available) | ||||
|     aclass_base: str  # asset class of base component | ||||
|     base: str  # asset id of base component | ||||
|     aclass_quote: str  # asset class of quote component | ||||
|     quote: str  # asset id of quote component | ||||
|     lot: str  # volume lot size | ||||
| 
 | ||||
|     pair_decimals: int  # scaling decimal places for pair | ||||
|     lot_decimals: int  # scaling decimal places for volume | ||||
| 
 | ||||
|     # amount to multiply lot volume by to get currency volume | ||||
|     lot_multiplier: float | ||||
| 
 | ||||
|     # array of leverage amounts available when buying | ||||
|     leverage_buy: list[int] | ||||
|     # array of leverage amounts available when selling | ||||
|     leverage_sell: list[int] | ||||
| 
 | ||||
|     # fee schedule array in [volume, percent fee] tuples | ||||
|     fees: list[tuple[int, float]] | ||||
| 
 | ||||
|     # maker fee schedule array in [volume, percent fee] tuples (if on | ||||
|     # maker/taker) | ||||
|     fees_maker: list[tuple[int, float]] | ||||
| 
 | ||||
|     fee_volume_currency: str  # volume discount currency | ||||
|     margin_call: str  # margin call level | ||||
|     margin_stop: str  # stop-out/liquidation margin level | ||||
|     ordermin: float  # minimum order volume for pair | ||||
| 
 | ||||
| 
 | ||||
| class OHLC(Struct): | ||||
|     ''' | ||||
|     Description of the flattened OHLC quote format. | ||||
| 
 | ||||
|     For schema details see: | ||||
|         https://docs.kraken.com/websockets/#message-ohlc | ||||
| 
 | ||||
|     ''' | ||||
|     chan_id: int  # internal kraken id | ||||
|     chan_name: str  # eg. ohlc-1  (name-interval) | ||||
|     pair: str  # fx pair | ||||
|     time: float  # Begin time of interval, in seconds since epoch | ||||
|     etime: float  # End time of interval, in seconds since epoch | ||||
|     open: float  # Open price of interval | ||||
|     high: float  # High price within interval | ||||
|     low: float  # Low price within interval | ||||
|     close: float  # Close price of interval | ||||
|     vwap: float  # Volume weighted average price within interval | ||||
|     volume: float  # Accumulated volume **within interval** | ||||
|     count: int  # Number of trades within interval | ||||
|     # (sampled) generated tick data | ||||
|     ticks: list[Any] = [] | ||||
| 
 | ||||
| 
 | ||||
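| # eg. a raw ws ohlc msg which maps into ``OHLC`` above (sketch, per | ||||
| # https://docs.kraken.com/websockets/#message-ohlc): | ||||
| # | ||||
| #   [42, ['1542057314.748456', '1542057360.435743', '3586.70000', | ||||
| #    '3586.70000', '3586.60000', '3586.60000', '3586.68894', | ||||
| #    '0.03373000', 2], 'ohlc-5', 'XBT/USD'] | ||||
| 
 | ||||
| 
 | ||||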
| async def stream_messages( | ||||
|     ws: NoBsWs, | ||||
| ): | ||||
|     ''' | ||||
|     Message stream parser and heartbeat handler. | ||||
| 
 | ||||
|     Deliver ws subscription messages as well as handle heartbeat logic | ||||
|     through a single async generator. | ||||
| 
 | ||||
|     ''' | ||||
|     too_slow_count = last_hb = 0 | ||||
| 
 | ||||
|     while True: | ||||
| 
 | ||||
|         with trio.move_on_after(5) as cs: | ||||
|             msg = await ws.recv_msg() | ||||
| 
 | ||||
|         # trigger reconnection if heartbeat is laggy | ||||
|         if cs.cancelled_caught: | ||||
| 
 | ||||
|             too_slow_count += 1 | ||||
| 
 | ||||
|             if too_slow_count > 20: | ||||
|                 log.warning( | ||||
|                     "Heartbeat is too slow, resetting ws connection") | ||||
| 
 | ||||
|                 await ws._connect() | ||||
|                 too_slow_count = 0 | ||||
|                 continue | ||||
| 
 | ||||
|         match msg: | ||||
|             case {'event': 'heartbeat'}: | ||||
|                 now = time.time() | ||||
|                 delay = now - last_hb | ||||
|                 last_hb = now | ||||
| 
 | ||||
|                 # XXX: why tf is this not printing without --tl flag? | ||||
|                 log.debug(f"Heartbeat after {delay}") | ||||
|                 # print(f"Heartbeat after {delay}") | ||||
| 
 | ||||
|                 continue | ||||
| 
 | ||||
|             case _: | ||||
|                 # passthrough sub msgs | ||||
|                 yield msg | ||||
| 
 | ||||
| 
 | ||||
| async def process_data_feed_msgs( | ||||
|     ws: NoBsWs, | ||||
| ): | ||||
|     ''' | ||||
|     Parse and pack data feed messages. | ||||
| 
 | ||||
|     ''' | ||||
|     async for msg in stream_messages(ws): | ||||
|         match msg: | ||||
|             case { | ||||
|                 'errorMessage': errmsg | ||||
|             }: | ||||
|                 raise BrokerError(errmsg) | ||||
| 
 | ||||
|             case { | ||||
|                 'event': 'subscriptionStatus', | ||||
|             } as sub: | ||||
|                 log.info( | ||||
|                     'WS subscription is active:\n' | ||||
|                     f'{sub}' | ||||
|                 ) | ||||
|                 continue | ||||
| 
 | ||||
|             case [ | ||||
|                 chan_id, | ||||
|                 *payload_array, | ||||
|                 chan_name, | ||||
|                 pair | ||||
|             ]: | ||||
|                 if 'ohlc' in chan_name: | ||||
|                     ohlc = OHLC( | ||||
|                         chan_id, | ||||
|                         chan_name, | ||||
|                         pair, | ||||
|                         *payload_array[0] | ||||
|                     ) | ||||
|                     ohlc.typecast() | ||||
|                     yield 'ohlc', ohlc | ||||
| 
 | ||||
|                 elif 'spread' in chan_name: | ||||
| 
 | ||||
|                     bid, ask, ts, bsize, asize = map( | ||||
|                         float, payload_array[0]) | ||||
| 
 | ||||
|                     # TODO: really makes you think IB has a horrible API... | ||||
|                     quote = { | ||||
|                         'symbol': pair.replace('/', ''), | ||||
|                         'ticks': [ | ||||
|                             {'type': 'bid', 'price': bid, 'size': bsize}, | ||||
|                             {'type': 'bsize', 'price': bid, 'size': bsize}, | ||||
| 
 | ||||
|                             {'type': 'ask', 'price': ask, 'size': asize}, | ||||
|                             {'type': 'asize', 'price': ask, 'size': asize}, | ||||
|                         ], | ||||
|                     } | ||||
|                     yield 'l1', quote | ||||
| 
 | ||||
|                 # elif 'book' in msg[-2]: | ||||
|                 #     chan_id, *payload_array, chan_name, pair = msg | ||||
|                 #     print(msg) | ||||
| 
 | ||||
|             case _: | ||||
|                 print(f'UNHANDLED MSG: {msg}') | ||||
|                 # yield msg | ||||
| 
 | ||||
| 
 | ||||
| def normalize( | ||||
|     ohlc: OHLC, | ||||
| 
 | ||||
| ) -> dict: | ||||
|     quote = ohlc.to_dict() | ||||
|     quote['broker_ts'] = quote['time'] | ||||
|     quote['brokerd_ts'] = time.time() | ||||
|     quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '') | ||||
|     quote['last'] = quote['close'] | ||||
|     quote['bar_wap'] = ohlc.vwap | ||||
| 
 | ||||
|     # seriously eh? what's with this non-symmetry everywhere | ||||
|     # in subscription systems... | ||||
|     # XXX: piker style is always lowercase symbols. | ||||
|     topic = quote['pair'].replace('/', '').lower() | ||||
| 
 | ||||
|     # print(quote) | ||||
|     return topic, quote | ||||
| 
 | ||||
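| # eg. ``normalize()`` emits a (topic, quote) pair (sketch): | ||||
| # | ||||
| #   ('xbtusd', {'symbol': 'XBTUSD', 'last': <close>, ...}) | ||||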
| 
 | ||||
| @acm | ||||
| async def open_history_client( | ||||
|     symbol: str, | ||||
| 
 | ||||
| ) -> tuple[Callable, int]: | ||||
| 
 | ||||
|     # TODO implement history getter for the new storage layer. | ||||
|     async with open_cached_client('kraken') as client: | ||||
| 
 | ||||
|         # lol, kraken won't send any more than the "last" | ||||
|         # 720 1m bars.. so we have to just ignore further | ||||
|         # requests of this type.. | ||||
|         queries: int = 0 | ||||
| 
 | ||||
|         async def get_ohlc( | ||||
|             end_dt: Optional[datetime] = None, | ||||
|             start_dt: Optional[datetime] = None, | ||||
| 
 | ||||
|         ) -> tuple[ | ||||
|             np.ndarray, | ||||
|             datetime,  # start | ||||
|             datetime,  # end | ||||
|         ]: | ||||
| 
 | ||||
|             nonlocal queries | ||||
|             if queries > 0: | ||||
|                 raise DataUnavailable | ||||
| 
 | ||||
|             # retry up to 3 times when throttled | ||||
|             for _ in range(3): | ||||
|                 try: | ||||
|                     array = await client.bars( | ||||
|                         symbol, | ||||
|                         since=end_dt, | ||||
|                     ) | ||||
|                     queries += 1 | ||||
|                     break | ||||
|                 except DataThrottle: | ||||
|                     log.warning(f'kraken OHLC throttle for {symbol}') | ||||
|                     await trio.sleep(1) | ||||
|             else: | ||||
|                 # all attempts were throttled | ||||
|                 raise DataUnavailable | ||||
| 
 | ||||
|             start_dt = pendulum.from_timestamp(array[0]['time']) | ||||
|             end_dt = pendulum.from_timestamp(array[-1]['time']) | ||||
|             return array, start_dt, end_dt | ||||
| 
 | ||||
|         yield get_ohlc, {'erlangs': 1, 'rate': 1} | ||||
| 
 | ||||
| 
 | ||||
| async def backfill_bars( | ||||
| 
 | ||||
|     sym: str, | ||||
|     shm: ShmArray,  # type: ignore # noqa | ||||
|     count: int = 10,  # NOTE: any more and we'll overrun the underlying buffer | ||||
|     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Fill historical bars into shared mem / storage afap. | ||||
|     ''' | ||||
|     with trio.CancelScope() as cs: | ||||
|         async with open_cached_client('kraken') as client: | ||||
|             bars = await client.bars(symbol=sym) | ||||
|             shm.push(bars) | ||||
|             task_status.started(cs) | ||||
| 
 | ||||
| 
 | ||||
| async def stream_quotes( | ||||
| 
 | ||||
|     send_chan: trio.abc.SendChannel, | ||||
|     symbols: list[str], | ||||
|     feed_is_live: trio.Event, | ||||
|     loglevel: str = None, | ||||
| 
 | ||||
|     # backend specific | ||||
|     sub_type: str = 'ohlc', | ||||
| 
 | ||||
|     # startup sync | ||||
|     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Subscribe for ohlc stream of quotes for ``pairs``. | ||||
| 
 | ||||
|     ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>. | ||||
| 
 | ||||
|     ''' | ||||
|     # XXX: required to propagate ``tractor`` loglevel to piker logging | ||||
|     get_console_log(loglevel or tractor.current_actor().loglevel) | ||||
| 
 | ||||
|     ws_pairs = {} | ||||
|     sym_infos = {} | ||||
| 
 | ||||
|     async with open_cached_client('kraken') as client, send_chan as send_chan: | ||||
| 
 | ||||
|         # keep client cached for real-time section | ||||
|         for sym in symbols: | ||||
| 
 | ||||
|             # transform to upper since piker style is always lower | ||||
|             sym = sym.upper() | ||||
| 
 | ||||
|             si = Pair(**await client.symbol_info(sym))  # validation | ||||
|             syminfo = si.to_dict() | ||||
|             syminfo['price_tick_size'] = 1 / 10**si.pair_decimals | ||||
|             syminfo['lot_tick_size'] = 1 / 10**si.lot_decimals | ||||
|             syminfo['asset_type'] = 'crypto' | ||||
|             sym_infos[sym] = syminfo | ||||
|             ws_pairs[sym] = si.wsname | ||||
| 
 | ||||
|         symbol = symbols[0].lower() | ||||
| 
 | ||||
|         init_msgs = { | ||||
|             # pass back a token and a bool signalling if we're the | ||||
|             # writer and that history has been written | ||||
|             symbol: { | ||||
|                 'symbol_info': sym_infos[sym], | ||||
|                 'shm_write_opts': {'sum_tick_vml': False}, | ||||
|                 'fqsn': sym, | ||||
|             }, | ||||
|         } | ||||
| 
 | ||||
|         @acm | ||||
|         async def subscribe(ws: NoBsWs): | ||||
| 
 | ||||
|             # XXX: setup subs | ||||
|             # https://docs.kraken.com/websockets/#message-subscribe | ||||
|             # specific logic for this in kraken's sync client: | ||||
|             # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188 | ||||
|             ohlc_sub = { | ||||
|                 'event': 'subscribe', | ||||
|                 'pair': list(ws_pairs.values()), | ||||
|                 'subscription': { | ||||
|                     'name': 'ohlc', | ||||
|                     'interval': 1, | ||||
|                 }, | ||||
|             } | ||||
| 
 | ||||
|             # TODO: we want to eventually allow unsubs which should | ||||
|             # be completely fine to request from a separate task | ||||
|             # since internally the ws methods appear to be FIFO | ||||
|             # locked. | ||||
|             await ws.send_msg(ohlc_sub) | ||||
| 
 | ||||
|             # trade data (aka L1) | ||||
|             l1_sub = { | ||||
|                 'event': 'subscribe', | ||||
|                 'pair': list(ws_pairs.values()), | ||||
|                 'subscription': { | ||||
|                     'name': 'spread', | ||||
|                     # 'depth': 10} | ||||
|                 }, | ||||
|             } | ||||
| 
 | ||||
|             # request the L1 spread subscription | ||||
|             await ws.send_msg(l1_sub) | ||||
| 
 | ||||
|             yield | ||||
| 
 | ||||
|             # unsub from all pairs on teardown | ||||
|             await ws.send_msg({ | ||||
|                 'pair': list(ws_pairs.values()), | ||||
|                 'event': 'unsubscribe', | ||||
|                 'subscription': ['ohlc', 'spread'], | ||||
|             }) | ||||
| 
 | ||||
|             # XXX: do we need to ack the unsub? | ||||
|             # await ws.recv_msg() | ||||
| 
 | ||||
|         # see the tips on reconnection logic: | ||||
|         # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds | ||||
|         ws: NoBsWs | ||||
|         async with ( | ||||
|             open_autorecon_ws( | ||||
|                 'wss://ws.kraken.com/', | ||||
|                 fixture=subscribe, | ||||
|             ) as ws, | ||||
|             aclosing(process_data_feed_msgs(ws)) as msg_gen, | ||||
|         ): | ||||
|             # pull a first quote and deliver | ||||
|             typ, ohlc_last = await anext(msg_gen) | ||||
|             topic, quote = normalize(ohlc_last) | ||||
| 
 | ||||
|             task_status.started((init_msgs,  quote)) | ||||
| 
 | ||||
|             # lol, only "closes" when they're margin squeezing clients ;P | ||||
|             feed_is_live.set() | ||||
| 
 | ||||
|             # keep start of last interval for volume tracking | ||||
|             last_interval_start = ohlc_last.etime | ||||
| 
 | ||||
|             # start streaming | ||||
|             async for typ, ohlc in msg_gen: | ||||
| 
 | ||||
|                 if typ == 'ohlc': | ||||
| 
 | ||||
|                     # TODO: can get rid of all this by using | ||||
|                     # ``trades`` subscription... | ||||
| 
 | ||||
|                     # generate tick values to match time & sales pane: | ||||
|                     # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m | ||||
|                     volume = ohlc.volume | ||||
| 
 | ||||
|                     # new OHLC sample interval | ||||
|                     if ohlc.etime > last_interval_start: | ||||
|                         last_interval_start = ohlc.etime | ||||
|                         tick_volume = volume | ||||
| 
 | ||||
|                     else: | ||||
|                         # this is the tick volume *within the interval* | ||||
|                         tick_volume = volume - ohlc_last.volume | ||||
| 
 | ||||
|                     ohlc_last = ohlc | ||||
|                     last = ohlc.close | ||||
| 
 | ||||
|                     if tick_volume: | ||||
|                         ohlc.ticks.append({ | ||||
|                             'type': 'trade', | ||||
|                             'price': last, | ||||
|                             'size': tick_volume, | ||||
|                         }) | ||||
| 
 | ||||
|                     topic, quote = normalize(ohlc) | ||||
| 
 | ||||
|                 elif typ == 'l1': | ||||
|                     quote = ohlc | ||||
|                     topic = quote['symbol'].lower() | ||||
| 
 | ||||
|                 await send_chan.send({topic: quote}) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_symbol_search( | ||||
|     ctx: tractor.Context, | ||||
| 
 | ||||
| ) -> Client: | ||||
|     async with open_cached_client('kraken') as client: | ||||
| 
 | ||||
|         # load all symbols locally for fast search | ||||
|         cache = await client.cache_symbols() | ||||
|         await ctx.started(cache) | ||||
| 
 | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             async for pattern in stream: | ||||
| 
 | ||||
|                 matches = fuzzy.extractBests( | ||||
|                     pattern, | ||||
|                     cache, | ||||
|                     score_cutoff=50, | ||||
|                 ) | ||||
|                 # repack in dict form | ||||
|                 await stream.send( | ||||
|                     {item[0]['altname']: item[0] | ||||
|                      for item in matches} | ||||
|                 ) | ||||
|  | @ -22,10 +22,54 @@ from enum import Enum | |||
| from typing import Optional | ||||
| 
 | ||||
| from bidict import bidict | ||||
| from pydantic import BaseModel, validator | ||||
| 
 | ||||
| from ..data._source import Symbol | ||||
| from ..data.types import Struct | ||||
| from ..pp import Position | ||||
| from ._messages import BrokerdPosition, Status | ||||
| 
 | ||||
| 
 | ||||
| class Position(BaseModel): | ||||
|     ''' | ||||
|     Basic pp (personal position) model with attached fills history. | ||||
| 
 | ||||
|     This type should be IPC wire ready? | ||||
| 
 | ||||
|     ''' | ||||
|     symbol: Symbol | ||||
| 
 | ||||
|     # last size and avg entry price | ||||
|     size: float | ||||
|     avg_price: float  # TODO: contextual pricing | ||||
| 
 | ||||
|     # ordered record of known constituent trade messages | ||||
|     fills: list[Status] = [] | ||||
| 
 | ||||
|     def update_from_msg( | ||||
|         self, | ||||
|         msg: BrokerdPosition, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         # XXX: better place to do this? | ||||
|         symbol = self.symbol | ||||
| 
 | ||||
|         lot_size_digits = symbol.lot_size_digits | ||||
|         avg_price, size = ( | ||||
|             round(msg['avg_price'], ndigits=symbol.tick_size_digits), | ||||
|             round(msg['size'], ndigits=lot_size_digits), | ||||
|         ) | ||||
| 
 | ||||
|         self.avg_price = avg_price | ||||
|         self.size = size | ||||
| 
 | ||||
|     @property | ||||
|     def dsize(self) -> float: | ||||
|         ''' | ||||
|         The "dollar" size of the pp, normally in trading (fiat) unit | ||||
|         terms. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.avg_price * self.size | ||||
| 
 | ||||
| 
 | ||||
| _size_units = bidict({ | ||||
|  | @ -40,30 +84,33 @@ SizeUnit = Enum( | |||
| ) | ||||
| 
 | ||||
| 
 | ||||
| class Allocator(Struct): | ||||
| class Allocator(BaseModel): | ||||
| 
 | ||||
|     class Config: | ||||
|         validate_assignment = True | ||||
|         copy_on_model_validation = False | ||||
|         arbitrary_types_allowed = True | ||||
| 
 | ||||
|         # required to get the account validator lookup working? | ||||
|         extra = 'allow' | ||||
|         underscore_attrs_are_private = False | ||||
| 
 | ||||
|     symbol: Symbol | ||||
|     account: Optional[str] = 'paper' | ||||
| 
 | ||||
|     _size_units: bidict[str, Optional[str]] = _size_units | ||||
| 
 | ||||
|     # TODO: for enums this clearly doesn't fucking work, you can't set | ||||
|     # a default at startup by passing in a `dict` but yet you can set | ||||
|     # that value through assignment..for wtv cucked reason.. honestly, pure | ||||
|     # unintuitive garbage. | ||||
|     _size_unit: str = 'currency' | ||||
|     size_unit: str = 'currency' | ||||
|     _size_units: dict[str, Optional[str]] = _size_units | ||||
| 
 | ||||
|     @property | ||||
|     def size_unit(self) -> str: | ||||
|         return self._size_unit | ||||
| 
 | ||||
|     @size_unit.setter | ||||
|     def size_unit(self, v: str) -> Optional[str]: | ||||
|     @validator('size_unit', pre=True) | ||||
|     def maybe_lookup_key(cls, v): | ||||
|         # apply the corresponding enum key for the text "description" value | ||||
|         if v not in _size_units: | ||||
|             v = _size_units.inverse[v] | ||||
|             return _size_units.inverse[v] | ||||
| 
 | ||||
|         assert v in _size_units | ||||
|         self._size_unit = v | ||||
|         return v | ||||
| 
 | ||||
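|     # eg. assignment accepts either side of the bidict (sketch): | ||||
|     # | ||||
|     #   alloc.size_unit = 'currency'   # a key, passes through | ||||
|     #   alloc.size_unit = '# units'    # a value, remapped via .inverse | ||||
| 
 | ||||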
|     # TODO: if we ever want to support non-uniform entry-slot-proportion | ||||
|  | @ -96,7 +143,7 @@ class Allocator(Struct): | |||
|     def next_order_info( | ||||
|         self, | ||||
| 
 | ||||
|         # we only need a startup size for exit calcs, we can then | ||||
|         # we only need a startup size for exit calcs, we can then | ||||
|         # determine how large slots should be if the initial pp size was | ||||
|         # larger than the current live one, and the live one is smaller | ||||
|         # than the initial config settings. | ||||
|  | @ -126,7 +173,7 @@ class Allocator(Struct): | |||
|             l_sub_pp = self.units_limit - abs_live_size | ||||
| 
 | ||||
|         elif size_unit == 'currency': | ||||
|             live_cost_basis = abs_live_size * live_pp.ppu | ||||
|             live_cost_basis = abs_live_size * live_pp.avg_price | ||||
|             slot_size = currency_per_slot / price | ||||
|             l_sub_pp = (self.currency_limit - live_cost_basis) / price | ||||
| 
 | ||||
|  | @ -137,14 +184,12 @@ class Allocator(Struct): | |||
| 
 | ||||
|         # an entry (adding-to or starting a pp) | ||||
|         if ( | ||||
|             action == 'buy' and live_size > 0 or | ||||
|             action == 'sell' and live_size < 0 or | ||||
|             live_size == 0 | ||||
|             or (action == 'buy' and live_size > 0) | ||||
|             or (action == 'sell' and live_size < 0) | ||||
|         ): | ||||
|             order_size = min( | ||||
|                 slot_size, | ||||
|                 max(l_sub_pp, 0), | ||||
|             ) | ||||
| 
 | ||||
|             order_size = min(slot_size, l_sub_pp) | ||||
| 
 | ||||
|         # an exit (removing-from or going to net-zero pp) | ||||
|         else: | ||||
|  | @ -160,7 +205,7 @@ class Allocator(Struct): | |||
|             if size_unit == 'currency': | ||||
|                 # compute the "projected" limit's worth of units at the | ||||
|                 # current pp (weighted) price: | ||||
|                 slot_size = currency_per_slot / live_pp.ppu | ||||
|                 slot_size = currency_per_slot / live_pp.avg_price | ||||
| 
 | ||||
|             else: | ||||
|                 slot_size = u_per_slot | ||||
|  | @ -199,12 +244,7 @@ class Allocator(Struct): | |||
|         if order_size < slot_size: | ||||
|             # compute a fractional slots size to display | ||||
|             slots_used = self.slots_used( | ||||
|                 Position( | ||||
|                     symbol=sym, | ||||
|                     size=order_size, | ||||
|                     ppu=price, | ||||
|                     bsuid=sym, | ||||
|                 ) | ||||
|                 Position(symbol=sym, size=order_size, avg_price=price) | ||||
|             ) | ||||
| 
 | ||||
|         return { | ||||
|  | @ -231,8 +271,8 @@ class Allocator(Struct): | |||
|         abs_pp_size = abs(pp.size) | ||||
| 
 | ||||
|         if self.size_unit == 'currency': | ||||
|             # live_currency_size = size or (abs_pp_size * pp.ppu) | ||||
|             live_currency_size = abs_pp_size * pp.ppu | ||||
|             # live_currency_size = size or (abs_pp_size * pp.avg_price) | ||||
|             live_currency_size = abs_pp_size * pp.avg_price | ||||
|             prop = live_currency_size / self.currency_limit | ||||
| 
 | ||||
|         else: | ||||
|  | @ -244,6 +284,14 @@ class Allocator(Struct): | |||
|         return round(prop * self.slots) | ||||
| 
 | ||||
| 
 | ||||
| _derivs = ( | ||||
|     'future', | ||||
|     'continuous_future', | ||||
|     'option', | ||||
|     'futures_option', | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def mk_allocator( | ||||
| 
 | ||||
|     symbol: Symbol, | ||||
|  | @ -252,7 +300,7 @@ def mk_allocator( | |||
|     # default allocation settings | ||||
|     defaults: dict[str, float] = { | ||||
|         'account': None,  # select paper by default | ||||
|         # 'size_unit': 'currency', | ||||
|         'size_unit': 'currency', | ||||
|         'units_limit': 400, | ||||
|         'currency_limit': 5e3, | ||||
|         'slots': 4, | ||||
|  | @ -270,9 +318,42 @@ def mk_allocator( | |||
|         'currency_limit': 6e3, | ||||
|         'slots': 6, | ||||
|     } | ||||
| 
 | ||||
|     defaults.update(user_def) | ||||
| 
 | ||||
|     return Allocator( | ||||
|     alloc = Allocator( | ||||
|         symbol=symbol, | ||||
|         **defaults, | ||||
|     ) | ||||
| 
 | ||||
|     asset_type = symbol.type_key | ||||
| 
 | ||||
|     # specific configs by asset class / type | ||||
| 
 | ||||
|     if asset_type in _derivs: | ||||
|         # since it's harder to know how currency "applies" in this case | ||||
|         # given leverage properties | ||||
|         alloc.size_unit = '# units' | ||||
| 
 | ||||
|         # set units limit to slots size thus making the next | ||||
|         # entry step 1.0 | ||||
|         alloc.units_limit = alloc.slots | ||||
| 
 | ||||
|     # if the current position is already greater than the limit | ||||
|     # settings, increase the limit to the current position | ||||
|     if alloc.size_unit == 'currency': | ||||
|         startup_size = startup_pp.size * startup_pp.avg_price | ||||
| 
 | ||||
|         if startup_size > alloc.currency_limit: | ||||
|             alloc.currency_limit = round(startup_size, ndigits=2) | ||||
| 
 | ||||
|     else: | ||||
|         startup_size = abs(startup_pp.size) | ||||
| 
 | ||||
|         if startup_size > alloc.units_limit: | ||||
|             alloc.units_limit = startup_size | ||||
| 
 | ||||
|             if asset_type in _derivs: | ||||
|                 alloc.slots = alloc.units_limit | ||||
| 
 | ||||
|     return alloc | ||||
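| 
 | ||||
| 
 | ||||
| # eg. building an allocator (sketch; assumes a ``Symbol`` and a | ||||
| # startup ``Position`` are in scope): | ||||
| # | ||||
| #   alloc = mk_allocator(symbol=sym, startup_pp=pp) | ||||
| #   slots = alloc.slots_used(pp) | ||||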
|  |  | |||
|  | @ -31,7 +31,6 @@ from ..log import get_logger | |||
| from ._ems import _emsd_main | ||||
| from .._daemon import maybe_open_emsd | ||||
| from ._messages import Order, Cancel | ||||
| from ..brokers import get_brokermod | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
|  | @ -59,11 +58,11 @@ class OrderBook: | |||
| 
 | ||||
|     def send( | ||||
|         self, | ||||
|         msg: Order | dict, | ||||
|         msg: Order, | ||||
| 
 | ||||
|     ) -> dict: | ||||
|         self._sent_orders[msg.oid] = msg | ||||
|         self._to_ems.send_nowait(msg) | ||||
|         self._to_ems.send_nowait(msg.dict()) | ||||
|         return msg | ||||
| 
 | ||||
|     def update( | ||||
|  | @ -74,8 +73,9 @@ class OrderBook: | |||
| 
 | ||||
|     ) -> dict: | ||||
|         cmd = self._sent_orders[uuid] | ||||
|         msg = cmd.copy(update=data) | ||||
|         self._sent_orders[uuid] = msg | ||||
|         msg = cmd.dict() | ||||
|         msg.update(data) | ||||
|         self._sent_orders[uuid] = Order(**msg) | ||||
|         self._to_ems.send_nowait(msg) | ||||
|         return cmd | ||||
| 
 | ||||
|  | @ -83,18 +83,12 @@ class OrderBook: | |||
|         """Cancel an order (or alert) in the EMS. | ||||
| 
 | ||||
|         """ | ||||
|         cmd = self._sent_orders.get(uuid) | ||||
|         if not cmd: | ||||
|             log.error( | ||||
|                 f'Unknown order {uuid}!?\n' | ||||
|                 f'Maybe there is a stale entry or line?\n' | ||||
|                 f'You should report this as a bug!' | ||||
|             ) | ||||
|         cmd = self._sent_orders[uuid] | ||||
|         msg = Cancel( | ||||
|             oid=uuid, | ||||
|             symbol=cmd.symbol, | ||||
|         ) | ||||
|         self._to_ems.send_nowait(msg) | ||||
|         self._to_ems.send_nowait(msg.dict()) | ||||
| 
 | ||||
| 
 | ||||
| _orders: OrderBook = None | ||||
|  | @ -155,17 +149,10 @@ async def relay_order_cmds_from_sync_code( | |||
|     book = get_orders() | ||||
|     async with book._from_order_book.subscribe() as orders_stream: | ||||
|         async for cmd in orders_stream: | ||||
|             sym = cmd.symbol | ||||
|             msg = pformat(cmd) | ||||
|             if sym == symbol_key: | ||||
|                 log.info(f'Send order cmd:\n{msg}') | ||||
|             if cmd['symbol'] == symbol_key: | ||||
|                 log.info(f'Send order cmd:\n{pformat(cmd)}') | ||||
|                 # send msg over IPC / wire | ||||
|                 await to_ems_stream.send(cmd) | ||||
|             else: | ||||
|                 log.warning( | ||||
|                     f'Ignoring unmatched order cmd for {sym} != {symbol_key}:' | ||||
|                     f'\n{msg}' | ||||
|                 ) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
|  | @ -217,35 +204,20 @@ async def open_ems( | |||
|     from ..data._source import unpack_fqsn | ||||
|     broker, symbol, suffix = unpack_fqsn(fqsn) | ||||
| 
 | ||||
|     mode: str = 'live' | ||||
| 
 | ||||
|     async with maybe_open_emsd(broker) as portal: | ||||
| 
 | ||||
|         mod = get_brokermod(broker) | ||||
|         if not getattr(mod, 'trades_dialogue', None): | ||||
|             mode = 'paper' | ||||
| 
 | ||||
|         async with ( | ||||
|             # connect to emsd | ||||
|             portal.open_context( | ||||
| 
 | ||||
|                 _emsd_main, | ||||
|                 fqsn=fqsn, | ||||
|                 exec_mode=mode, | ||||
| 
 | ||||
|             ) as ( | ||||
|                 ctx, | ||||
|                 ( | ||||
|                     positions, | ||||
|                     accounts, | ||||
|                     dialogs, | ||||
|                 ) | ||||
|             ), | ||||
|             ) as (ctx, (positions, accounts)), | ||||
| 
 | ||||
|             # open 2-way trade command stream | ||||
|             ctx.open_stream() as trades_stream, | ||||
|         ): | ||||
|             # start sync code order msg delivery task | ||||
|             async with trio.open_nursery() as n: | ||||
|                 n.start_soon( | ||||
|                     relay_order_cmds_from_sync_code, | ||||
|  | @ -253,10 +225,4 @@ async def open_ems( | |||
|                     trades_stream | ||||
|                 ) | ||||
| 
 | ||||
|                 yield ( | ||||
|                     book, | ||||
|                     trades_stream, | ||||
|                     positions, | ||||
|                     accounts, | ||||
|                     dialogs, | ||||
|                 ) | ||||
|                 yield book, trades_stream, positions, accounts | ||||
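| 
 | ||||
| 
 | ||||
| # eg. client-side usage (sketch): | ||||
| # | ||||
| #   async with open_ems(fqsn) as ( | ||||
| #       book, trades_stream, positions, accounts, | ||||
| #   ): | ||||
| #       book.send(order) | ||||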
|  |  | |||
										
											
												File diff suppressed because it is too large
							|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -15,95 +15,22 @@ | |||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Clearing sub-system message and protocols. | ||||
| Clearing system messaging types and protocols. | ||||
| 
 | ||||
| """ | ||||
| # from collections import ( | ||||
| #     ChainMap, | ||||
| #     deque, | ||||
| # ) | ||||
| from typing import ( | ||||
|     Optional, | ||||
|     Literal, | ||||
| ) | ||||
| from typing import Optional, Union | ||||
| 
 | ||||
| # TODO: try out just encoding/send direction for now? | ||||
| # import msgspec | ||||
| from pydantic import BaseModel | ||||
| 
 | ||||
| from ..data._source import Symbol | ||||
| from ..data.types import Struct | ||||
| 
 | ||||
| 
 | ||||
| # TODO: a composite for tracking msg flow on 2-legged | ||||
| # dialogs. | ||||
| # class Dialog(ChainMap): | ||||
| #     ''' | ||||
| #     Msg collection abstraction to easily track the state changes of | ||||
| #     a msg flow in one high level, query-able and immutable construct. | ||||
| 
 | ||||
| #     The main use case is to query data from a (long-running) | ||||
| #     msg-transaction-sequence | ||||
| 
 | ||||
| 
 | ||||
| #     ''' | ||||
| #     def update( | ||||
| #         self, | ||||
| #         msg, | ||||
| #     ) -> None: | ||||
| #         self.maps.insert(0, msg.to_dict()) | ||||
| 
 | ||||
| #     def flatten(self) -> dict: | ||||
| #         return dict(self) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: ``msgspec`` stuff worth paying attention to: | ||||
| # - schema evolution: | ||||
| # https://jcristharif.com/msgspec/usage.html#schema-evolution | ||||
| # - for eg. ``BrokerdStatus``, instead just have separate messages? | ||||
| # - use literals for a common msg determined by diff keys? | ||||
| #   - https://jcristharif.com/msgspec/usage.html#literal | ||||
| 
 | ||||
| # -------------- | ||||
| # Client -> emsd | ||||
| # -------------- | ||||
| 
 | ||||
| class Order(Struct): | ||||
| 
 | ||||
|     # TODO: ideally we can combine these 2 fields into | ||||
|     # 1 and just use the size polarity to determine a buy/sell. | ||||
|     # i would like to see this become more like | ||||
|     # https://jcristharif.com/msgspec/usage.html#literal | ||||
|     # action: Literal[ | ||||
|     #     'live', | ||||
|     #     'dark', | ||||
|     #     'alert', | ||||
|     # ] | ||||
| 
 | ||||
|     action: Literal[ | ||||
|         'buy', | ||||
|         'sell', | ||||
|         'alert', | ||||
|     ] | ||||
|     # determines whether the create execution | ||||
|     # will be submitted to the ems or directly to | ||||
|     # the backend broker | ||||
|     exec_mode: Literal[ | ||||
|         'dark', | ||||
|         'live', | ||||
|         # 'paper',  no right? | ||||
|     ] | ||||
| 
 | ||||
|     # internal ``emdsd`` unique "order id" | ||||
|     oid: str  # uuid4 | ||||
|     symbol: str | Symbol | ||||
|     account: str  # should we set a default as '' ? | ||||
| 
 | ||||
|     price: float | ||||
|     size: float  # -ve is "sell", +ve is "buy" | ||||
| 
 | ||||
|     brokers: Optional[list[str]] = [] | ||||
| 
 | ||||
| 
 | ||||
| class Cancel(Struct): | ||||
|     ''' | ||||
|     Cancel msg for removing a dark (ems triggered) or | ||||
| class Cancel(BaseModel): | ||||
|     '''Cancel msg for removing a dark (ems triggered) or | ||||
|     broker-submitted (live) trigger/order. | ||||
| 
 | ||||
|     ''' | ||||
|  | @ -112,57 +39,82 @@ class Cancel(Struct): | |||
|     symbol: str | ||||
| 
 | ||||
| 
 | ||||
| # -------------- | ||||
| class Order(BaseModel): | ||||
| 
 | ||||
|     action: str  # {'buy', 'sell', 'alert'} | ||||
|     # internal ``emdsd`` unique "order id" | ||||
|     oid: str  # uuid4 | ||||
|     symbol: Union[str, Symbol] | ||||
|     account: str  # should we set a default as '' ? | ||||
| 
 | ||||
|     price: float | ||||
|     size: float | ||||
|     brokers: list[str] | ||||
| 
 | ||||
|     # Assigned once initial ack is received | ||||
|     # ack_time_ns: Optional[int] = None | ||||
| 
 | ||||
|     # determines whether the create execution | ||||
|     # will be submitted to the ems or directly to | ||||
|     # the backend broker | ||||
|     exec_mode: str  # {'dark', 'live', 'paper'} | ||||
| 
 | ||||
|     class Config: | ||||
|         # just for pre-loading a ``Symbol`` when used | ||||
|         # in the order mode staging process | ||||
|         arbitrary_types_allowed = True | ||||
|         # don't copy this model instance when used in | ||||
|         # a recursive model | ||||
|         copy_on_model_validation = False | ||||
| 
 | ||||
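| # eg. a live limit submission (sketch; field values are purely | ||||
| # illustrative and ``uuid4`` is from the stdlib ``uuid`` module): | ||||
| # | ||||
| #   Order( | ||||
| #       action='buy', | ||||
| #       oid=str(uuid4()), | ||||
| #       symbol='xbtusd.kraken', | ||||
| #       account='paper', | ||||
| #       price=10e3, | ||||
| #       size=0.1, | ||||
| #       brokers=['kraken'], | ||||
| #       exec_mode='live', | ||||
| #   ) | ||||
| 
 | ||||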
| # Client <- emsd | ||||
| # -------------- | ||||
| # update msgs from ems which relay state change info | ||||
| # from the active clearing engine. | ||||
| 
 | ||||
| class Status(Struct): | ||||
| 
 | ||||
| class Status(BaseModel): | ||||
| 
 | ||||
|     name: str = 'status' | ||||
|     oid: str  # uuid4 | ||||
|     time_ns: int | ||||
|     oid: str  # uuid4 ems-order dialog id | ||||
| 
 | ||||
|     resp: Literal[ | ||||
|       'pending',  # acked by broker but not yet open | ||||
|       'open', | ||||
|       'dark_open',  # dark/algo triggered order is open in ems clearing loop | ||||
|       'triggered',  # above triggered order sent to brokerd, or an alert closed | ||||
|       'closed',  # fully cleared all size/units | ||||
|       'fill',  # partial execution | ||||
|       'canceled', | ||||
|       'error', | ||||
|     ] | ||||
|     # { | ||||
|     #   'dark_submitted', | ||||
|     #   'dark_cancelled', | ||||
|     #   'dark_triggered', | ||||
| 
 | ||||
|     #   'broker_submitted', | ||||
|     #   'broker_cancelled', | ||||
|     #   'broker_executed', | ||||
|     #   'broker_filled', | ||||
|     #   'broker_errored', | ||||
| 
 | ||||
|     #   'alert_submitted', | ||||
|     #   'alert_triggered', | ||||
| 
 | ||||
|     # } | ||||
|     resp: str  # "response", see above | ||||
| 
 | ||||
|     # symbol: str | ||||
| 
 | ||||
|     # trigger info | ||||
|     trigger_price: Optional[float] = None | ||||
|     # price: float | ||||
| 
 | ||||
|     # broker: Optional[str] = None | ||||
| 
 | ||||
|     # this maps normally to the ``BrokerdOrder.reqid`` below, an id | ||||
|     # normally allocated internally by the backend broker routing system | ||||
|     reqid: Optional[int | str] = None | ||||
|     broker_reqid: Optional[Union[int, str]] = None | ||||
| 
 | ||||
|     # the (last) source order/request msg if provided | ||||
|     # (eg. the Order/Cancel which causes this msg) and | ||||
|     # acts as a back-reference to the corresponding | ||||
|     # request message which was the source of this msg. | ||||
|     req: Optional[Order | Cancel] = None | ||||
| 
 | ||||
|     # XXX: better design/name here? | ||||
|     # flag that can be set to indicate a message for an order | ||||
|     # event that wasn't originated by piker's emsd (eg. some external | ||||
|     # trading system which does its own order control but that you | ||||
|     # might want to "track" using piker UIs/systems). | ||||
|     src: Optional[str] = None | ||||
| 
 | ||||
|     # for relaying a boxed brokerd-dialog-side msg data "through" the | ||||
|     # ems layer to clients. | ||||
|     # for relaying backend msg data "through" the ems layer | ||||
|     brokerd_msg: dict = {} | ||||
| 
 | ||||
| 
 | ||||
| # --------------- | ||||
| # emsd -> brokerd | ||||
| # --------------- | ||||
| # requests *sent* from ems to respective backend broker daemon | ||||
| 
 | ||||
| class BrokerdCancel(Struct): | ||||
| class BrokerdCancel(BaseModel): | ||||
| 
 | ||||
|     action: str = 'cancel' | ||||
|     oid: str  # piker emsd order id | ||||
|  | @ -175,38 +127,34 @@ class BrokerdCancel(Struct): | |||
|     # for setting a unique order id then this value will be relayed back | ||||
|     # on the emsd order request stream as the ``BrokerdOrderAck.reqid`` | ||||
|     # field | ||||
|     reqid: Optional[int | str] = None | ||||
|     reqid: Optional[Union[int, str]] = None | ||||
| 
 | ||||
| 
 | ||||
| class BrokerdOrder(Struct): | ||||
| class BrokerdOrder(BaseModel): | ||||
| 
 | ||||
|     action: str  # {buy, sell} | ||||
|     oid: str | ||||
|     account: str | ||||
|     time_ns: int | ||||
| 
 | ||||
|     # TODO: if we instead rely on a +ve/-ve size to determine | ||||
|     # the action we more or less don't need this field right? | ||||
|     action: str = ''  # {buy, sell} | ||||
| 
 | ||||
|     # "broker request id": broker specific/internal order id if this is | ||||
|     # None, creates a new order otherwise if the id is valid the backend | ||||
|     # api must modify the existing matching order. If the broker allows | ||||
|     # for setting a unique order id then this value will be relayed back | ||||
|     # on the emsd order request stream as the ``BrokerdOrderAck.reqid`` | ||||
|     # field | ||||
|     reqid: Optional[int | str] = None | ||||
|     reqid: Optional[Union[int, str]] = None | ||||
| 
 | ||||
|     symbol: str  # fqsn | ||||
|     symbol: str  # symbol.<providername> ? | ||||
|     price: float | ||||
|     size: float | ||||
| 
 | ||||
| 
 | ||||
| # --------------- | ||||
| # emsd <- brokerd | ||||
| # --------------- | ||||
| # requests *received* to ems from broker backend | ||||
| 
 | ||||
| class BrokerdOrderAck(Struct): | ||||
| 
 | ||||
| class BrokerdOrderAck(BaseModel): | ||||
|     ''' | ||||
|     Immediate response to a brokerd order request providing the broker | ||||
|     specific unique order id so that the EMS can associate this | ||||
|  | @ -217,32 +165,39 @@ class BrokerdOrderAck(Struct): | |||
|     name: str = 'ack' | ||||
| 
 | ||||
|     # defined and provided by backend | ||||
|     reqid: int | str | ||||
|     reqid: Union[int, str] | ||||
| 
 | ||||
|     # emsd id originally sent in matching request msg | ||||
|     oid: str | ||||
|     account: str = '' | ||||
| 
 | ||||
| 
 | ||||
| class BrokerdStatus(Struct): | ||||
| class BrokerdStatus(BaseModel): | ||||
| 
 | ||||
|     name: str = 'status' | ||||
|     reqid: int | str | ||||
|     reqid: Union[int, str] | ||||
|     time_ns: int | ||||
|     status: Literal[ | ||||
|         'open', | ||||
|         'canceled', | ||||
|         'fill', | ||||
|         'pending', | ||||
|         'error', | ||||
|     ] | ||||
| 
 | ||||
|     account: str | ||||
|     # XXX: should be best effort set for every update | ||||
|     account: str = '' | ||||
| 
 | ||||
|     # { | ||||
|     #   'submitted', | ||||
|     #   'cancelled', | ||||
|     #   'filled', | ||||
|     # } | ||||
|     status: str | ||||
| 
 | ||||
|     filled: float = 0.0 | ||||
|     reason: str = '' | ||||
|     remaining: float = 0.0 | ||||
| 
 | ||||
|     # external: bool = False | ||||
|     # XXX: better design/name here? | ||||
|     # flag that can be set to indicate a message for an order | ||||
|     # event that wasn't originated by piker's emsd (eg. some external | ||||
|     # trading system which does its own order control but that you | ||||
|     # might want to "track" using piker UIs/systems). | ||||
|     external: bool = False | ||||
| 
 | ||||
|     # XXX: not required schema as of yet | ||||
|     broker_details: dict = { | ||||
|  | @ -250,14 +205,14 @@ class BrokerdStatus(Struct): | |||
|     } | ||||
| 
 | ||||
| 
 | ||||
| class BrokerdFill(Struct): | ||||
| class BrokerdFill(BaseModel): | ||||
|     ''' | ||||
|     A single message indicating a "fill-details" event from the broker | ||||
|     if available. | ||||
| 
 | ||||
|     ''' | ||||
|     name: str = 'fill' | ||||
|     reqid: int | str | ||||
|     reqid: Union[int, str] | ||||
|     time_ns: int | ||||
| 
 | ||||
|     # order execution related | ||||
|  | @ -275,7 +230,7 @@ class BrokerdFill(Struct): | |||
|     broker_time: float | ||||
| 
 | ||||
| 
 | ||||
| class BrokerdError(Struct): | ||||
| class BrokerdError(BaseModel): | ||||
|     ''' | ||||
|     Optional error type that can be relayed to emsd for error handling. | ||||
| 
 | ||||
|  | @ -287,14 +242,14 @@ class BrokerdError(Struct): | |||
| 
 | ||||
|     # if no brokerd order request was actually submitted (eg. we errored | ||||
|     # at the ``pikerd`` layer) then there will be no ``reqid`` allocated. | ||||
|     reqid: Optional[int | str] = None | ||||
|     reqid: Optional[Union[int, str]] = None | ||||
| 
 | ||||
|     symbol: str | ||||
|     reason: str | ||||
|     broker_details: dict = {} | ||||
| 
 | ||||
| 
 | ||||
| class BrokerdPosition(Struct): | ||||
| class BrokerdPosition(BaseModel): | ||||
|     '''Position update event from brokerd. | ||||
| 
 | ||||
|     ''' | ||||
|  | @ -303,6 +258,6 @@ class BrokerdPosition(Struct): | |||
|     broker: str | ||||
|     account: str | ||||
|     symbol: str | ||||
|     currency: str | ||||
|     size: float | ||||
|     avg_price: float | ||||
|     currency: str = '' | ||||
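To make the request/ack round-trip concrete, here is a minimal sketch mirroring the field schema above with plain ``msgspec`` structs (ids and values are illustrative, not from the diff):

```python
import time
from typing import Union
from msgspec import Struct

class BrokerdOrderAck(Struct):
    # broker specific request id, defined and provided by the backend
    reqid: Union[int, str]
    # emsd id originally sent in the matching request msg
    oid: str
    account: str = ''
    name: str = 'ack'

class BrokerdStatus(Struct):
    reqid: Union[int, str]
    time_ns: int
    name: str = 'status'
    account: str = 'paper'
    status: str = 'open'

# a backend acks an EMS order by echoing back its `oid` along with
# the broker-side `reqid` it allocated:
ack = BrokerdOrderAck(reqid=42, oid='8a1bc')  # illustrative ids
status = BrokerdStatus(reqid=42, time_ns=time.time_ns())
```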
|  |  | |||
|  | @ -18,49 +18,33 @@ | |||
| Fake trading for forward testing. | ||||
| 
 | ||||
| """ | ||||
| from collections import defaultdict | ||||
| from contextlib import asynccontextmanager | ||||
| from datetime import datetime | ||||
| from operator import itemgetter | ||||
| import itertools | ||||
| import time | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Callable, | ||||
| ) | ||||
| from typing import Tuple, Optional, Callable | ||||
| import uuid | ||||
| 
 | ||||
| from bidict import bidict | ||||
| import pendulum | ||||
| import trio | ||||
| import tractor | ||||
| from dataclasses import dataclass | ||||
| 
 | ||||
| from .. import data | ||||
| from ..data._source import Symbol | ||||
| from ..data.types import Struct | ||||
| from ..pp import ( | ||||
|     Position, | ||||
|     Transaction, | ||||
| ) | ||||
| from ..data._normalize import iterticks | ||||
| from ..data._source import unpack_fqsn | ||||
| from ..log import get_logger | ||||
| from ._messages import ( | ||||
|     BrokerdCancel, | ||||
|     BrokerdOrder, | ||||
|     BrokerdOrderAck, | ||||
|     BrokerdStatus, | ||||
|     BrokerdFill, | ||||
|     BrokerdPosition, | ||||
|     BrokerdError, | ||||
|     BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus, | ||||
|     BrokerdFill, BrokerdPosition, BrokerdError | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| class PaperBoi(Struct): | ||||
| @dataclass | ||||
| class PaperBoi: | ||||
|     """ | ||||
|     Emulates a broker order client providing the same API and | ||||
|     delivering an order-event response stream but with methods for | ||||
|  | @ -74,15 +58,14 @@ class PaperBoi(Struct): | |||
| 
 | ||||
|     # map of paper "live" orders which be used | ||||
|     # to simulate fills based on paper engine settings | ||||
|     _buys: defaultdict[str, bidict] | ||||
|     _sells: defaultdict[str, bidict] | ||||
|     _buys: bidict | ||||
|     _sells: bidict | ||||
|     _reqids: bidict | ||||
|     _positions: dict[str, Position] | ||||
|     _trade_ledger: dict[str, Any] | ||||
|     _positions: dict[str, BrokerdPosition] | ||||
| 
 | ||||
|     # init edge case L1 spread | ||||
|     last_ask: tuple[float, float] = (float('inf'), 0)  # price, size | ||||
|     last_bid: tuple[float, float] = (0, 0) | ||||
|     last_ask: Tuple[float, float] = (float('inf'), 0)  # price, size | ||||
|     last_bid: Tuple[float, float] = (0, 0) | ||||
| 
 | ||||
|     async def submit_limit( | ||||
|         self, | ||||
|  | @ -92,24 +75,27 @@ class PaperBoi(Struct): | |||
|         action: str, | ||||
|         size: float, | ||||
|         reqid: Optional[str], | ||||
| 
 | ||||
|     ) -> int: | ||||
|         ''' | ||||
|         Place an order and return integer request id provided by client. | ||||
|         """Place an order and return integer request id provided by client. | ||||
| 
 | ||||
|         """ | ||||
|         is_modify: bool = False | ||||
|         if reqid is None: | ||||
|             reqid = str(uuid.uuid4()) | ||||
| 
 | ||||
|         else: | ||||
|             # order is already existing, this is a modify | ||||
|             (oid, symbol, action, old_price) = self._reqids[reqid] | ||||
|             assert old_price != price | ||||
|             is_modify = True | ||||
| 
 | ||||
|         # register order internally | ||||
|         self._reqids[reqid] = (oid, symbol, action, price) | ||||
| 
 | ||||
|         ''' | ||||
|         if action == 'alert': | ||||
|             # bypass all fill simulation | ||||
|             return reqid | ||||
| 
 | ||||
|         entry = self._reqids.get(reqid) | ||||
|         if entry: | ||||
|             # order is already existing, this is a modify | ||||
|             (oid, symbol, action, old_price) = entry | ||||
|         else: | ||||
|             # register order internally | ||||
|             self._reqids[reqid] = (oid, symbol, action, price) | ||||
| 
 | ||||
|         # TODO: net latency model | ||||
|         # we checkpoint here quickly particularly | ||||
|         # for dark orders since we want the dark_executed | ||||
|  | @ -121,18 +107,15 @@ class PaperBoi(Struct): | |||
|             size = -size | ||||
| 
 | ||||
|         msg = BrokerdStatus( | ||||
|             status='open', | ||||
|             # account=f'paper_{self.broker}', | ||||
|             account='paper', | ||||
|             status='submitted', | ||||
|             reqid=reqid, | ||||
|             broker=self.broker, | ||||
|             time_ns=time.time_ns(), | ||||
|             filled=0.0, | ||||
|             reason='paper_trigger', | ||||
|             remaining=size, | ||||
| 
 | ||||
|             broker_details={'name': 'paperboi'}, | ||||
|         ) | ||||
|         await self.ems_trades_stream.send(msg) | ||||
|         await self.ems_trades_stream.send(msg.dict()) | ||||
| 
 | ||||
|         # if we're already a clearing price simulate an immediate fill | ||||
|         if ( | ||||
|  | @ -140,28 +123,28 @@ class PaperBoi(Struct): | |||
|             ) or ( | ||||
|             action == 'sell' and (clear_price := self.last_bid[0]) >= price | ||||
|         ): | ||||
|             await self.fake_fill( | ||||
|                 symbol, | ||||
|                 clear_price, | ||||
|                 size, | ||||
|                 action, | ||||
|                 reqid, | ||||
|                 oid, | ||||
|             ) | ||||
|             await self.fake_fill(symbol, clear_price, size, action, reqid, oid) | ||||
| 
 | ||||
|         # register this submission as a paper live order | ||||
|         else: | ||||
|             # set the simulated order in the respective table for lookup | ||||
|             # and trigger by the simulated clearing task normally | ||||
|             # running ``simulate_fills()``. | ||||
|             # register this submission as a paper live order | ||||
| 
 | ||||
|             # submit order to book simulation fill loop | ||||
|             if action == 'buy': | ||||
|                 orders = self._buys | ||||
| 
 | ||||
|             elif action == 'sell': | ||||
|                 orders = self._sells | ||||
| 
 | ||||
|             # {symbol -> bidict[oid, (<price data>)]} | ||||
|             orders[symbol][oid] = (price, size, reqid, action) | ||||
|             # set the simulated order in the respective table for lookup | ||||
|             # and trigger by the simulated clearing task normally | ||||
|             # running ``simulate_fills()``. | ||||
| 
 | ||||
|             if is_modify: | ||||
|                 # remove any existing order for the old price | ||||
|                 orders[symbol].pop((oid, old_price)) | ||||
| 
 | ||||
|             # buys/sells: (symbol  -> (price -> order)) | ||||
|             orders.setdefault(symbol, {})[(oid, price)] = (size, reqid, action) | ||||
| 
 | ||||
|         return reqid | ||||
| 
 | ||||
|  | @ -174,26 +157,26 @@ class PaperBoi(Struct): | |||
|         oid, symbol, action, price = self._reqids[reqid] | ||||
| 
 | ||||
|         if action == 'buy': | ||||
|             self._buys[symbol].pop(oid, None) | ||||
|             self._buys[symbol].pop((oid, price)) | ||||
|         elif action == 'sell': | ||||
|             self._sells[symbol].pop(oid, None) | ||||
|             self._sells[symbol].pop((oid, price)) | ||||
| 
 | ||||
|         # TODO: net latency model | ||||
|         await trio.sleep(0.05) | ||||
| 
 | ||||
|         msg = BrokerdStatus( | ||||
|             status='canceled', | ||||
|             account='paper', | ||||
|             status='cancelled', | ||||
|             oid=oid, | ||||
|             reqid=reqid, | ||||
|             broker=self.broker, | ||||
|             time_ns=time.time_ns(), | ||||
|             broker_details={'name': 'paperboi'}, | ||||
|         ) | ||||
|         await self.ems_trades_stream.send(msg) | ||||
|         await self.ems_trades_stream.send(msg.dict()) | ||||
| 
 | ||||
|     async def fake_fill( | ||||
|         self, | ||||
| 
 | ||||
|         fqsn: str, | ||||
|         symbol: str, | ||||
|         price: float, | ||||
|         size: float, | ||||
|         action: str,  # one of {'buy', 'sell'} | ||||
|  | @ -212,15 +195,16 @@ class PaperBoi(Struct): | |||
|         """ | ||||
|         # TODO: net latency model | ||||
|         await trio.sleep(0.05) | ||||
|         fill_time_ns = time.time_ns() | ||||
|         fill_time_s = time.time() | ||||
| 
 | ||||
|         fill_msg = BrokerdFill( | ||||
|         msg = BrokerdFill( | ||||
| 
 | ||||
|             reqid=reqid, | ||||
|             time_ns=fill_time_ns, | ||||
|             time_ns=time.time_ns(), | ||||
| 
 | ||||
|             action=action, | ||||
|             size=size, | ||||
|             price=price, | ||||
| 
 | ||||
|             broker_time=datetime.now().timestamp(), | ||||
|             broker_details={ | ||||
|                 'paper_info': { | ||||
|  | @ -230,66 +214,79 @@ class PaperBoi(Struct): | |||
|                 'name': self.broker + '_paper', | ||||
|             }, | ||||
|         ) | ||||
|         await self.ems_trades_stream.send(fill_msg) | ||||
| 
 | ||||
|         self._trade_ledger.update(fill_msg.to_dict()) | ||||
|         await self.ems_trades_stream.send(msg.dict()) | ||||
| 
 | ||||
|         if order_complete: | ||||
| 
 | ||||
|             msg = BrokerdStatus( | ||||
| 
 | ||||
|                 reqid=reqid, | ||||
|                 time_ns=time.time_ns(), | ||||
|                 # account=f'paper_{self.broker}', | ||||
|                 account='paper', | ||||
|                 status='closed', | ||||
| 
 | ||||
|                 status='filled', | ||||
|                 filled=size, | ||||
|                 remaining=0 if order_complete else remaining, | ||||
|             ) | ||||
|             await self.ems_trades_stream.send(msg) | ||||
| 
 | ||||
|         # lookup any existing position | ||||
|         key = fqsn.removesuffix(f'.{self.broker}')  # NB: rstrip() strips a char set, not a suffix | ||||
|         pp = self._positions.setdefault( | ||||
|             fqsn, | ||||
|             Position( | ||||
|                 Symbol( | ||||
|                     key=key, | ||||
|                     broker_info={self.broker: {}}, | ||||
|                 ), | ||||
|                 size=size, | ||||
|                 ppu=price, | ||||
|                 bsuid=key, | ||||
|             ) | ||||
|         ) | ||||
|         t = Transaction( | ||||
|             fqsn=fqsn, | ||||
|             tid=oid, | ||||
|             action=action, | ||||
|             size=size, | ||||
|             price=price, | ||||
|             cost=0,  # TODO: cost model | ||||
|             dt=pendulum.from_timestamp(fill_time_s), | ||||
|             bsuid=key, | ||||
|         ) | ||||
|         pp.add_clear(t) | ||||
| 
 | ||||
|         pp_msg = BrokerdPosition( | ||||
|                 broker_details={ | ||||
|                     'paper_info': { | ||||
|                         'oid': oid, | ||||
|                     }, | ||||
|                     'name': self.broker, | ||||
|                 }, | ||||
|             ) | ||||
|             await self.ems_trades_stream.send(msg.dict()) | ||||
| 
 | ||||
|         # lookup any existing position | ||||
|         token = f'{symbol}.{self.broker}' | ||||
|         pp_msg = self._positions.setdefault( | ||||
|             token, | ||||
|             BrokerdPosition( | ||||
|                 broker=self.broker, | ||||
|                 account='paper', | ||||
|             symbol=fqsn, | ||||
|                 symbol=symbol, | ||||
|                 # TODO: we need to look up the asset currency from | ||||
|                 # broker info. i guess for crypto this can be | ||||
|                 # inferred from the pair? | ||||
|                 currency='', | ||||
|             size=pp.size, | ||||
|             avg_price=pp.ppu, | ||||
|                 size=0.0, | ||||
|                 avg_price=0, | ||||
|             ) | ||||
|         ) | ||||
| 
 | ||||
|         await self.ems_trades_stream.send(pp_msg) | ||||
|         # "avg position price" calcs | ||||
|         # TODO: eventually it'd be nice to have a small set of routines | ||||
|         # to do this stuff from a sequence of cleared orders to enable | ||||
|         # so called "contextual positions". | ||||
|         new_size = size + pp_msg.size | ||||
| 
 | ||||
|         # abs(new size) minus abs(old size) gives us the size differential with | ||||
|         # +ve -> increase in pp size | ||||
|         # -ve -> decrease in pp size | ||||
|         size_diff = abs(new_size) - abs(pp_msg.size) | ||||
| 
 | ||||
|         if new_size == 0: | ||||
|             pp_msg.avg_price = 0 | ||||
| 
 | ||||
|         elif size_diff > 0: | ||||
|             # only update the "average position price" when the position | ||||
|             # size increases not when it decreases (i.e. the position is | ||||
|             # being made smaller) | ||||
|             pp_msg.avg_price = ( | ||||
|                 abs(size) * price + pp_msg.avg_price * abs(pp_msg.size) | ||||
|             ) / abs(new_size) | ||||
| 
 | ||||
|         pp_msg.size = new_size | ||||
| 
 | ||||
|         await self.ems_trades_stream.send(pp_msg.dict()) | ||||
| 
 | ||||
| 
 | ||||
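As a quick sanity check of the average-price branch above, a worked example (not part of the diff) showing that only size increases re-average:

```python
def update_avg_price(
    pp_size: float,
    avg: float,
    size: float,
    price: float,
) -> tuple[float, float]:
    # mirrors the branch logic above: only re-average when |size| grows
    new_size = size + pp_size
    size_diff = abs(new_size) - abs(pp_size)
    if new_size == 0:
        avg = 0
    elif size_diff > 0:
        avg = (abs(size) * price + avg * abs(pp_size)) / abs(new_size)
    return new_size, avg

size, avg = update_avg_price(0, 0, 10, 100.0)       # -> (10, 100.0)
size, avg = update_avg_price(size, avg, 10, 110.0)  # -> (20, 105.0)
size, avg = update_avg_price(size, avg, -5, 120.0)  # sell: size shrinks, avg stays 105.0
```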
| async def simulate_fills( | ||||
|     quote_stream: tractor.MsgStream,  # noqa | ||||
|     quote_stream: 'tractor.ReceiveStream',  # noqa | ||||
|     client: PaperBoi, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     # TODO: more machinery to better simulate real-world market things: | ||||
|  | @ -309,103 +306,61 @@ async def simulate_fills( | |||
| 
 | ||||
|     # this stream may eventually contain multiple symbols | ||||
|     async for quotes in quote_stream: | ||||
| 
 | ||||
|         for sym, quote in quotes.items(): | ||||
| 
 | ||||
|             for tick in iterticks( | ||||
|                 quote, | ||||
|                 # dark order price filter(s) | ||||
|                 types=('ask', 'bid', 'trade', 'last') | ||||
|             ): | ||||
|                 tick_price = tick['price'] | ||||
|                 # print(tick) | ||||
|                 tick_price = tick.get('price') | ||||
|                 ttype = tick['type'] | ||||
| 
 | ||||
|                 buys: bidict[str, tuple] = client._buys[sym] | ||||
|                 iter_buys = reversed(sorted( | ||||
|                     buys.values(), | ||||
|                     key=itemgetter(0), | ||||
|                 )) | ||||
|                 if ttype in ('ask',): | ||||
| 
 | ||||
|                 def buy_on_ask(our_price): | ||||
|                     return tick_price <= our_price | ||||
| 
 | ||||
|                 sells: bidict[str, tuple] = client._sells[sym] | ||||
|                 iter_sells = sorted( | ||||
|                     sells.values(), | ||||
|                     key=itemgetter(0) | ||||
|                 ) | ||||
| 
 | ||||
|                 def sell_on_bid(our_price): | ||||
|                     return tick_price >= our_price | ||||
| 
 | ||||
|                 match tick: | ||||
|                     case { | ||||
|                         'price': tick_price, | ||||
|                         # 'type': ('ask' | 'trade' | 'last'), | ||||
|                         'type': 'ask', | ||||
|                     }: | ||||
|                     client.last_ask = ( | ||||
|                         tick_price, | ||||
|                         tick.get('size', client.last_ask[1]), | ||||
|                     ) | ||||
| 
 | ||||
|                         iter_entries = zip( | ||||
|                             iter_buys, | ||||
|                             itertools.repeat(buy_on_ask) | ||||
|                         ) | ||||
|                     orders = client._buys.get(sym, {}) | ||||
| 
 | ||||
|                     book_sequence = reversed( | ||||
|                         sorted(orders.keys(), key=itemgetter(1))) | ||||
| 
 | ||||
|                     def pred(our_price): | ||||
|                         return tick_price < our_price | ||||
| 
 | ||||
|                 elif ttype in ('bid',): | ||||
| 
 | ||||
|                     case { | ||||
|                         'price': tick_price, | ||||
|                         # 'type': ('bid' | 'trade' | 'last'), | ||||
|                         'type': 'bid', | ||||
|                     }: | ||||
|                     client.last_bid = ( | ||||
|                         tick_price, | ||||
|                         tick.get('size', client.last_bid[1]), | ||||
|                     ) | ||||
| 
 | ||||
|                         iter_entries = zip( | ||||
|                             iter_sells, | ||||
|                             itertools.repeat(sell_on_bid) | ||||
|                         ) | ||||
|                     orders = client._sells.get(sym, {}) | ||||
|                     book_sequence = sorted(orders.keys(), key=itemgetter(1)) | ||||
| 
 | ||||
|                     case { | ||||
|                         'price': tick_price, | ||||
|                         'type': ('trade' | 'last'), | ||||
|                     }: | ||||
|                         # in the clearing price / last price case we | ||||
|                         # want to iterate both sides of our book for | ||||
|                         # clears since we don't know which direction the | ||||
|                         # price is going to move (especially with HFT) | ||||
|                         # and thus we simply interleave both sides (buys | ||||
|                         # and sells) until one side clears and then | ||||
|                         # break until the next tick? | ||||
|                         def interleave(): | ||||
|                             for pair in zip( | ||||
|                                 iter_buys, | ||||
|                                 iter_sells, | ||||
|                             ): | ||||
|                                 for order_info, pred in zip( | ||||
|                                     pair, | ||||
|                                     itertools.cycle([buy_on_ask, sell_on_bid]), | ||||
|                                 ): | ||||
|                                     yield order_info, pred | ||||
|                     def pred(our_price): | ||||
|                         return tick_price > our_price | ||||
| 
 | ||||
|                         iter_entries = interleave() | ||||
|                 elif ttype in ('trade', 'last'): | ||||
|                     # TODO: simulate actual book queues and our orders | ||||
|                     # place in it, might require full L2 data? | ||||
|                     continue | ||||
| 
 | ||||
|                 # iterate all potentially clearable book prices | ||||
|                 # in FIFO order per side. | ||||
|                 for order_info, pred in iter_entries: | ||||
|                     (our_price, size, reqid, action) = order_info | ||||
|                 # iterate book prices descending | ||||
|                 for oid, our_price in book_sequence: | ||||
|                     if pred(our_price): | ||||
| 
 | ||||
|                     clearable = pred(our_price) | ||||
|                     if clearable: | ||||
|                         # pop and retrieve order info | ||||
|                         oid = { | ||||
|                             'buy': buys, | ||||
|                             'sell': sells | ||||
|                         }[action].inverse.pop(order_info) | ||||
|                         # retrieve order info | ||||
|                         (size, reqid, action) = orders.pop((oid, our_price)) | ||||
| 
 | ||||
|                         # clearing price would have filled entirely | ||||
|                         await client.fake_fill( | ||||
|                             fqsn=sym, | ||||
|                             symbol=sym, | ||||
|                             # todo slippage to determine fill price | ||||
|                             price=tick_price, | ||||
|                             size=size, | ||||
|  | @ -413,6 +368,9 @@ async def simulate_fills( | |||
|                             reqid=reqid, | ||||
|                             oid=oid, | ||||
|                         ) | ||||
|                     else: | ||||
|                         # prices are iterated in sorted order so we're done | ||||
|                         break | ||||
| 
 | ||||
| 
 | ||||
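The clearing predicates reduce to: a resting buy fills when the ask trades at or through its limit, a resting sell when the bid does. A standalone sketch of the same book walk, with an illustrative order table:

```python
from operator import itemgetter

# illustrative resting buys: oid -> (price, size, reqid, action)
book = {
    'a': (99.0, 1.0, 1, 'buy'),
    'b': (101.0, 1.0, 2, 'buy'),
}
tick_price = 100.5  # incoming ask

# walk resting buys from highest limit price down; stop at the first
# order that can't clear since everything below it is priced worse
for price, size, reqid, action in sorted(
    book.values(),
    key=itemgetter(0),
    reverse=True,
):
    if tick_price <= price:
        print(f'fill reqid={reqid} @ {tick_price}')  # 'b' clears
    else:
        break
```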
| async def handle_order_requests( | ||||
|  | @ -422,83 +380,68 @@ async def handle_order_requests( | |||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     request_msg: dict | ||||
|     # order_request: dict | ||||
|     async for request_msg in ems_order_stream: | ||||
|         match request_msg: | ||||
|             case {'action': ('buy' | 'sell')}: | ||||
|                 order = BrokerdOrder(**request_msg) | ||||
|                 account = order.account | ||||
| 
 | ||||
|                 # error on bad inputs | ||||
|                 reason = None | ||||
|         action = request_msg['action'] | ||||
| 
 | ||||
|         if action in {'buy', 'sell'}: | ||||
| 
 | ||||
|             account = request_msg['account'] | ||||
|             if account != 'paper': | ||||
|                     reason = f'No account found:`{account}` (paper only)?' | ||||
| 
 | ||||
|                 elif order.size == 0: | ||||
|                     reason = 'Invalid size: 0' | ||||
| 
 | ||||
|                 if reason: | ||||
|                     log.error(reason) | ||||
|                 log.error( | ||||
|                     'This is a paper account, only a `paper` selection is valid' | ||||
|                 ) | ||||
|                 await ems_order_stream.send(BrokerdError( | ||||
|                         oid=order.oid, | ||||
|                         symbol=order.symbol, | ||||
|                         reason=reason, | ||||
|                     )) | ||||
|                     oid=request_msg['oid'], | ||||
|                     symbol=request_msg['symbol'], | ||||
|                     reason=f'Paper only. No account found: `{account}` ?', | ||||
|                 ).dict()) | ||||
|                 continue | ||||
| 
 | ||||
|                 reqid = order.reqid or str(uuid.uuid4()) | ||||
| 
 | ||||
|                 # deliver ack that order has been submitted to broker routing | ||||
|                 await ems_order_stream.send( | ||||
|                     BrokerdOrderAck( | ||||
|                         oid=order.oid, | ||||
|                         reqid=reqid, | ||||
|                     ) | ||||
|                 ) | ||||
|             # validate | ||||
|             order = BrokerdOrder(**request_msg) | ||||
| 
 | ||||
|             # call our client api to submit the order | ||||
|             reqid = await client.submit_limit( | ||||
| 
 | ||||
|                 oid=order.oid, | ||||
|                     symbol=f'{order.symbol}.{client.broker}', | ||||
|                 symbol=order.symbol, | ||||
|                 price=order.price, | ||||
|                 action=order.action, | ||||
|                 size=order.size, | ||||
| 
 | ||||
|                 # XXX: by default 0 tells ``ib_insync`` methods that | ||||
|                 # there is no existing order so ask the client to create | ||||
|                 # a new one (which it seems to do by allocating an int | ||||
|                 # counter - collision prone..) | ||||
|                     reqid=reqid, | ||||
|                 reqid=order.reqid, | ||||
|             ) | ||||
| 
 | ||||
|             # elif action == 'cancel': | ||||
|             case {'action': 'cancel'}: | ||||
|             # deliver ack that order has been submitted to broker routing | ||||
|             await ems_order_stream.send( | ||||
|                 BrokerdOrderAck( | ||||
| 
 | ||||
|                     # ems order request id | ||||
|                     oid=order.oid, | ||||
| 
 | ||||
|                     # broker specific request id | ||||
|                     reqid=reqid, | ||||
| 
 | ||||
|                 ).dict() | ||||
|             ) | ||||
| 
 | ||||
|         elif action == 'cancel': | ||||
|             msg = BrokerdCancel(**request_msg) | ||||
| 
 | ||||
|             await client.submit_cancel( | ||||
|                 reqid=msg.reqid | ||||
|             ) | ||||
| 
 | ||||
|             case _: | ||||
|         else: | ||||
|             log.error(f'Unknown order command: {request_msg}') | ||||
| 
 | ||||
| 
 | ||||
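The ``match`` dispatch above keys purely on the ``action`` field of the inbound msg dict; a minimal standalone version for reference:

```python
def dispatch(request_msg: dict) -> str:
    # same structural-pattern dispatch as ``handle_order_requests()``
    match request_msg:
        case {'action': ('buy' | 'sell')}:
            return 'submit'
        case {'action': 'cancel'}:
            return 'cancel'
        case _:
            return 'unknown'

assert dispatch({'action': 'buy', 'oid': '1'}) == 'submit'
assert dispatch({'action': 'cancel', 'reqid': 42}) == 'cancel'
assert dispatch({'foo': 'bar'}) == 'unknown'
```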
| _reqids: bidict[str, tuple] = {} | ||||
| _buys: defaultdict[ | ||||
|     str,  # symbol | ||||
|     bidict[ | ||||
|         str,  # oid | ||||
|         tuple[float, float, str, str],  # order info | ||||
|     ] | ||||
| ] = defaultdict(bidict) | ||||
| _sells: defaultdict[ | ||||
|     str,  # symbol | ||||
|     bidict[ | ||||
|         str,  # oid | ||||
|         tuple[float, float, str, str],  # order info | ||||
|     ] | ||||
| ] = defaultdict(bidict) | ||||
| _positions: dict[str, Position] = {} | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def trades_dialogue( | ||||
| 
 | ||||
|  | @ -508,56 +451,39 @@ async def trades_dialogue( | |||
|     loglevel: str = None, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     tractor.log.get_console_log(loglevel) | ||||
| 
 | ||||
|     async with ( | ||||
| 
 | ||||
|         data.open_feed( | ||||
|             [fqsn], | ||||
|             loglevel=loglevel, | ||||
|         ) as feed, | ||||
| 
 | ||||
|     ): | ||||
|         pp_msgs: list[BrokerdPosition] = [] | ||||
|         pos: Position | ||||
|         token: str  # f'{symbol}.{self.broker}' | ||||
|         for token, pos in _positions.items(): | ||||
|             pp_msgs.append(BrokerdPosition( | ||||
|                 broker=broker, | ||||
|                 account='paper', | ||||
|                 symbol=pos.symbol.front_fqsn(), | ||||
|                 size=pos.size, | ||||
|                 avg_price=pos.ppu, | ||||
|             )) | ||||
| 
 | ||||
|         # TODO: load paper positions per broker from .toml config file | ||||
|         # and pass as symbol to position data mapping: ``dict[str, dict]`` | ||||
|         await ctx.started((pp_msgs, ['paper'])) | ||||
|         # await ctx.started(all_positions) | ||||
|         await ctx.started(({}, {'paper',})) | ||||
| 
 | ||||
|         async with ( | ||||
|             ctx.open_stream() as ems_stream, | ||||
|             trio.open_nursery() as n, | ||||
|         ): | ||||
| 
 | ||||
|             client = PaperBoi( | ||||
|                 broker, | ||||
|                 ems_stream, | ||||
|                 _buys=_buys, | ||||
|                 _sells=_sells, | ||||
|                 _buys={}, | ||||
|                 _sells={}, | ||||
| 
 | ||||
|                 _reqids=_reqids, | ||||
|                 _reqids={}, | ||||
| 
 | ||||
|                 # TODO: load paper positions from ``positions.toml`` | ||||
|                 _positions=_positions, | ||||
| 
 | ||||
|                 # TODO: load positions from ledger file | ||||
|                 _trade_ledger={}, | ||||
|                 _positions={}, | ||||
|             ) | ||||
| 
 | ||||
|             n.start_soon( | ||||
|                 handle_order_requests, | ||||
|                 client, | ||||
|                 ems_stream, | ||||
|             ) | ||||
|             n.start_soon(handle_order_requests, client, ems_stream) | ||||
| 
 | ||||
|             # paper engine simulator clearing task | ||||
|             await simulate_fills(feed.stream, client) | ||||
|  | @ -585,7 +511,6 @@ async def open_paperboi( | |||
|         # (we likely don't need more then one proc for basic | ||||
|         # simulated order clearing) | ||||
|         if portal is None: | ||||
|             log.info('Starting new paper-engine actor') | ||||
|             portal = await tn.start_actor( | ||||
|                 service_name, | ||||
|                 enable_modules=[__name__] | ||||
|  | @ -598,4 +523,5 @@ async def open_paperboi( | |||
|                 loglevel=loglevel, | ||||
| 
 | ||||
|         ) as (ctx, first): | ||||
| 
 | ||||
|             yield ctx, first | ||||
|  |  | |||
|  | @ -83,9 +83,9 @@ def pikerd(loglevel, host, tl, pdb, tsdb): | |||
| 
 | ||||
|                 ) | ||||
|                 log.info( | ||||
|                     f'`marketstored` up!\n' | ||||
|                     f'pid: {pid}\n' | ||||
|                     f'container id: {cid[:12]}\n' | ||||
|                     f'`marketstore` up!\n' | ||||
|                     f'`marketstored` pid: {pid}\n' | ||||
|                     f'docker container id: {cid}\n' | ||||
|                     f'config: {pformat(config)}' | ||||
|                 ) | ||||
| 
 | ||||
|  |  | |||
|  | @ -21,7 +21,6 @@ Broker configuration mgmt. | |||
| import platform | ||||
| import sys | ||||
| import os | ||||
| from os import path | ||||
| from os.path import dirname | ||||
| import shutil | ||||
| from typing import Optional | ||||
|  | @ -112,7 +111,6 @@ if _parent_user: | |||
| 
 | ||||
| _conf_names: set[str] = { | ||||
|     'brokers', | ||||
|     'pps', | ||||
|     'trades', | ||||
|     'watchlists', | ||||
| } | ||||
|  | @ -149,21 +147,19 @@ def get_conf_path( | |||
|     conf_name: str = 'brokers', | ||||
| 
 | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Return the top-level default config path normally under | ||||
|     ``~/.config/piker`` on linux for a given ``conf_name``, the config | ||||
|     name. | ||||
|     """Return the default config path normally under | ||||
|     ``~/.config/piker`` on linux. | ||||
| 
 | ||||
|     Contains files such as: | ||||
|     - brokers.toml | ||||
|     - pp.toml | ||||
|     - watchlists.toml | ||||
|     - trades.toml | ||||
| 
 | ||||
|     # maybe coming soon ;) | ||||
|     - signals.toml | ||||
|     - strats.toml | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     assert conf_name in _conf_names | ||||
|     fn = _conf_fn_w_ext(conf_name) | ||||
|     return os.path.join( | ||||
|  | @ -177,7 +173,7 @@ def repodir(): | |||
|     Return the abspath to the repo directory. | ||||
| 
 | ||||
|     ''' | ||||
|     dirpath = path.abspath( | ||||
|     dirpath = os.path.abspath( | ||||
|         # we're 3 levels down in **this** module file | ||||
|         dirname(dirname(os.path.realpath(__file__))) | ||||
|     ) | ||||
|  | @ -186,9 +182,7 @@ def repodir(): | |||
| 
 | ||||
| def load( | ||||
|     conf_name: str = 'brokers', | ||||
|     path: str = None, | ||||
| 
 | ||||
|     **tomlkws, | ||||
|     path: str = None | ||||
| 
 | ||||
| ) -> (dict, str): | ||||
|     ''' | ||||
|  | @ -196,7 +190,6 @@ def load( | |||
| 
 | ||||
|     ''' | ||||
|     path = path or get_conf_path(conf_name) | ||||
| 
 | ||||
|     if not os.path.isfile(path): | ||||
|         fn = _conf_fn_w_ext(conf_name) | ||||
| 
 | ||||
|  | @ -209,11 +202,8 @@ def load( | |||
|         # if one exists. | ||||
|         if os.path.isfile(template): | ||||
|             shutil.copyfile(template, path) | ||||
|         else: | ||||
|             with open(path, 'w'): | ||||
|                 pass  # touch | ||||
| 
 | ||||
|     config = toml.load(path, **tomlkws) | ||||
|     config = toml.load(path) | ||||
|     log.debug(f"Read config file {path}") | ||||
|     return config, path | ||||
| 
 | ||||
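Usage sketch for the loader/writer pair, assuming the module is importable as ``piker.config`` and using an illustrative kraken section:

```python
from piker import config

# reads ~/.config/piker/brokers.toml on linux, creating it from the
# repo template (or touching an empty file) if it doesn't exist yet
conf, path = config.load('brokers')

conf.setdefault('kraken', {})['key_descr'] = 'api_0'  # illustrative entry
config.write(conf, name='brokers')
```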
|  | @ -222,7 +212,6 @@ def write( | |||
|     config: dict,  # toml config as dict | ||||
|     name: str = 'brokers', | ||||
|     path: str = None, | ||||
|     **toml_kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|  | @ -246,14 +235,11 @@ def write( | |||
|         f"{path}" | ||||
|     ) | ||||
|     with open(path, 'w') as cf: | ||||
|         return toml.dump( | ||||
|             config, | ||||
|             cf, | ||||
|             **toml_kwargs, | ||||
|         ) | ||||
|         return toml.dump(config, cf) | ||||
| 
 | ||||
| 
 | ||||
| def load_accounts( | ||||
| 
 | ||||
|     providers: Optional[list[str]] = None | ||||
| 
 | ||||
| ) -> bidict[str, Optional[str]]: | ||||
|  |  | |||
|  | @ -37,13 +37,8 @@ from docker.models.containers import Container as DockerContainer | |||
| from docker.errors import ( | ||||
|     DockerException, | ||||
|     APIError, | ||||
|     # ContainerError, | ||||
| ) | ||||
| import requests | ||||
| from requests.exceptions import ( | ||||
|     ConnectionError, | ||||
|     ReadTimeout, | ||||
| ) | ||||
| from requests.exceptions import ConnectionError, ReadTimeout | ||||
| 
 | ||||
| from ..log import get_logger, get_console_log | ||||
| from .. import config | ||||
|  | @ -55,8 +50,8 @@ class DockerNotStarted(Exception): | |||
|     'Prolly you dint start da daemon bruh' | ||||
| 
 | ||||
| 
 | ||||
| class ApplicationLogError(Exception): | ||||
|     'App in container reported an error in logs' | ||||
| class ContainerError(RuntimeError): | ||||
|     'Error reported via app-container logging level' | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
|  | @ -101,9 +96,9 @@ async def open_docker( | |||
|         # not perms? | ||||
|         raise | ||||
| 
 | ||||
|     # finally: | ||||
|     #     if client: | ||||
|     #         client.close() | ||||
|     finally: | ||||
|         if client: | ||||
|             client.close() | ||||
| 
 | ||||
| 
 | ||||
| class Container: | ||||
|  | @ -161,7 +156,7 @@ class Container: | |||
| 
 | ||||
|                     # print(f'level: {level}') | ||||
|                     if level in ('error', 'fatal'): | ||||
|                         raise ApplicationLogError(msg) | ||||
|                         raise ContainerError(msg) | ||||
| 
 | ||||
|                 if patt in msg: | ||||
|                     return True | ||||
|  | @ -190,29 +185,12 @@ class Container: | |||
|             if 'is not running' in err.explanation: | ||||
|                 return False | ||||
| 
 | ||||
|     def hard_kill(self, start: float) -> None: | ||||
|         delay = time.time() - start | ||||
|         # get out the big guns, bc apparently marketstore | ||||
|         # doesn't actually know how to terminate gracefully | ||||
|         # :eyeroll:... | ||||
|         log.error( | ||||
|             f'SIGKILL-ing: {self.cntr.id} after {delay}s\n' | ||||
|         ) | ||||
|         self.try_signal('SIGKILL') | ||||
|         self.cntr.wait( | ||||
|             timeout=3, | ||||
|             condition='not-running', | ||||
|         ) | ||||
| 
 | ||||
|     async def cancel( | ||||
|         self, | ||||
|         stop_msg: str, | ||||
|         hard_kill: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         cid = self.cntr.id | ||||
| 
 | ||||
|         # first try a graceful cancel | ||||
|         log.cancel( | ||||
|             f'SIGINT cancelling container: {cid}\n' | ||||
|  | @ -221,25 +199,15 @@ class Container: | |||
|         self.try_signal('SIGINT') | ||||
| 
 | ||||
|         start = time.time() | ||||
|         for _ in range(6): | ||||
|         for _ in range(30): | ||||
| 
 | ||||
|             with trio.move_on_after(0.5) as cs: | ||||
|                 log.cancel('polling for CNTR logs...') | ||||
| 
 | ||||
|                 try: | ||||
|                 cs.shield = True | ||||
|                 await self.process_logs_until(stop_msg) | ||||
|                 except ApplicationLogError: | ||||
|                     hard_kill = True | ||||
|                 else: | ||||
|                     # if we aren't cancelled on above checkpoint then we | ||||
|                     # assume we read the expected stop msg and | ||||
|                     # terminated. | ||||
|                     break | ||||
| 
 | ||||
|             if cs.cancelled_caught: | ||||
|                 # on timeout just try a hard kill after | ||||
|                 # a quick container sync-wait. | ||||
|                 hard_kill = True | ||||
|                 # if we aren't cancelled on above checkpoint then we | ||||
|                 # assume we read the expected stop msg and terminated. | ||||
|                 break | ||||
| 
 | ||||
|             try: | ||||
|                 log.info(f'Polling for container shutdown:\n{cid}') | ||||
|  | @ -250,7 +218,6 @@ class Container: | |||
|                         condition='not-running', | ||||
|                     ) | ||||
| 
 | ||||
|                 # graceful exit if we didn't time out | ||||
|                 break | ||||
| 
 | ||||
|             except ( | ||||
|  | @ -262,22 +229,24 @@ class Container: | |||
|             except ( | ||||
|                 docker.errors.APIError, | ||||
|                 ConnectionError, | ||||
|                 requests.exceptions.ConnectionError, | ||||
|                 trio.Cancelled, | ||||
|             ): | ||||
|                 log.exception('Docker connection failure') | ||||
|                 self.hard_kill(start) | ||||
|                 raise | ||||
| 
 | ||||
|             except trio.Cancelled: | ||||
|                 log.exception('trio cancelled...') | ||||
|                 self.hard_kill(start) | ||||
|                 break | ||||
|         else: | ||||
|             hard_kill = True | ||||
|             delay = time.time() - start | ||||
|             log.error( | ||||
|                 f'Failed to kill container {cid} after {delay}s\n' | ||||
|                 'sending SIGKILL..' | ||||
|             ) | ||||
|             # get out the big guns, bc apparently marketstore | ||||
|             # doesn't actually know how to terminate gracefully | ||||
|             # :eyeroll:... | ||||
|             self.try_signal('SIGKILL') | ||||
|             self.cntr.wait( | ||||
|                 timeout=3, | ||||
|                 condition='not-running', | ||||
|             ) | ||||
| 
 | ||||
|         if hard_kill: | ||||
|             self.hard_kill(start) | ||||
|         else: | ||||
|         log.cancel(f'Container stopped: {cid}') | ||||
| 
 | ||||
| 
 | ||||
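The cancellation pattern here, distilled: signal politely, poll with a timeout, escalate. A hedged sketch using only documented ``docker`` SDK calls (container name illustrative):

```python
import time
import docker
from requests.exceptions import ConnectionError, ReadTimeout

client = docker.from_env()
cntr = client.containers.get('marketstore')  # illustrative container name

cntr.kill('SIGINT')  # ask nicely first
start = time.time()
try:
    cntr.wait(timeout=5, condition='not-running')
except (ReadTimeout, ConnectionError):
    # no graceful exit in time: escalate to a hard kill
    print(f'no exit after {time.time() - start:.1f}s, SIGKILL-ing..')
    cntr.kill('SIGKILL')
    cntr.wait(timeout=3, condition='not-running')
```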
|  | @ -320,12 +289,14 @@ async def open_ahabd( | |||
|         )) | ||||
| 
 | ||||
|         try: | ||||
| 
 | ||||
|             # TODO: we might eventually want a proxy-style msg-prot here | ||||
|             # to allow remote control of containers without needing | ||||
|             # callers to have root perms? | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|         finally: | ||||
|             with trio.CancelScope(shield=True): | ||||
|                 await cntr.cancel(stop_msg) | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -56,7 +56,7 @@ def iterticks( | |||
|                     sig = ( | ||||
|                         time, | ||||
|                         tick['price'], | ||||
|                         tick.get('size') | ||||
|                         tick['size'] | ||||
|                     ) | ||||
| 
 | ||||
|                     if ttype == 'dark_trade': | ||||
|  |  | |||
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -27,14 +27,13 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX | |||
| if _USE_POSIX: | ||||
|     from _posixshmem import shm_unlink | ||||
| 
 | ||||
| # import msgspec | ||||
| import numpy as np | ||||
| from numpy.lib import recfunctions as rfn | ||||
| import tractor | ||||
| import numpy as np | ||||
| from pydantic import BaseModel | ||||
| from numpy.lib import recfunctions as rfn | ||||
| 
 | ||||
| from ..log import get_logger | ||||
| from ._source import base_iohlc_dtype | ||||
| from .types import Struct | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
|  | @ -50,11 +49,7 @@ _rt_buffer_start = int((_days_worth - 1) * _secs_in_day) | |||
| 
 | ||||
| 
 | ||||
| def cuckoff_mantracker(): | ||||
|     ''' | ||||
|     Disable all ``multiprocessing`` "resource tracking" machinery since | ||||
|     it's an absolute multi-threaded mess of non-SC madness. | ||||
| 
 | ||||
|     ''' | ||||
|     from multiprocessing import resource_tracker as mantracker | ||||
| 
 | ||||
|     # Tell the "resource tracker" thing to fuck off. | ||||
|  | @ -112,39 +107,36 @@ class SharedInt: | |||
|                 log.warning(f'Shm for {name} already unlinked?') | ||||
| 
 | ||||
| 
 | ||||
| class _Token(Struct, frozen=True): | ||||
| class _Token(BaseModel): | ||||
|     ''' | ||||
|     Internal representation of a shared memory "token" | ||||
|     which can be used to key a system wide post shm entry. | ||||
| 
 | ||||
|     ''' | ||||
|     class Config: | ||||
|         frozen = True | ||||
| 
 | ||||
|     shm_name: str  # this serves as a "key" value | ||||
|     shm_first_index_name: str | ||||
|     shm_last_index_name: str | ||||
|     dtype_descr: tuple | ||||
|     size: int  # in struct-array index / row terms | ||||
| 
 | ||||
|     @property | ||||
|     def dtype(self) -> np.dtype: | ||||
|         return np.dtype(list(map(tuple, self.dtype_descr))).descr | ||||
| 
 | ||||
|     def as_msg(self): | ||||
|         return self.to_dict() | ||||
|         return self.dict() | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_msg(cls, msg: dict) -> _Token: | ||||
|         if isinstance(msg, _Token): | ||||
|             return msg | ||||
| 
 | ||||
|         # TODO: native struct decoding | ||||
|         # return _token_dec.decode(msg) | ||||
| 
 | ||||
|         msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr'])) | ||||
|         return _Token(**msg) | ||||
| 
 | ||||
| 
 | ||||
| # _token_dec = msgspec.msgpack.Decoder(_Token) | ||||
| 
 | ||||
| # TODO: this api? | ||||
| # _known_tokens = tractor.ActorVar('_shm_tokens', {}) | ||||
| # _known_tokens = tractor.ContextStack('_known_tokens', ) | ||||
|  | @ -163,7 +155,6 @@ def get_shm_token(key: str) -> _Token: | |||
| 
 | ||||
| def _make_token( | ||||
|     key: str, | ||||
|     size: int, | ||||
|     dtype: Optional[np.dtype] = None, | ||||
| ) -> _Token: | ||||
|     ''' | ||||
|  | @ -176,8 +167,7 @@ def _make_token( | |||
|         shm_name=key, | ||||
|         shm_first_index_name=key + "_first", | ||||
|         shm_last_index_name=key + "_last", | ||||
|         dtype_descr=tuple(np.dtype(dtype).descr), | ||||
|         size=size, | ||||
|         dtype_descr=np.dtype(dtype).descr | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
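How the token ties writer and reader together, sketched under the assumption that ``attach_shm_array()`` accepts the msg-form token via ``_Token.from_msg()`` (key and dtype illustrative):

```python
import numpy as np
from piker.data._sharedmem import open_shm_array, attach_shm_array

dtype = np.dtype([('time', 'f8'), ('close', 'f8')])  # illustrative

# writer side: allocates the data segment plus first/last index counters
shm = open_shm_array(key='example.quotes', dtype=dtype)

# the token is plain msg data, so it can be shipped over IPC..
token_msg = shm.token.as_msg()

# ..and a reader (eg. another actor) re-attaches from it
reader = attach_shm_array(token=token_msg, readonly=True)
```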
|  | @ -229,7 +219,6 @@ class ShmArray: | |||
|             shm_first_index_name=self._first._shm.name, | ||||
|             shm_last_index_name=self._last._shm.name, | ||||
|             dtype_descr=tuple(self._array.dtype.descr), | ||||
|             size=self._len, | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|  | @ -444,7 +433,7 @@ class ShmArray: | |||
| def open_shm_array( | ||||
| 
 | ||||
|     key: Optional[str] = None, | ||||
|     size: int = _default_size,  # see above | ||||
|     size: int = _default_size, | ||||
|     dtype: Optional[np.dtype] = None, | ||||
|     readonly: bool = False, | ||||
| 
 | ||||
|  | @ -475,8 +464,7 @@ def open_shm_array( | |||
| 
 | ||||
|     token = _make_token( | ||||
|         key=key, | ||||
|         size=size, | ||||
|         dtype=dtype, | ||||
|         dtype=dtype | ||||
|     ) | ||||
| 
 | ||||
|     # create single entry arrays for storing an first and last indices | ||||
|  | @ -528,7 +516,6 @@ def open_shm_array( | |||
|     # "unlink" created shm on process teardown by | ||||
|     # pushing teardown calls onto actor context stack | ||||
| 
 | ||||
|     # TODO: make this a public API in ``tractor``.. | ||||
|     tractor._actor._lifetime_stack.callback(shmarr.close) | ||||
|     tractor._actor._lifetime_stack.callback(shmarr.destroy) | ||||
| 
 | ||||
|  | @ -537,6 +524,7 @@ def open_shm_array( | |||
| 
 | ||||
| def attach_shm_array( | ||||
|     token: tuple[str, str, tuple[str, str]], | ||||
|     size: int = _default_size, | ||||
|     readonly: bool = True, | ||||
| 
 | ||||
| ) -> ShmArray: | ||||
|  | @ -575,7 +563,7 @@ def attach_shm_array( | |||
|             raise _err | ||||
| 
 | ||||
|     shmarr = np.ndarray( | ||||
|         (token.size,), | ||||
|         (size,), | ||||
|         dtype=token.dtype, | ||||
|         buffer=shm.buf | ||||
|     ) | ||||
|  | @ -643,7 +631,6 @@ def maybe_open_shm_array( | |||
|     use ``attach_shm_array``. | ||||
| 
 | ||||
|     ''' | ||||
|     size = kwargs.pop('size', _default_size) | ||||
|     try: | ||||
|         # see if we already know this key | ||||
|         token = _known_tokens[key] | ||||
|  | @ -651,11 +638,7 @@ def maybe_open_shm_array( | |||
|     except KeyError: | ||||
|         log.warning(f"Could not find {key} in shms cache") | ||||
|         if dtype: | ||||
|             token = _make_token( | ||||
|                 key, | ||||
|                 size=size, | ||||
|                 dtype=dtype, | ||||
|             ) | ||||
|             token = _make_token(key, dtype) | ||||
|             try: | ||||
|                 return attach_shm_array(token=token, **kwargs), False | ||||
|             except FileNotFoundError: | ||||
|  |  | |||
|  | @ -23,7 +23,7 @@ import decimal | |||
| 
 | ||||
| from bidict import bidict | ||||
| import numpy as np | ||||
| from msgspec import Struct | ||||
| from pydantic import BaseModel | ||||
| # from numba import from_dtype | ||||
| 
 | ||||
| 
 | ||||
|  | @ -126,7 +126,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]: | |||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| class Symbol(Struct): | ||||
| class Symbol(BaseModel): | ||||
|     ''' | ||||
|     I guess this is some kinda container thing for dealing with | ||||
|     all the different meta-data formats from brokers? | ||||
|  | @ -152,7 +152,9 @@ class Symbol(Struct): | |||
|         info: dict[str, Any], | ||||
|         suffix: str = '', | ||||
| 
 | ||||
|     ) -> Symbol: | ||||
|     # XXX: like wtf.. | ||||
|     # ) -> 'Symbol': | ||||
|     ) -> None: | ||||
| 
 | ||||
|         tick_size = info.get('price_tick_size', 0.01) | ||||
|         lot_tick_size = info.get('lot_tick_size', 0.0) | ||||
|  | @ -173,7 +175,9 @@ class Symbol(Struct): | |||
|         fqsn: str, | ||||
|         info: dict[str, Any], | ||||
| 
 | ||||
|     ) -> Symbol: | ||||
|     # XXX: like wtf.. | ||||
|     # ) -> 'Symbol': | ||||
|     ) -> None: | ||||
|         broker, key, suffix = unpack_fqsn(fqsn) | ||||
|         return cls.from_broker_info( | ||||
|             broker, | ||||
|  | @ -236,7 +240,7 @@ class Symbol(Struct): | |||
| 
 | ||||
|         ''' | ||||
|         tokens = self.tokens() | ||||
|         fqsn = '.'.join(map(str.lower, tokens)) | ||||
|         fqsn = '.'.join(tokens) | ||||
|         return fqsn | ||||
| 
 | ||||
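The only behavioral change to ``front_fqsn()`` here is the lowercasing of tokens; a one-liner check with an illustrative pair:

```python
tokens = ('XBTUSDT', 'kraken')  # illustrative (key, broker) tokens
fqsn = '.'.join(map(str.lower, tokens))
assert fqsn == 'xbtusdt.kraken'
```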
|     def iterfqsns(self) -> list[str]: | ||||
|  |  | |||
|  | @ -19,9 +19,8 @@ ToOlS fOr CoPInG wITh "tHE wEB" protocols. | |||
| 
 | ||||
| """ | ||||
| from contextlib import asynccontextmanager, AsyncExitStack | ||||
| from itertools import count | ||||
| from types import ModuleType | ||||
| from typing import Any, Optional, Callable, AsyncGenerator, Iterable | ||||
| from typing import Any, Callable, AsyncGenerator | ||||
| import json | ||||
| 
 | ||||
| import trio | ||||
|  | @ -36,8 +35,6 @@ from trio_websocket._impl import ( | |||
| 
 | ||||
| from ..log import get_logger | ||||
| 
 | ||||
| from .types import Struct | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -56,11 +53,13 @@ class NoBsWs: | |||
|     def __init__( | ||||
|         self, | ||||
|         url: str, | ||||
|         token: str, | ||||
|         stack: AsyncExitStack, | ||||
|         fixture: Optional[Callable] = None, | ||||
|         serializer: ModuleType = json | ||||
|         fixture: Callable, | ||||
|         serializer: ModuleType = json, | ||||
|     ): | ||||
|         self.url = url | ||||
|         self.token = token | ||||
|         self.fixture = fixture | ||||
|         self._stack = stack | ||||
|         self._ws: 'WebSocketConnection' = None  # noqa | ||||
|  | @ -83,12 +82,15 @@ class NoBsWs: | |||
|                 self._ws = await self._stack.enter_async_context( | ||||
|                     trio_websocket.open_websocket_url(self.url) | ||||
|                 ) | ||||
| 
 | ||||
|                 if self.fixture is not None: | ||||
|                 # rerun user code fixture | ||||
|                 if self.token == '': | ||||
|                     ret = await self._stack.enter_async_context( | ||||
|                         self.fixture(self) | ||||
|                     ) | ||||
|                 else: | ||||
|                     ret = await self._stack.enter_async_context( | ||||
|                         self.fixture(self, self.token) | ||||
|                     ) | ||||
| 
 | ||||
|                 assert ret is None | ||||
| 
 | ||||
|  | @ -126,26 +128,21 @@ class NoBsWs: | |||
|             except self.recon_errors: | ||||
|                 await self._connect() | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self | ||||
| 
 | ||||
|     async def __anext__(self): | ||||
|         return await self.recv_msg() | ||||
| 
 | ||||
| 
 | ||||
| @asynccontextmanager | ||||
| async def open_autorecon_ws( | ||||
|     url: str, | ||||
| 
 | ||||
|     # TODO: proper type annot smh | ||||
|     fixture: Optional[Callable] = None, | ||||
| 
 | ||||
|     fixture: Callable, | ||||
|     # used for authenticated websockets | ||||
|     token: str = '', | ||||
| ) -> AsyncGenerator[NoBsWs, None]: | ||||
|     """Apparently we can QoS for all sorts of reasons..so catch em. | ||||
| 
 | ||||
|     """ | ||||
|     async with AsyncExitStack() as stack: | ||||
|         ws = NoBsWs(url, stack, fixture=fixture) | ||||
|         ws = NoBsWs(url, token, stack, fixture=fixture) | ||||
|         await ws._connect() | ||||
| 
 | ||||
|         try: | ||||
|  | @ -153,86 +150,3 @@ async def open_autorecon_ws( | |||
| 
 | ||||
|         finally: | ||||
|             await stack.aclose() | ||||
| 
 | ||||
| 
 | ||||
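Typical use of the auto-reconnecting wrapper, sketched with an illustrative endpoint; note the fixture is re-entered on every reconnect, which is what makes re-subscription automatic:

```python
from contextlib import asynccontextmanager
import trio

@asynccontextmanager
async def subscribe(ws):
    # re-run on every reconnect, so subscriptions are automatically
    # re-established after a connection drop
    await ws.send_msg({'event': 'subscribe', 'feed': 'trades'})
    yield

async def main():
    async with open_autorecon_ws(
        'wss://ws.example.com/v1',  # illustrative endpoint
        fixture=subscribe,
    ) as ws:
        while True:
            msg = await ws.recv_msg()
            print(msg)

trio.run(main)
```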
| ''' | ||||
| JSONRPC response-request style machinery for transparent multiplexing of msgs | ||||
| over a NoBsWs. | ||||
| ''' | ||||
| 
 | ||||
| 
 | ||||
| class JSONRPCResult(Struct): | ||||
|     # required fields must precede defaulted ones in a ``Struct`` | ||||
|     id: int | ||||
|     jsonrpc: str = '2.0' | ||||
|     result: Optional[dict] = None | ||||
|     error: Optional[dict] = None | ||||
| 
 | ||||
| 
 | ||||
| @asynccontextmanager | ||||
| async def open_jsonrpc_session( | ||||
|     url: str, | ||||
|     start_id: int = 0, | ||||
|     dtype: type = JSONRPCResult | ||||
| ) -> Callable[[str, dict], dict]: | ||||
| 
 | ||||
|     async with ( | ||||
|         trio.open_nursery() as n, | ||||
|         open_autorecon_ws(url) as ws | ||||
|     ): | ||||
|         rpc_id: Iterable = count(start_id) | ||||
|         rpc_results: dict[int, dict] = {} | ||||
| 
 | ||||
|         async def json_rpc(method: str, params: dict) -> dict: | ||||
|             ''' | ||||
|             Perform a json rpc call and wait for the result; raise an | ||||
|             exception if an error field is present in the response. | ||||
|             ''' | ||||
|             msg = { | ||||
|                 'jsonrpc': '2.0', | ||||
|                 'id': next(rpc_id), | ||||
|                 'method': method, | ||||
|                 'params': params | ||||
|             } | ||||
|             _id = msg['id'] | ||||
| 
 | ||||
|             rpc_results[_id] = { | ||||
|                 'result': None, | ||||
|                 'event': trio.Event() | ||||
|             } | ||||
| 
 | ||||
|             await ws.send_msg(msg) | ||||
| 
 | ||||
|             await rpc_results[_id]['event'].wait() | ||||
| 
 | ||||
|             ret = rpc_results[_id]['result'] | ||||
| 
 | ||||
|             del rpc_results[_id] | ||||
| 
 | ||||
|             if ret.error is not None: | ||||
|                 raise Exception(json.dumps(ret.error, indent=4)) | ||||
| 
 | ||||
|             return ret | ||||
| 
 | ||||
|         async def recv_task(): | ||||
|             ''' | ||||
|             Receives every ws message and stores it in its corresponding | ||||
|             result field, then sets the event to wake up the original | ||||
|             sender task. | ||||
|             ''' | ||||
|             async for msg in ws: | ||||
|                 msg = dtype(**msg) | ||||
| 
 | ||||
|                 if msg.id not in rpc_results: | ||||
|                     log.warning(f'Wasn\'t expecting ws msg: {msg}') | ||||
| 
 | ||||
|                 res = rpc_results.setdefault( | ||||
|                     msg.id, | ||||
|                     {'result': None, 'event': trio.Event()} | ||||
|                 ) | ||||
| 
 | ||||
|                 res['result'] = msg | ||||
|                 res['event'].set() | ||||
| 
 | ||||
| 
 | ||||
|         n.start_soon(recv_task) | ||||
|         yield json_rpc | ||||
|         n.cancel_scope.cancel() | ||||
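And client-side usage of the session helper, again with an illustrative endpoint and method name:

```python
import trio

async def main():
    async with open_jsonrpc_session(
        'wss://test.example.com/ws/api/v2',  # illustrative endpoint
    ) as json_rpc:
        # illustrative method; blocks until the response with the
        # matching id comes back over the socket
        res = await json_rpc('public/get_time', params={})
        print(res.result)

trio.run(main)
```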
|  |  | |||
|  | @ -42,6 +42,7 @@ from trio_typing import TaskStatus | |||
| import trimeter | ||||
| import tractor | ||||
| from tractor.trionics import maybe_open_context | ||||
| from pydantic import BaseModel | ||||
| import pendulum | ||||
| import numpy as np | ||||
| 
 | ||||
|  | @ -58,7 +59,6 @@ from ._sharedmem import ( | |||
|     ShmArray, | ||||
| ) | ||||
| from .ingest import get_ingestormod | ||||
| from .types import Struct | ||||
| from ._source import ( | ||||
|     base_iohlc_dtype, | ||||
|     Symbol, | ||||
|  | @ -84,7 +84,7 @@ if TYPE_CHECKING: | |||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| class _FeedsBus(Struct): | ||||
| class _FeedsBus(BaseModel): | ||||
|     ''' | ||||
|     Data feeds broadcaster and persistence management. | ||||
| 
 | ||||
|  | @ -100,6 +100,10 @@ class _FeedsBus(Struct): | |||
|           a dedicated cancel scope. | ||||
| 
 | ||||
|     ''' | ||||
|     class Config: | ||||
|         arbitrary_types_allowed = True | ||||
|         underscore_attrs_are_private = False | ||||
| 
 | ||||
|     brokername: str | ||||
|     nursery: trio.Nursery | ||||
|     feeds: dict[str, tuple[dict, dict]] = {} | ||||
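For context on the ``Config`` block in the pydantic variant: ``arbitrary_types_allowed`` (pydantic v1, as imported above) is what permits field types pydantic has no validator for, e.g. ``trio.Nursery``. A minimal sketch of the same pattern:

    import trio
    from pydantic import BaseModel

    class Bus(BaseModel):
        class Config:
            # without this, the `trio.Nursery` field below raises at
            # class-definition time since pydantic can't validate it
            arbitrary_types_allowed = True

        brokername: str
        nursery: trio.Nursery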
|  | @ -309,7 +313,7 @@ async def start_backfill( | |||
|             # when no tsdb "last datum" is provided, we just load | ||||
|             # some near-term history. | ||||
|             periods = { | ||||
|                 1: {'seconds': 4000}, | ||||
|                 1: {'days': 1}, | ||||
|                 60: {'days': 14}, | ||||
|             } | ||||
| 
 | ||||
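The ``periods`` tweak above only changes how much near-term 1s history gets backfilled when no tsdb anchor exists. Assuming the kwargs are handed to ``pendulum`` to derive a start time (a sketch; the helper name is made up):

    import pendulum

    def backfill_start(timeframe_s: int) -> pendulum.DateTime:
        # hypothetical helper mapping a timeframe (in seconds) to a
        # "load history since" timestamp via the period kwargs
        periods = {
            1: {'days': 1},    # 310_plus side; was {'seconds': 4000}
            60: {'days': 14},
        }
        return pendulum.now('UTC').subtract(**periods[timeframe_s])

    print(backfill_start(1))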
|  |  | |||
|  | @ -37,7 +37,7 @@ import time | |||
| from math import isnan | ||||
| 
 | ||||
| from bidict import bidict | ||||
| from msgspec.msgpack import encode, decode | ||||
| import msgpack | ||||
| import pyqtgraph as pg | ||||
| import numpy as np | ||||
| import tractor | ||||
|  | @ -774,13 +774,12 @@ async def stream_quotes( | |||
|     async with open_websocket_url(f'ws://{host}:{port}/ws') as ws: | ||||
|         # send subs topics to server | ||||
|         resp = await ws.send_message( | ||||
| 
 | ||||
|             encode({'streams': list(tbks.values())}) | ||||
|             msgpack.dumps({'streams': list(tbks.values())}) | ||||
|         ) | ||||
|         log.info(resp) | ||||
| 
 | ||||
|         async def recv() -> dict[str, Any]: | ||||
|             return decode((await ws.get_message()), encoding='utf-8') | ||||
|             return msgpack.loads((await ws.get_message()), encoding='utf-8') | ||||
| 
 | ||||
|         streams = (await recv())['streams'] | ||||
|         log.info(f"Subscribed to {streams}") | ||||
|  |  | |||
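This hunk swaps ``msgspec``'s msgpack codec for the ``msgpack`` package. As a standalone round-trip check of the wire format (the topic string is made up; note recent msgpack-python releases dropped the ``encoding=`` kwarg shown above in favor of ``raw=False``):

    import msgpack

    payload = {'streams': ['BTCUSD/1Sec/OHLCV']}  # hypothetical topic key
    wire = msgpack.dumps(payload)
    # `raw=False` decodes msgpack strings to `str`, replacing the
    # legacy `encoding='utf-8'` argument removed in msgpack 1.0
    assert msgpack.loads(wire, raw=False) == payload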
|  | @ -1,84 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Built-in (extension) types. | ||||
| 
 | ||||
| """ | ||||
| import sys | ||||
| from typing import Optional | ||||
| from pprint import pformat | ||||
| 
 | ||||
| import msgspec | ||||
| 
 | ||||
| 
 | ||||
| class Struct( | ||||
|     msgspec.Struct, | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     # tag='pikerstruct', | ||||
|     # tag=True, | ||||
| ): | ||||
|     ''' | ||||
|     A "human friendlier" (aka repl buddy) struct subtype. | ||||
| 
 | ||||
|     ''' | ||||
|     def to_dict(self) -> dict: | ||||
|         return { | ||||
|             f: getattr(self, f) | ||||
|             for f in self.__struct_fields__ | ||||
|         } | ||||
| 
 | ||||
|     def __repr__(self): | ||||
|         # only turn on pprint when we detect a python REPL | ||||
|         # at runtime B) | ||||
|         if ( | ||||
|             hasattr(sys, 'ps1') | ||||
|             # TODO: check if we're in pdb | ||||
|         ): | ||||
|             return f'Struct({pformat(self.to_dict())})' | ||||
| 
 | ||||
|         return super().__repr__() | ||||
| 
 | ||||
|     def copy( | ||||
|         self, | ||||
|         update: Optional[dict] = None, | ||||
| 
 | ||||
|     ) -> msgspec.Struct: | ||||
|         ''' | ||||
|         Validate-typecast all self-defined fields and return a copy of us | ||||
|         with all such fields. | ||||
| 
 | ||||
|         This is kinda like the default behaviour in `pydantic.BaseModel`. | ||||
| 
 | ||||
|         ''' | ||||
|         if update: | ||||
|             for k, v in update.items(): | ||||
|                 setattr(self, k, v) | ||||
| 
 | ||||
|         # roundtrip serialize to validate | ||||
|         return msgspec.msgpack.Decoder( | ||||
|             type=type(self) | ||||
|         ).decode( | ||||
|             msgspec.msgpack.Encoder().encode(self) | ||||
|         ) | ||||
| 
 | ||||
|     def typecast( | ||||
|         self, | ||||
|         # fields: Optional[list[str]] = None, | ||||
|     ) -> None: | ||||
|         for fname, ftype in self.__annotations__.items(): | ||||
|             setattr(self, fname, ftype(getattr(self, fname))) | ||||
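A behavioural sketch of the ``Struct`` extension above using a made-up subtype: ``.copy()`` first applies any updates to ``self``, then round-trips through msgspec's msgpack codec so every field gets re-validated.

    class Point(Struct):  # hypothetical example type
        x: int
        y: int

    p = Point(x=1, y=2)
    p2 = p.copy(update={'y': 3})
    assert (p2.x, p2.y) == (1, 3)
    assert p.y == 3  # NOTE: per the impl above, `self` is mutated too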
|  | @ -78,8 +78,7 @@ class Fsp: | |||
|     # + the consuming fsp *to* the consumers output | ||||
|     # shm flow. | ||||
|     _flow_registry: dict[ | ||||
|         tuple[_Token, str], | ||||
|         tuple[_Token, Optional[ShmArray]], | ||||
|         tuple[_Token, str], _Token, | ||||
|     ] = {} | ||||
| 
 | ||||
|     def __init__( | ||||
|  | @ -121,6 +120,7 @@ class Fsp: | |||
|     ): | ||||
|         return self.func(*args, **kwargs) | ||||
| 
 | ||||
|     # TODO: lru_cache this? pretty sure it'll work? | ||||
|     def get_shm( | ||||
|         self, | ||||
|         src_shm: ShmArray, | ||||
|  | @ -131,27 +131,12 @@ class Fsp: | |||
|         for this "instance" of a signal processor for | ||||
|         the given ``key``. | ||||
| 
 | ||||
|         The destination shm "token" and array are cached if possible to | ||||
|         minimize multiple stdlib/system calls. | ||||
| 
 | ||||
|         ''' | ||||
|         dst_token, maybe_array = self._flow_registry[ | ||||
|         dst_token = self._flow_registry[ | ||||
|             (src_shm._token, self.name) | ||||
|         ] | ||||
|         if maybe_array is None: | ||||
|             self._flow_registry[ | ||||
|                 (src_shm._token, self.name) | ||||
|             ] = ( | ||||
|                 dst_token, | ||||
|                 # "cache" the ``ShmArray`` such that | ||||
|                 # we call the underlying "attach" code as few | ||||
|                 # times as possible as per: | ||||
|                 # - https://github.com/pikers/piker/issues/359 | ||||
|                 # - https://github.com/pikers/piker/issues/332 | ||||
|                 maybe_array := attach_shm_array(dst_token) | ||||
|             ) | ||||
| 
 | ||||
|         return maybe_array | ||||
|         shm = attach_shm_array(dst_token) | ||||
|         return shm | ||||
| 
 | ||||
| 
 | ||||
| def fsp( | ||||
|  |  | |||
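The ``lru_cache`` TODO on ``get_shm()`` above would amount to memoizing the attach step, roughly like this sketch (assuming ``_Token`` is hashable):

    from functools import lru_cache

    @lru_cache(maxsize=None)
    def attach_cached(dst_token: _Token) -> ShmArray:
        # pay the (relatively expensive) shm attach cost once per token
        return attach_shm_array(dst_token)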
|  | @ -114,7 +114,7 @@ async def fsp_compute( | |||
|         dict[str, np.ndarray],  # multi-output case | ||||
|         np.ndarray,  # single output case | ||||
|     ] | ||||
|     history_output = await anext(out_stream) | ||||
|     history_output = await out_stream.__anext__() | ||||
| 
 | ||||
|     func_name = func.__name__ | ||||
|     profiler(f'{func_name} generated history') | ||||
|  | @ -284,10 +284,9 @@ async def cascade( | |||
|     # TODO: ugh i hate this wind/unwind to list over the wire | ||||
|     # but not sure how else to do it. | ||||
|     for (token, fsp_name, dst_token) in shm_registry: | ||||
|         Fsp._flow_registry[( | ||||
|             _Token.from_msg(token), | ||||
|             fsp_name, | ||||
|         )] = _Token.from_msg(dst_token), None | ||||
|         Fsp._flow_registry[ | ||||
|             (_Token.from_msg(token), fsp_name) | ||||
|         ] = _Token.from_msg(dst_token) | ||||
| 
 | ||||
|     fsp: Fsp = reg.get( | ||||
|         NamespacePath(ns_path) | ||||
|  | @ -375,8 +374,7 @@ async def cascade( | |||
|                             'key': dst_shm_token, | ||||
|                             'first': dst._first.value, | ||||
|                             'last': dst._last.value, | ||||
|                         } | ||||
|                     }) | ||||
|                     }}) | ||||
|                     return tracker, index | ||||
| 
 | ||||
|                 def is_synced( | ||||
|  |  | |||
975  piker/pp.py
							|  | @ -1,975 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| 
 | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| ''' | ||||
| Personal/Private position parsing, calculating, summarizing in a way | ||||
| that doesn't try to cuk most humans who prefer to not lose their moneys.. | ||||
| (looking at you `ib` and dirt-bird friends) | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import contextmanager as cm | ||||
| from pprint import pformat | ||||
| import os | ||||
| from os import path | ||||
| from math import copysign | ||||
| import re | ||||
| import time | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| import pendulum | ||||
| from pendulum import datetime, now | ||||
| import tomli | ||||
| import toml | ||||
| 
 | ||||
| from . import config | ||||
| from .brokers import get_brokermod | ||||
| from .clearing._messages import BrokerdPosition, Status | ||||
| from .data._source import Symbol | ||||
| from .log import get_logger | ||||
| from .data.types import Struct | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def open_trade_ledger( | ||||
|     broker: str, | ||||
|     account: str, | ||||
| 
 | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Idempotently create and read in a trade log file from the | ||||
|     ``<configuration_dir>/ledgers/`` directory. | ||||
| 
 | ||||
|     Files are named per broker account of the form | ||||
|     ``<brokername>_<accountname>.toml``. The ``accountname`` here is the | ||||
|     name as defined in the user's ``brokers.toml`` config. | ||||
| 
 | ||||
|     ''' | ||||
|     ldir = path.join(config._config_dir, 'ledgers') | ||||
|     if not path.isdir(ldir): | ||||
|         os.makedirs(ldir) | ||||
| 
 | ||||
|     fname = f'trades_{broker}_{account}.toml' | ||||
|     tradesfile = path.join(ldir, fname) | ||||
| 
 | ||||
|     if not path.isfile(tradesfile): | ||||
|         log.info( | ||||
|             f'Creating new local trades ledger: {tradesfile}' | ||||
|         ) | ||||
|         with open(tradesfile, 'w') as cf: | ||||
|             pass  # touch | ||||
|     with open(tradesfile, 'rb') as cf: | ||||
|         start = time.time() | ||||
|         ledger = tomli.load(cf) | ||||
|         print(f'Ledger load took {time.time() - start}s') | ||||
|         cpy = ledger.copy() | ||||
| 
 | ||||
|     try: | ||||
|         yield cpy | ||||
|     finally: | ||||
|         if cpy != ledger: | ||||
|             # TODO: show diff output? | ||||
|             # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries | ||||
|             print(f'Updating ledger for {tradesfile}:\n') | ||||
|             ledger.update(cpy) | ||||
| 
 | ||||
|             # we write on close the mutated ledger data | ||||
|             with open(tradesfile, 'w') as cf: | ||||
|                 toml.dump(ledger, cf) | ||||
| 
 | ||||
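Usage of the ledger context manager above looks like the sketch below (the broker/account names are placeholders); the yielded mapping is a copy, and the TOML file is only rewritten on exit if that copy was mutated.

    with open_trade_ledger('kraken', 'spot') as ledger:  # hypothetical account
        for tid, record in ledger.items():
            # keys are transaction ids, values are raw broker trade records
            print(tid, record)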
| 
 | ||||
| class Transaction(Struct, frozen=True): | ||||
|     # TODO: should this be ``.to`` (see below)? | ||||
|     fqsn: str | ||||
| 
 | ||||
|     tid: Union[str, int]  # unique transaction id | ||||
|     size: float | ||||
|     price: float | ||||
|     cost: float  # commissions or other additional costs | ||||
|     dt: datetime | ||||
|     expiry: Optional[datetime] = None | ||||
| 
 | ||||
|     # optional key normally derived from the broker | ||||
|     # backend which ensures the instrument-symbol this record | ||||
|     # is for is truly unique. | ||||
|     bsuid: Optional[Union[str, int]] = None | ||||
| 
 | ||||
|     # optional fqsn for the source "asset"/money symbol? | ||||
|     # from: Optional[str] = None | ||||
| 
 | ||||
| 
 | ||||
| class Position(Struct): | ||||
|     ''' | ||||
|     Basic pp (personal/piker position) model with attached clearing | ||||
|     transaction history. | ||||
| 
 | ||||
|     ''' | ||||
|     symbol: Symbol | ||||
| 
 | ||||
|     # can be +ve or -ve for long/short | ||||
|     size: float | ||||
| 
 | ||||
|     # "breakeven price" above or below which pnl moves above and below | ||||
|     # zero for the entirety of the current "trade state". | ||||
|     ppu: float | ||||
| 
 | ||||
|     # unique backend symbol id | ||||
|     bsuid: str | ||||
| 
 | ||||
|     split_ratio: Optional[int] = None | ||||
| 
 | ||||
|     # ordered record of known constituent trade messages | ||||
|     clears: dict[ | ||||
|         Union[str, int, Status],  # trade id | ||||
|         dict[str, Any],  # transaction history summaries | ||||
|     ] = {} | ||||
|     first_clear_dt: Optional[datetime] = None | ||||
| 
 | ||||
|     expiry: Optional[datetime] = None | ||||
| 
 | ||||
|     def to_dict(self) -> dict: | ||||
|         return { | ||||
|             f: getattr(self, f) | ||||
|             for f in self.__struct_fields__ | ||||
|         } | ||||
| 
 | ||||
|     def to_pretoml(self) -> tuple[str, dict]: | ||||
|         ''' | ||||
|         Prep this position's data contents for export to toml including | ||||
|         re-structuring of the ``.clears`` table to an array of | ||||
|         inline-subtables for better ``pps.toml`` compactness. | ||||
| 
 | ||||
|         ''' | ||||
|         d = self.to_dict() | ||||
|         clears = d.pop('clears') | ||||
|         expiry = d.pop('expiry') | ||||
| 
 | ||||
|         if self.split_ratio is None: | ||||
|             d.pop('split_ratio') | ||||
| 
 | ||||
|         # should be obvious from clears/event table | ||||
|         d.pop('first_clear_dt') | ||||
| 
 | ||||
|         # TODO: we need to figure out how to have one top level | ||||
|         # listing venue here even when the backend isn't providing | ||||
|         # it via the trades ledger.. | ||||
|         # drop symbol obj in serialized form | ||||
|         s = d.pop('symbol') | ||||
|         fqsn = s.front_fqsn() | ||||
| 
 | ||||
|         if self.expiry is None: | ||||
|             d.pop('expiry', None) | ||||
|         elif expiry: | ||||
|             d['expiry'] = str(expiry) | ||||
| 
 | ||||
|         toml_clears_list = [] | ||||
| 
 | ||||
|         # reverse sort so latest clears are at top of section? | ||||
|         for tid, data in sorted( | ||||
|             list(clears.items()), | ||||
| 
 | ||||
|             # sort by datetime | ||||
|             key=lambda item: item[1]['dt'], | ||||
|         ): | ||||
|             inline_table = toml.TomlDecoder().get_empty_inline_table() | ||||
| 
 | ||||
|             # serialize datetime to parsable `str` | ||||
|             inline_table['dt'] = str(data['dt']) | ||||
| 
 | ||||
|             # insert optional clear fields in column order | ||||
|             for k in ['ppu', 'accum_size']: | ||||
|                 val = data.get(k) | ||||
|                 if val: | ||||
|                     inline_table[k] = val | ||||
| 
 | ||||
|             # insert required fields | ||||
|             for k in ['price', 'size', 'cost']: | ||||
|                 inline_table[k] = data[k] | ||||
| 
 | ||||
|             inline_table['tid'] = tid | ||||
|             toml_clears_list.append(inline_table) | ||||
| 
 | ||||
|         d['clears'] = toml_clears_list | ||||
| 
 | ||||
|         return fqsn, d | ||||
| 
 | ||||
|     def ensure_state(self) -> None: | ||||
|         ''' | ||||
|         Audit the `.size` and `.ppu` local instance vars against | ||||
|         the clears table calculations and return the calc-ed values if | ||||
|         they differ and log warnings to console. | ||||
| 
 | ||||
|         ''' | ||||
|         clears = list(self.clears.values()) | ||||
|         self.first_clear_dt = min(list(entry['dt'] for entry in clears)) | ||||
|         last_clear = clears[-1] | ||||
| 
 | ||||
|         csize = self.calc_size() | ||||
|         accum = last_clear['accum_size'] | ||||
|         if not self.expired(): | ||||
|             if ( | ||||
|                 csize != accum | ||||
|                 and csize != round(accum * (self.split_ratio or 1)) | ||||
|             ): | ||||
|                 raise ValueError(f'Size mismatch: {csize}') | ||||
|         else: | ||||
|             assert csize == 0, 'Contract is expired but non-zero size?' | ||||
| 
 | ||||
|         if self.size != csize: | ||||
|             log.warning( | ||||
|                 'Position state mismatch:\n' | ||||
|                 f'{self.size} => {csize}' | ||||
|             ) | ||||
|             self.size = csize | ||||
| 
 | ||||
|         cppu = self.calc_ppu() | ||||
|         ppu = last_clear['ppu'] | ||||
|         if ( | ||||
|             cppu != ppu | ||||
|             and self.split_ratio is not None | ||||
|             # handle any split info entered (for now) manually by user | ||||
|             and cppu != (ppu / self.split_ratio) | ||||
|         ): | ||||
|             raise ValueError(f'PPU mismatch: {cppu}') | ||||
| 
 | ||||
|         if self.ppu != cppu: | ||||
|             log.warning( | ||||
|                 'Position state mismatch:\n' | ||||
|                 f'{self.ppu} => {cppu}' | ||||
|             ) | ||||
|             self.ppu = cppu | ||||
| 
 | ||||
|     def update_from_msg( | ||||
|         self, | ||||
|         msg: BrokerdPosition, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         # XXX: better place to do this? | ||||
|         symbol = self.symbol | ||||
| 
 | ||||
|         lot_size_digits = symbol.lot_size_digits | ||||
|         ppu, size = ( | ||||
|             round( | ||||
|                 msg['avg_price'], | ||||
|                 ndigits=symbol.tick_size_digits | ||||
|             ), | ||||
|             round( | ||||
|                 msg['size'], | ||||
|                 ndigits=lot_size_digits | ||||
|             ), | ||||
|         ) | ||||
| 
 | ||||
|         self.ppu = ppu | ||||
|         self.size = size | ||||
| 
 | ||||
|     @property | ||||
|     def dsize(self) -> float: | ||||
|         ''' | ||||
|         The "dollar" size of the pp, normally in trading (fiat) unit | ||||
|         terms. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.ppu * self.size | ||||
| 
 | ||||
|     # TODO: idea: "real LIFO" dynamic positioning. | ||||
|     # - when a trade takes place where the pnl for | ||||
|     # the (set of) trade(s) is below the breakeven price | ||||
|     # it may be that the trader took a +ve pnl on a short(er) | ||||
|     # term trade in the same account. | ||||
|     # - in this case we could recalc the be price to | ||||
|     # be reverted back to it's prior value before the nearest term | ||||
|     # trade was opened.? | ||||
|     # def lifo_price() -> float: | ||||
|     #     ... | ||||
| 
 | ||||
|     def calc_ppu( | ||||
|         self, | ||||
|         # include transaction cost in breakeven price | ||||
|         # and presume the worst case of the same cost | ||||
|         # to exit this transaction (even though in reality | ||||
|         # it will be dynamic based on exit strategy). | ||||
|         cost_scalar: float = 2, | ||||
| 
 | ||||
|     ) -> float: | ||||
|         ''' | ||||
|         Compute the "price-per-unit" price for the given non-zero sized | ||||
|         rolling position. | ||||
| 
 | ||||
|         The recurrence relation which computes this (exponential) mean | ||||
|         per new clear which **increases** the accumulative position size | ||||
|         is: | ||||
| 
 | ||||
|         ppu[-1] = ( | ||||
|             ppu[-2] * accum_size[-2] | ||||
|             + | ||||
|             ppu[-1] * size | ||||
|         ) / accum_size[-1] | ||||
| 
 | ||||
|         where `cost_basis` for the current step is simply the price | ||||
|         * size of the most recent clearing transaction. | ||||
| 
 | ||||
|         ''' | ||||
|         asize_h: list[float] = []  # historical accumulative size | ||||
|         ppu_h: list[float] = []  # historical price-per-unit | ||||
| 
 | ||||
|         clears = list(self.clears.items()) | ||||
| 
 | ||||
|         for i, (tid, entry) in enumerate(clears): | ||||
| 
 | ||||
|             clear_size = entry['size'] | ||||
|             clear_price = entry['price'] | ||||
| 
 | ||||
|             last_accum_size = asize_h[-1] if asize_h else 0 | ||||
|             accum_size = last_accum_size + clear_size | ||||
|             accum_sign = copysign(1, accum_size) | ||||
| 
 | ||||
|             sign_change: bool = False | ||||
| 
 | ||||
|             if accum_size == 0: | ||||
|                 ppu_h.append(0) | ||||
|                 asize_h.append(0) | ||||
|                 continue | ||||
| 
 | ||||
|             # test if the pp somehow went "past" a net zero size state | ||||
|             # resulting in a change of the "sign" of the size (+ve for | ||||
|             # long, -ve for short). | ||||
|             sign_change = ( | ||||
|                 copysign(1, last_accum_size) + accum_sign == 0 | ||||
|                 and last_accum_size != 0 | ||||
|             ) | ||||
| 
 | ||||
|             # since we passed the net-zero-size state the new size | ||||
|             # after sum should be the remaining size the new | ||||
|             # "direction" (aka, long vs. short) for this clear. | ||||
|             if sign_change: | ||||
|                 clear_size = accum_size | ||||
|                 abs_diff = abs(accum_size) | ||||
|                 asize_h.append(0) | ||||
|                 ppu_h.append(0) | ||||
| 
 | ||||
|             else: | ||||
|                 # old size minus the new size gives us size diff with | ||||
|                 # +ve -> increase in pp size | ||||
|                 # -ve -> decrease in pp size | ||||
|                 abs_diff = abs(accum_size) - abs(last_accum_size) | ||||
| 
 | ||||
|             # XXX: LIFO breakeven price update. only an increase in size | ||||
|             # of the position contributes to the breakeven price, | ||||
|             # a decrease does not (i.e. the position is being made | ||||
|             # smaller). | ||||
|             # abs_clear_size = abs(clear_size) | ||||
|             abs_new_size = abs(accum_size) | ||||
| 
 | ||||
|             if abs_diff > 0: | ||||
| 
 | ||||
|                 cost_basis = ( | ||||
|                     # cost basis for this clear | ||||
|                     clear_price * abs(clear_size) | ||||
|                     + | ||||
|                     # transaction cost | ||||
|                     accum_sign * cost_scalar * entry['cost'] | ||||
|                 ) | ||||
| 
 | ||||
|                 if asize_h: | ||||
|                     size_last = abs(asize_h[-1]) | ||||
|                     cb_last = ppu_h[-1] * size_last | ||||
|                     ppu = (cost_basis + cb_last) / abs_new_size | ||||
| 
 | ||||
|                 else: | ||||
|                     ppu = cost_basis / abs_new_size | ||||
| 
 | ||||
|                 ppu_h.append(ppu) | ||||
|                 asize_h.append(accum_size) | ||||
| 
 | ||||
|             else: | ||||
|                 # on "exit" clears from a given direction, | ||||
|                 # only the size changes not the price-per-unit | ||||
|                 # need to be updated since the ppu remains constant | ||||
|                 # and gets weighted by the new size. | ||||
|                 asize_h.append(accum_size) | ||||
|                 ppu_h.append(ppu_h[-1]) | ||||
| 
 | ||||
|         final_ppu = ppu_h[-1] if ppu_h else 0 | ||||
| 
 | ||||
|         # handle any split info entered (for now) manually by user | ||||
|         if self.split_ratio is not None: | ||||
|             final_ppu /= self.split_ratio | ||||
| 
 | ||||
|         return final_ppu | ||||
| 
 | ||||
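A worked instance of the recurrence above with the transaction-cost terms zeroed out: buying 2 units @ 100 then 2 more @ 110 gives a ppu of 105.

    clears = [(100.0, 2), (110.0, 2)]  # (price, size), both size-increasing
    accum, ppu = 0.0, 0.0
    for price, size in clears:
        new_accum = accum + size
        # ppu[-1] = (ppu[-2]*accum_size[-2] + price*size) / accum_size[-1]
        ppu = (ppu * accum + price * size) / new_accum
        accum = new_accum
    assert (accum, ppu) == (4.0, 105.0)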
|     def expired(self) -> bool: | ||||
|         ''' | ||||
|         Predicate which checks if the contract/instrument is past its expiry. | ||||
| 
 | ||||
|         ''' | ||||
|         return bool(self.expiry) and self.expiry < now() | ||||
| 
 | ||||
|     def calc_size(self) -> float: | ||||
|         ''' | ||||
|         Calculate the unit size of this position in the destination | ||||
|         asset using the clears/trade event table; zero if expired. | ||||
| 
 | ||||
|         ''' | ||||
|         size: float = 0 | ||||
| 
 | ||||
|         # time-expired pps (normally derivatives) are "closed" | ||||
|         # and have a zero size. | ||||
|         if self.expired(): | ||||
|             return 0 | ||||
| 
 | ||||
|         for tid, entry in self.clears.items(): | ||||
|             size += entry['size'] | ||||
| 
 | ||||
|         if self.split_ratio is not None: | ||||
|             size = round(size * self.split_ratio) | ||||
| 
 | ||||
|         return size | ||||
| 
 | ||||
|     def minimize_clears( | ||||
|         self, | ||||
| 
 | ||||
|     ) -> dict[str, dict]: | ||||
|         ''' | ||||
|         Minimize the position's clears entries by removing | ||||
|         all transactions before the last net zero size to avoid | ||||
|         unnecessary history irrelevant to the current pp state. | ||||
| 
 | ||||
|         ''' | ||||
|         size: float = 0 | ||||
|         clears_since_zero: list[tuple[str, dict]] = [] | ||||
| 
 | ||||
|         # TODO: we might just want to always do this when iterating | ||||
|         # a ledger? keep a state of the last net-zero and only do the | ||||
|         # full iterate when no state was stashed? | ||||
| 
 | ||||
|         # scan for the last "net zero" position by iterating | ||||
|         # transactions until the next net-zero size, rinse, repeat. | ||||
|         for tid, clear in self.clears.items(): | ||||
|             size += clear['size'] | ||||
|             clears_since_zero.append((tid, clear)) | ||||
| 
 | ||||
|             if size == 0: | ||||
|                 clears_since_zero.clear() | ||||
| 
 | ||||
|         self.clears = dict(clears_since_zero) | ||||
|         return self.clears | ||||
| 
 | ||||
|     def add_clear( | ||||
|         self, | ||||
|         t: Transaction, | ||||
|     ) -> dict: | ||||
|         ''' | ||||
|         Update clearing table and populate rolling ppu and accumulative | ||||
|         size in both the clears entry and local attrs state. | ||||
| 
 | ||||
|         ''' | ||||
|         clear = self.clears[t.tid] = { | ||||
|             'cost': t.cost, | ||||
|             'price': t.price, | ||||
|             'size': t.size, | ||||
|             'dt': t.dt, | ||||
|         } | ||||
| 
 | ||||
|         # TODO: compute these incrementally instead | ||||
|         # of re-looping through each time resulting in O(n**2) | ||||
|         # behaviour..? | ||||
| 
 | ||||
|         # NOTE: we compute these **after** adding the entry in order to | ||||
|         # make the recurrence relation math work inside | ||||
|         # ``.calc_size()``. | ||||
|         self.size = clear['accum_size'] = self.calc_size() | ||||
|         self.ppu = clear['ppu'] = self.calc_ppu() | ||||
| 
 | ||||
|         return clear | ||||
| 
 | ||||
|     def suggest_split(self) -> float: | ||||
|         ... | ||||
| 
 | ||||
| 
 | ||||
| class PpTable(Struct): | ||||
| 
 | ||||
|     brokername: str | ||||
|     acctid: str | ||||
|     pps: dict[str, Position] | ||||
|     conf: Optional[dict] = {} | ||||
| 
 | ||||
|     def update_from_trans( | ||||
|         self, | ||||
|         trans: dict[str, Transaction], | ||||
|         cost_scalar: float = 2, | ||||
| 
 | ||||
|     ) -> dict[str, Position]: | ||||
| 
 | ||||
|         pps = self.pps | ||||
|         updated: dict[str, Position] = {} | ||||
| 
 | ||||
|         # lifo update all pps from records | ||||
|         for tid, t in trans.items(): | ||||
| 
 | ||||
|             pp = pps.setdefault( | ||||
|                 t.bsuid, | ||||
| 
 | ||||
|                 # if no existing pp, allocate fresh one. | ||||
|                 Position( | ||||
|                     Symbol.from_fqsn( | ||||
|                         t.fqsn, | ||||
|                         info={}, | ||||
|                     ), | ||||
|                     size=0.0, | ||||
|                     ppu=0.0, | ||||
|                     bsuid=t.bsuid, | ||||
|                     expiry=t.expiry, | ||||
|                 ) | ||||
|             ) | ||||
|             clears = pp.clears | ||||
|             if clears: | ||||
|                 first_clear_dt = pp.first_clear_dt | ||||
| 
 | ||||
|                 # don't do updates for ledger records we already have | ||||
|                 # included in the current pps state. | ||||
|                 if ( | ||||
|                     t.tid in clears | ||||
|                     or first_clear_dt and t.dt < first_clear_dt | ||||
|                 ): | ||||
|                     # NOTE: likely you'll see repeats of the same | ||||
|                     # ``Transaction`` passed in here if/when you are restarting | ||||
|                     # a ``brokerd.ib`` where the API will re-report trades from | ||||
|                     # the current session, so we need to make sure we don't | ||||
|                     # "double count" these in pp calculations. | ||||
|                     continue | ||||
| 
 | ||||
|             # update clearing table | ||||
|             pp.add_clear(t) | ||||
|             updated[t.bsuid] = pp | ||||
| 
 | ||||
|         # minimize clears tables and update sizing. | ||||
|         for bsuid, pp in updated.items(): | ||||
|             pp.ensure_state() | ||||
| 
 | ||||
|         return updated | ||||
| 
 | ||||
|     def dump_active( | ||||
|         self, | ||||
|     ) -> tuple[ | ||||
|         dict[str, Position], | ||||
|         dict[str, Position] | ||||
|     ]: | ||||
|         ''' | ||||
|         Iterate all tabulated positions, render active positions to | ||||
|         a ``dict`` format amenable to serialization (via TOML) and drop | ||||
|         from state (``.pps``) as well as return in a ``dict`` all | ||||
|         ``Position``s which have recently closed. | ||||
| 
 | ||||
|         ''' | ||||
|         # NOTE: newly closed position are also important to report/return | ||||
|         # since a consumer, like an order mode UI ;), might want to react | ||||
|         # based on the closure (for example removing the breakeven line | ||||
|         # and clearing the entry from any lists/monitors). | ||||
|         closed_pp_objs: dict[str, Position] = {} | ||||
|         open_pp_objs: dict[str, Position] = {} | ||||
| 
 | ||||
|         pp_objs = self.pps | ||||
|         for bsuid in list(pp_objs): | ||||
|             pp = pp_objs[bsuid] | ||||
| 
 | ||||
|             # XXX: debug hook for size mismatches | ||||
|             # qqqbsuid = 320227571 | ||||
|             # if bsuid == qqqbsuid: | ||||
|             #     breakpoint() | ||||
| 
 | ||||
|             pp.ensure_state() | ||||
| 
 | ||||
|             if ( | ||||
|                 # "net-zero" is a "closed" position | ||||
|                 pp.size == 0 | ||||
| 
 | ||||
|                 # time-expired pps (normally derivatives) are "closed" | ||||
|                 or (pp.expiry and pp.expiry < now()) | ||||
|             ): | ||||
|                 # for expired cases | ||||
|                 pp.size = 0 | ||||
| 
 | ||||
|                 # NOTE: we DO NOT pop the pp here since it can still be | ||||
|                 # used to check for duplicate clears that may come in as | ||||
|                 # new transaction from some backend API and need to be | ||||
|                 # ignored; the closed positions won't be written to the | ||||
|                 # ``pps.toml`` since ``pp_active_entries`` above is what's | ||||
|                 # written. | ||||
|                 closed_pp_objs[bsuid] = pp | ||||
| 
 | ||||
|             else: | ||||
|                 open_pp_objs[bsuid] = pp | ||||
| 
 | ||||
|         return open_pp_objs, closed_pp_objs | ||||
| 
 | ||||
|     def to_toml( | ||||
|         self, | ||||
|     ) -> dict[str, Any]: | ||||
| 
 | ||||
|         active, closed = self.dump_active() | ||||
| 
 | ||||
|         # ONLY dict-serialize all active positions; those that are closed | ||||
|         # we don't store in the ``pps.toml``. | ||||
|         to_toml_dict = {} | ||||
| 
 | ||||
|         for bsuid, pos in active.items(): | ||||
| 
 | ||||
|             # keep the minimal amount of clears that make up this | ||||
|             # position since the last net-zero state. | ||||
|             pos.minimize_clears() | ||||
|             pos.ensure_state() | ||||
| 
 | ||||
|             # serialize to pre-toml form | ||||
|             fqsn, asdict = pos.to_pretoml() | ||||
|             log.info(f'Updating active pp: {fqsn}') | ||||
| 
 | ||||
|             # XXX: ugh, it's cuz we push the section under | ||||
|             # the broker name.. maybe we need to rethink this? | ||||
|             brokerless_key = fqsn.removeprefix(f'{self.brokername}.') | ||||
|             to_toml_dict[brokerless_key] = asdict | ||||
| 
 | ||||
|         return to_toml_dict | ||||
| 
 | ||||
|     def write_config(self) -> None: | ||||
|         ''' | ||||
|         Write the current position table to the user's ``pps.toml``. | ||||
| 
 | ||||
|         ''' | ||||
|         # TODO: show diff output? | ||||
|         # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries | ||||
|         print(f'Updating ``pps.toml`` for {path}:\n') | ||||
| 
 | ||||
|         # active, closed_pp_objs = table.dump_active() | ||||
|         pp_entries = self.to_toml() | ||||
|         self.conf[self.brokername][self.acctid] = pp_entries | ||||
| 
 | ||||
|         # TODO: why tf haven't they already done this for inline | ||||
|         # tables smh.. | ||||
|         enc = PpsEncoder(preserve=True) | ||||
|         # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table()) | ||||
|         enc.dump_funcs[ | ||||
|             toml.decoder.InlineTableDict | ||||
|         ] = enc.dump_inline_table | ||||
| 
 | ||||
|         config.write( | ||||
|             self.conf, | ||||
|             'pps', | ||||
|             encoder=enc, | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| def load_pps_from_ledger( | ||||
| 
 | ||||
|     brokername: str, | ||||
|     acctname: str, | ||||
| 
 | ||||
|     # post normalization filter on ledger entries to be processed | ||||
|     filter_by: Optional[list[dict]] = None, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     dict[str, Transaction], | ||||
|     dict[str, Position], | ||||
| ]: | ||||
|     ''' | ||||
|     Open a ledger file by broker name and account and read in and | ||||
|     process any trade records into our normalized ``Transaction`` form | ||||
|     and then update the equivalent ``PpTable`` and deliver the two | ||||
|     bsuid-mapped dict-sets of the transactions and pps. | ||||
| 
 | ||||
|     ''' | ||||
|     with ( | ||||
|         open_trade_ledger(brokername, acctname) as ledger, | ||||
|         open_pps(brokername, acctname) as table, | ||||
|     ): | ||||
|         if not ledger: | ||||
|             # null case, no ledger file with content | ||||
|             return {}, {} | ||||
| 
 | ||||
|         mod = get_brokermod(brokername) | ||||
|         src_records: dict[str, Transaction] = mod.norm_trade_records(ledger) | ||||
| 
 | ||||
|         if filter_by: | ||||
|             records = {} | ||||
|             bsuids = set(filter_by) | ||||
|             for tid, r in src_records.items(): | ||||
|                 if r.bsuid in bsuids: | ||||
|                     records[tid] = r | ||||
|         else: | ||||
|             records = src_records | ||||
| 
 | ||||
|         updated = table.update_from_trans(records) | ||||
| 
 | ||||
|     return records, updated | ||||
| 
 | ||||
| 
 | ||||
| # TODO: instead see if we can hack tomli and tomli-w to do the same: | ||||
| # - https://github.com/hukkin/tomli | ||||
| # - https://github.com/hukkin/tomli-w | ||||
| class PpsEncoder(toml.TomlEncoder): | ||||
|     ''' | ||||
|     Special "styled" encoder that makes a ``pps.toml`` readable and | ||||
|     compact by putting `.clears` tables inline and everything else | ||||
|     flat-ish. | ||||
| 
 | ||||
|     ''' | ||||
|     separator = ',' | ||||
| 
 | ||||
|     def dump_list(self, v): | ||||
|         ''' | ||||
|         Dump an inline list with a newline after every element and | ||||
|         with consideration for denoted inline table types. | ||||
| 
 | ||||
|         ''' | ||||
|         retval = "[\n" | ||||
|         for u in v: | ||||
|             if isinstance(u, toml.decoder.InlineTableDict): | ||||
|                 out = self.dump_inline_table(u) | ||||
|             else: | ||||
|                 out = str(self.dump_value(u)) | ||||
| 
 | ||||
|             retval += " " + out + "," + "\n" | ||||
|         retval += "]" | ||||
|         return retval | ||||
| 
 | ||||
|     def dump_inline_table(self, section): | ||||
|         """Preserve inline table in its compact syntax instead of expanding | ||||
|         into subsection. | ||||
|         https://github.com/toml-lang/toml#user-content-inline-table | ||||
|         """ | ||||
|         val_list = [] | ||||
|         for k, v in section.items(): | ||||
|             # if isinstance(v, toml.decoder.InlineTableDict): | ||||
|             if isinstance(v, dict): | ||||
|                 val = self.dump_inline_table(v) | ||||
|             else: | ||||
|                 val = str(self.dump_value(v)) | ||||
| 
 | ||||
|             val_list.append(k + " = " + val) | ||||
| 
 | ||||
|         retval = "{ " + ", ".join(val_list) + " }" | ||||
|         return retval | ||||
| 
 | ||||
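For reference, the inline-table rendering above is what keeps each clear on a single ``{ k = v, ... }`` line in ``pps.toml``, e.g.:

    enc = PpsEncoder(preserve=True)
    tbl = toml.TomlDecoder().get_empty_inline_table()
    tbl['price'] = 10.5
    tbl['size'] = 100
    tbl['cost'] = 1.0
    print(enc.dump_inline_table(tbl))
    # -> { price = 10.5, size = 100, cost = 1.0 }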
|     def dump_sections(self, o, sup): | ||||
|         retstr = "" | ||||
|         if sup != "" and sup[-1] != ".": | ||||
|             sup += '.' | ||||
|         retdict = self._dict() | ||||
|         arraystr = "" | ||||
|         for section in o: | ||||
|             qsection = str(section) | ||||
|             value = o[section] | ||||
| 
 | ||||
|             if not re.match(r'^[A-Za-z0-9_-]+$', section): | ||||
|                 qsection = toml.encoder._dump_str(section) | ||||
| 
 | ||||
|             # arrayoftables = False | ||||
|             if ( | ||||
|                 self.preserve | ||||
|                 and isinstance(value, toml.decoder.InlineTableDict) | ||||
|             ): | ||||
|                 retstr += ( | ||||
|                     qsection | ||||
|                     + | ||||
|                     " = " | ||||
|                     + | ||||
|                     self.dump_inline_table(o[section]) | ||||
|                     + | ||||
|                     '\n'  # only on the final terminating left brace | ||||
|                 ) | ||||
| 
 | ||||
|             # XXX: this code i'm pretty sure is just blatantly bad | ||||
|             # and/or wrong.. | ||||
|             # if isinstance(o[section], list): | ||||
|             #     for a in o[section]: | ||||
|             #         if isinstance(a, dict): | ||||
|             #             arrayoftables = True | ||||
|             # if arrayoftables: | ||||
|             #     for a in o[section]: | ||||
|             #         arraytabstr = "\n" | ||||
|             #         arraystr += "[[" + sup + qsection + "]]\n" | ||||
|             #         s, d = self.dump_sections(a, sup + qsection) | ||||
|             #         if s: | ||||
|             #             if s[0] == "[": | ||||
|             #                 arraytabstr += s | ||||
|             #             else: | ||||
|             #                 arraystr += s | ||||
|             #         while d: | ||||
|             #             newd = self._dict() | ||||
|             #             for dsec in d: | ||||
|             #                 s1, d1 = self.dump_sections(d[dsec], sup + | ||||
|             #                                             qsection + "." + | ||||
|             #                                             dsec) | ||||
|             #                 if s1: | ||||
|             #                     arraytabstr += ("[" + sup + qsection + | ||||
|             #                                     "." + dsec + "]\n") | ||||
|             #                     arraytabstr += s1 | ||||
|             #                 for s1 in d1: | ||||
|             #                     newd[dsec + "." + s1] = d1[s1] | ||||
|             #             d = newd | ||||
|             #         arraystr += arraytabstr | ||||
| 
 | ||||
|             elif isinstance(value, dict): | ||||
|                 retdict[qsection] = o[section] | ||||
| 
 | ||||
|             elif o[section] is not None: | ||||
|                 retstr += ( | ||||
|                     qsection | ||||
|                     + | ||||
|                     " = " | ||||
|                     + | ||||
|                     str(self.dump_value(o[section])) | ||||
|                 ) | ||||
| 
 | ||||
|                 # if not isinstance(value, dict): | ||||
|                 if not isinstance(value, toml.decoder.InlineTableDict): | ||||
|                     # inline tables should not contain newlines: | ||||
|                     # https://toml.io/en/v1.0.0#inline-table | ||||
|                     retstr += '\n' | ||||
| 
 | ||||
|             else: | ||||
|                 raise ValueError(value) | ||||
| 
 | ||||
|         retstr += arraystr | ||||
|         return (retstr, retdict) | ||||
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def open_pps( | ||||
|     brokername: str, | ||||
|     acctid: str, | ||||
|     write_on_exit: bool = True, | ||||
| 
 | ||||
| ) -> PpTable: | ||||
|     ''' | ||||
|     Read out broker-specific position entries from | ||||
|     incremental update file: ``pps.toml``. | ||||
| 
 | ||||
|     ''' | ||||
|     conf, path = config.load('pps') | ||||
|     brokersection = conf.setdefault(brokername, {}) | ||||
|     pps = brokersection.setdefault(acctid, {}) | ||||
| 
 | ||||
|     # TODO: ideally we can pass in an existing | ||||
|     # pps state to this right? such that we | ||||
|     # don't have to do a ledger reload all the | ||||
|     # time.. a couple ideas I can think of, | ||||
|     # - mirror this in some client side actor which | ||||
|     #   does the actual ledger updates (say the paper | ||||
|     #   engine proc if we decide to always spawn it?), | ||||
|     # - do diffs against updates from the ledger writer | ||||
|     #   actor and the in-mem state here? | ||||
| 
 | ||||
|     pp_objs = {} | ||||
|     table = PpTable( | ||||
|         brokername, | ||||
|         acctid, | ||||
|         pp_objs, | ||||
|         conf=conf, | ||||
|     ) | ||||
| 
 | ||||
|     # unmarshal/load ``pps.toml`` config entries into object form | ||||
|     # and update `PpTable` obj entries. | ||||
|     for fqsn, entry in pps.items(): | ||||
|         bsuid = entry['bsuid'] | ||||
| 
 | ||||
|         # convert clears sub-tables (only in this form | ||||
|         # for toml re-presentation) back into a master table. | ||||
|         clears_list = entry['clears'] | ||||
| 
 | ||||
|         # index clears entries in "object" form by tid in a top | ||||
|         # level dict instead of a list (as is presented in our | ||||
|         # ``pps.toml``). | ||||
|         clears = pp_objs.setdefault(bsuid, {}) | ||||
| 
 | ||||
|         # TODO: should be make a ``Struct`` for clear/event entries? | ||||
|         # convert "clear events table" from the toml config (list of | ||||
|         # a dicts) and load it into object form for use in position | ||||
|         # processing of new clear events. | ||||
|         trans: list[Transaction] = [] | ||||
| 
 | ||||
|         for clears_table in clears_list: | ||||
|             tid = clears_table.pop('tid') | ||||
|             dtstr = clears_table['dt'] | ||||
|             dt = pendulum.parse(dtstr) | ||||
|             clears_table['dt'] = dt | ||||
|             trans.append(Transaction( | ||||
|                 fqsn=bsuid, | ||||
|                 bsuid=bsuid, | ||||
|                 tid=tid, | ||||
|                 size=clears_table['size'], | ||||
|                 price=clears_table['price'], | ||||
|                 cost=clears_table['cost'], | ||||
|                 dt=dt, | ||||
|             )) | ||||
|             clears[tid] = clears_table | ||||
| 
 | ||||
|         size = entry['size'] | ||||
| 
 | ||||
|         # TODO: remove, but handle the old field name for now | ||||
|         ppu = entry.get('ppu', entry.get('be_price', 0)) | ||||
|         split_ratio = entry.get('split_ratio') | ||||
| 
 | ||||
|         expiry = entry.get('expiry') | ||||
|         if expiry: | ||||
|             expiry = pendulum.parse(expiry) | ||||
| 
 | ||||
|         pp = pp_objs[bsuid] = Position( | ||||
|             Symbol.from_fqsn(fqsn, info={}), | ||||
|             size=size, | ||||
|             ppu=ppu, | ||||
|             split_ratio=split_ratio, | ||||
|             expiry=expiry, | ||||
|             bsuid=entry['bsuid'], | ||||
|         ) | ||||
| 
 | ||||
|         # XXX: super critical, we need to be sure to include | ||||
|         # all pps.toml clears to avoid reusing clears that were | ||||
|         # already included in the current incremental update | ||||
|         # state, since today's records may have already been | ||||
|         # processed! | ||||
|         for t in trans: | ||||
|             pp.add_clear(t) | ||||
| 
 | ||||
|         # audit entries loaded from toml | ||||
|         pp.ensure_state() | ||||
| 
 | ||||
|     try: | ||||
|         yield table | ||||
|     finally: | ||||
|         if write_on_exit: | ||||
|             table.write_config() | ||||
| 
 | ||||
| 
 | ||||
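A read-only inspection sketch for the context manager above; passing ``write_on_exit=False`` skips rewriting ``pps.toml`` on close (account names are placeholders):

    with open_pps('ib', 'margin', write_on_exit=False) as table:
        for bsuid, pp in table.pps.items():
            print(bsuid, pp.size, pp.ppu)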
| if __name__ == '__main__': | ||||
|     import sys | ||||
| 
 | ||||
|     args = sys.argv | ||||
|     assert len(args) > 1, 'Specify account(s) from `brokers.toml`' | ||||
|     args = args[1:] | ||||
|     for acctid in args: | ||||
|         broker, name = acctid.split('.') | ||||
|         trans, updated_pps = load_pps_from_ledger(broker, name) | ||||
|         print( | ||||
|             f'Processing transactions into pps for {broker}:{acctid}\n' | ||||
|             f'{pformat(trans)}\n\n' | ||||
|             f'{pformat(updated_pps)}' | ||||
|         ) | ||||
|  | @ -230,19 +230,18 @@ class GodWidget(QWidget): | |||
|             # - we'll probably want per-instrument/provider state here? | ||||
|             #   change the order config form over to the new chart | ||||
| 
 | ||||
|             # XXX: since the pp config is a singleton widget we have to | ||||
|             # also switch it over to the new chart's interal-layout | ||||
|             # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane) | ||||
|             chart = linkedsplits.chart | ||||
| 
 | ||||
|             # chart is already in memory so just focus it | ||||
|             linkedsplits.show() | ||||
|             linkedsplits.focus() | ||||
|             linkedsplits.graphics_cycle() | ||||
|             await trio.sleep(0) | ||||
| 
 | ||||
|             # XXX: since the pp config is a singleton widget we have to | ||||
|             # also switch it over to the new chart's interal-layout | ||||
|             # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane) | ||||
|             chart = linkedsplits.chart | ||||
| 
 | ||||
|             # resume feeds *after* rendering chart view asap | ||||
|             if chart: | ||||
|             chart.resume_all_feeds() | ||||
| 
 | ||||
|             # TODO: we need a check to see if the chart | ||||
|  | @ -453,6 +452,13 @@ class LinkedSplits(QWidget): | |||
|         # add crosshair graphic | ||||
|         self.chart.addItem(self.cursor) | ||||
| 
 | ||||
|         # axis placement | ||||
|         if ( | ||||
|             _xaxis_at == 'bottom' and | ||||
|             'bottom' in self.chart.plotItem.axes | ||||
|         ): | ||||
|             self.chart.hideAxis('bottom') | ||||
| 
 | ||||
|         # style? | ||||
|         self.chart.setFrameStyle( | ||||
|             QFrame.StyledPanel | | ||||
|  | @ -517,15 +523,6 @@ class LinkedSplits(QWidget): | |||
|         cpw.hideAxis('left') | ||||
|         cpw.hideAxis('bottom') | ||||
| 
 | ||||
|         if ( | ||||
|             _xaxis_at == 'bottom' and ( | ||||
|                 self.xaxis_chart | ||||
|                 or ( | ||||
|                     not self.subplots | ||||
|                     and self.xaxis_chart is None | ||||
|                 ) | ||||
|             ) | ||||
|         ): | ||||
|         if self.xaxis_chart: | ||||
|             self.xaxis_chart.hideAxis('bottom') | ||||
| 
 | ||||
|  | @ -534,9 +531,13 @@ class LinkedSplits(QWidget): | |||
|             # https://github.com/pikers/pyqtgraph/tree/plotitemoverlay_onto_pg_master | ||||
|             # _ = self.xaxis_chart.removeAxis('bottom', unlink=False) | ||||
|             # assert 'bottom' not in self.xaxis_chart.plotItem.axes | ||||
| 
 | ||||
|             self.xaxis_chart = cpw | ||||
|             cpw.showAxis('bottom') | ||||
| 
 | ||||
|         if self.xaxis_chart is None: | ||||
|             self.xaxis_chart = cpw | ||||
| 
 | ||||
|         qframe.chart = cpw | ||||
|         qframe.hbox.addWidget(cpw) | ||||
| 
 | ||||
|  | @ -759,18 +760,9 @@ class ChartPlotWidget(pg.PlotWidget): | |||
| 
 | ||||
|         self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem) | ||||
| 
 | ||||
|         # indempotent startup flag for auto-yrange subsys | ||||
|         # to detect the "first time" y-domain graphics begin | ||||
|         # to be shown in the (main) graphics view. | ||||
|         self._on_screen: bool = False | ||||
| 
 | ||||
|     def resume_all_feeds(self): | ||||
|         try: | ||||
|         for feed in self._feeds.values(): | ||||
|             self.linked.godwidget._root_n.start_soon(feed.resume) | ||||
|         except RuntimeError: | ||||
|             # TODO: cancel the qtractor runtime here? | ||||
|             raise | ||||
| 
 | ||||
|     def pause_all_feeds(self): | ||||
|         for feed in self._feeds.values(): | ||||
|  | @ -867,8 +859,7 @@ class ChartPlotWidget(pg.PlotWidget): | |||
| 
 | ||||
|     def default_view( | ||||
|         self, | ||||
|         bars_from_y: int = 616, | ||||
|         do_ds: bool = True, | ||||
|         bars_from_y: int = 3000, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|  | @ -929,11 +920,8 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|             max=end, | ||||
|             padding=0, | ||||
|         ) | ||||
| 
 | ||||
|         if do_ds: | ||||
|         self.view.maybe_downsample_graphics() | ||||
|         view._set_yrange() | ||||
| 
 | ||||
|         try: | ||||
|             self.linked.graphics_cycle() | ||||
|         except IndexError: | ||||
|  | @ -1267,6 +1255,7 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         If ``bars_range`` is provided use that range. | ||||
| 
 | ||||
|         ''' | ||||
|         # print(f'Chart[{self.name}].maxmin()') | ||||
|         profiler = pg.debug.Profiler( | ||||
|             msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`', | ||||
|             disabled=not pg_profile_enabled(), | ||||
|  | @ -1298,18 +1287,11 @@ class ChartPlotWidget(pg.PlotWidget): | |||
| 
 | ||||
|             key = round(lbar), round(rbar) | ||||
|             res = flow.maxmin(*key) | ||||
| 
 | ||||
|             if ( | ||||
|                 res is None | ||||
|             ): | ||||
|                 log.warning( | ||||
|             if res == (None, None): | ||||
|                 log.error( | ||||
|                     f"{flow_key} no mxmn for bars_range => {key} !?" | ||||
|                 ) | ||||
|                 res = 0, 0 | ||||
|                 if not self._on_screen: | ||||
|                     self.default_view(do_ds=False) | ||||
|                     self._on_screen = True | ||||
| 
 | ||||
|         profiler(f'yrange mxmn: {key} -> {res}') | ||||
|         # print(f'{flow_key} yrange mxmn: {key} -> {res}') | ||||
|         return res | ||||
|  |  | |||
|  | @ -223,20 +223,14 @@ def ds_m4( | |||
|     assert frames >= (xrange / uppx) | ||||
| 
 | ||||
|     # call into ``numba`` | ||||
|     ( | ||||
|         nb, | ||||
|         x_out, | ||||
|         y_out, | ||||
|         ymn, | ||||
|         ymx, | ||||
|     ) = _m4( | ||||
|     nb, i_win, y_out = _m4( | ||||
|         x, | ||||
|         y, | ||||
| 
 | ||||
|         frames, | ||||
| 
 | ||||
|         # TODO: see func below.. | ||||
|         # x_out, | ||||
|         # i_win, | ||||
|         # y_out, | ||||
| 
 | ||||
|         # first index in x data to start at | ||||
|  | @ -249,11 +243,10 @@ def ds_m4( | |||
|     # filter out any overshoot in the input allocation arrays by | ||||
|     # removing zero-ed tail entries which should start at a certain | ||||
|     # index. | ||||
|     x_out = x_out[x_out != 0] | ||||
|     y_out = y_out[:x_out.size] | ||||
|     i_win = i_win[i_win != 0] | ||||
|     y_out = y_out[:i_win.size] | ||||
| 
 | ||||
|     # print(f'M4 output ymn, ymx: {ymn},{ymx}') | ||||
|     return nb, x_out, y_out, ymn, ymx | ||||
|     return nb, i_win, y_out | ||||
| 
 | ||||
| 
 | ||||
| @jit( | ||||
|  | @ -267,8 +260,8 @@ def _m4( | |||
| 
 | ||||
|     frames: int, | ||||
| 
 | ||||
|     # TODO: using this approach, having the ``.zeros()`` alloc lines | ||||
|     # below in pure python, there were seg faults and alloc crashes.. | ||||
|     # TODO: using this approach by having the ``.zeros()`` alloc lines | ||||
|     # below, in pure python was causing seg faults and alloc crashes.. | ||||
|     # we might need to see how it behaves with shm arrays and consider | ||||
|     # allocating them once at startup? | ||||
| 
 | ||||
|  | @ -281,22 +274,14 @@ def _m4( | |||
|     x_start: int, | ||||
|     step: float, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     int, | ||||
|     np.ndarray, | ||||
|     np.ndarray, | ||||
|     float, | ||||
|     float, | ||||
| ]: | ||||
|     ''' | ||||
|     Implementation of the m4 algorithm in ``numba``: | ||||
|     http://www.vldb.org/pvldb/vol7/p797-jugel.pdf | ||||
| ) -> int: | ||||
|     # nbins = len(i_win) | ||||
|     # count = len(xs) | ||||
| 
 | ||||
|     ''' | ||||
|     # these are pre-allocated and mutated by ``numba`` | ||||
|     # code in-place. | ||||
|     y_out = np.zeros((frames, 4), ys.dtype) | ||||
|     x_out = np.zeros(frames, xs.dtype) | ||||
|     i_win = np.zeros(frames, xs.dtype) | ||||
| 
 | ||||
|     bincount = 0 | ||||
|     x_left = x_start | ||||
|  | @ -310,34 +295,24 @@ def _m4( | |||
| 
 | ||||
|     # set all bins in the left-most entry to the starting left-most x value | ||||
|     # (aka a row broadcast). | ||||
|     x_out[bincount] = x_left | ||||
|     i_win[bincount] = x_left | ||||
|     # set all y-values to the first value passed in. | ||||
|     y_out[bincount] = ys[0] | ||||
| 
 | ||||
|     # full input y-data mx and mn | ||||
|     mx: float = -np.inf | ||||
|     mn: float = np.inf | ||||
| 
 | ||||
|     # compute OHLC style max / min values per window sized x-frame. | ||||
|     for i in range(len(xs)): | ||||
| 
 | ||||
|         x = xs[i] | ||||
|         y = ys[i] | ||||
| 
 | ||||
|         if x < x_left + step:   # the current window "step" is [bin, bin+1) | ||||
|             ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1]) | ||||
|             ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2]) | ||||
|             y_out[bincount, 1] = min(y, y_out[bincount, 1]) | ||||
|             y_out[bincount, 2] = max(y, y_out[bincount, 2]) | ||||
|             y_out[bincount, 3] = y | ||||
|             mx = max(mx, ymx) | ||||
|             mn = min(mn, ymn) | ||||
| 
 | ||||
|         else: | ||||
|             # Find the next bin | ||||
|             while x >= x_left + step: | ||||
|                 x_left += step | ||||
| 
 | ||||
|             bincount += 1 | ||||
|             x_out[bincount] = x_left | ||||
|             i_win[bincount] = x_left | ||||
|             y_out[bincount] = y | ||||
| 
 | ||||
|     return bincount, x_out, y_out, mn, mx | ||||
|     return bincount, i_win, y_out | ||||
|  |  | |||
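For readers unfamiliar with the algorithm behind these hunks: M4 (Jugel et al., VLDB 2014, linked in the docstring above) downsamples a series by keeping only four y-values per pixel-column bin: the first, min, max and last samples. The signature change above simply stops returning the global ymn/ymx alongside those bins. A minimal pure-numpy sketch of the binning, illustrative only and not the numba-jitted ``_m4``:

    import numpy as np

    def m4_reference(
        x: np.ndarray,
        y: np.ndarray,
        frames: int,
    ) -> list[tuple]:
        # assign each sample to one of ``frames`` equal-width x-bins
        edges = np.linspace(x[0], x[-1], frames + 1)
        idx = np.clip(
            np.searchsorted(edges, x, side='right') - 1,
            0,
            frames - 1,
        )
        out = []
        for b in range(frames):
            ys = y[idx == b]
            if ys.size:
                # (bin-left-x, first, min, max, last) per pixel column
                out.append((edges[b], ys[0], ys.min(), ys.max(), ys[-1]))
        return out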
|  | @ -105,10 +105,6 @@ def chart_maxmin( | |||
|     mn, mx = out | ||||
| 
 | ||||
|     mx_vlm_in_view = 0 | ||||
| 
 | ||||
|     # TODO: we need to NOT call this to avoid a manual | ||||
|     # np.max/min trigger and especially on the vlm_chart | ||||
|     # flows which aren't shown.. like vlm? | ||||
|     if vlm_chart: | ||||
|         out = vlm_chart.maxmin() | ||||
|         if out: | ||||
|  | @ -136,16 +132,16 @@ class DisplayState: | |||
|     # high level chart handles | ||||
|     linked: LinkedSplits | ||||
|     chart: ChartPlotWidget | ||||
|     vlm_chart: ChartPlotWidget | ||||
| 
 | ||||
|     # axis labels | ||||
|     l1: L1Labels | ||||
|     last_price_sticky: YAxisLabel | ||||
|     vlm_sticky: YAxisLabel | ||||
| 
 | ||||
|     # misc state tracking | ||||
|     vars: dict[str, Any] | ||||
| 
 | ||||
|     vlm_chart: Optional[ChartPlotWidget] = None | ||||
|     vlm_sticky: Optional[YAxisLabel] = None | ||||
|     wap_in_history: bool = False | ||||
| 
 | ||||
| 
 | ||||
|  | @ -185,6 +181,9 @@ async def graphics_update_loop( | |||
|         *ohlcv.array[-1][['index', 'close']] | ||||
|     ) | ||||
| 
 | ||||
|     if vlm_chart: | ||||
|         vlm_sticky = vlm_chart._ysticks['volume'] | ||||
| 
 | ||||
|     maxmin = partial( | ||||
|         chart_maxmin, | ||||
|         chart, | ||||
|  | @ -223,9 +222,33 @@ async def graphics_update_loop( | |||
|     tick_margin = 3 * tick_size | ||||
| 
 | ||||
|     chart.show() | ||||
|     # view = chart.view | ||||
|     last_quote = time.time() | ||||
|     i_last = ohlcv.index | ||||
| 
 | ||||
|     # async def iter_drain_quotes(): | ||||
|     #     # NOTE: all code below this loop is expected to be synchronous | ||||
|     #     # and thus draw instructions are not picked up until the next | ||||
|     #     # wait / iteration. | ||||
|     #     async for quotes in stream: | ||||
|     #         while True: | ||||
|     #             try: | ||||
|     #                 moar = stream.receive_nowait() | ||||
|     #             except trio.WouldBlock: | ||||
|     #                 yield quotes | ||||
|     #                 break | ||||
|     #             else: | ||||
|     #                 for sym, quote in moar.items(): | ||||
|     #                     ticks_frame = quote.get('ticks') | ||||
|     #                     if ticks_frame: | ||||
|     #                         quotes[sym].setdefault( | ||||
|     #                             'ticks', []).extend(ticks_frame) | ||||
|     #                     print('pulled extra') | ||||
| 
 | ||||
|     #                 yield quotes | ||||
| 
 | ||||
|     # async for quotes in iter_drain_quotes(): | ||||
| 
 | ||||
|     ds = linked.display_state = DisplayState(**{ | ||||
|         'quotes': {}, | ||||
|         'linked': linked, | ||||
|  | @ -233,6 +256,8 @@ async def graphics_update_loop( | |||
|         'ohlcv': ohlcv, | ||||
|         'chart': chart, | ||||
|         'last_price_sticky': last_price_sticky, | ||||
|         'vlm_chart': vlm_chart, | ||||
|         'vlm_sticky': vlm_sticky, | ||||
|         'l1': l1, | ||||
| 
 | ||||
|         'vars': { | ||||
|  | @ -245,11 +270,6 @@ async def graphics_update_loop( | |||
|         } | ||||
|     }) | ||||
| 
 | ||||
|     if vlm_chart: | ||||
|         vlm_sticky = vlm_chart._ysticks['volume'] | ||||
|         ds.vlm_chart = vlm_chart | ||||
|         ds.vlm_sticky = vlm_sticky | ||||
| 
 | ||||
|     chart.default_view() | ||||
| 
 | ||||
|     # main real-time quotes update loop | ||||
|  | @ -273,7 +293,6 @@ async def graphics_update_loop( | |||
| 
 | ||||
|         # chart isn't active/shown so skip render cycle and pause feed(s) | ||||
|         if chart.linked.isHidden(): | ||||
|             print('skipping update') | ||||
|             chart.pause_all_feeds() | ||||
|             continue | ||||
| 
 | ||||
|  | @ -322,7 +341,7 @@ def graphics_update_cycle( | |||
|     for sym, quote in ds.quotes.items(): | ||||
| 
 | ||||
|         # compute the first available graphic's x-units-per-pixel | ||||
|         uppx = chart.view.x_uppx() | ||||
|         uppx = vlm_chart.view.x_uppx() | ||||
| 
 | ||||
|         # NOTE: vlm may be written by the ``brokerd`` backend | ||||
|         # even though a tick sample is not emitted. | ||||
|  | @ -397,8 +416,10 @@ def graphics_update_cycle( | |||
|             ) | ||||
|             or trigger_all | ||||
|         ): | ||||
|             # TODO: we should track and compute whether the last | ||||
|             # pixel in a curve should show new data based on uppx | ||||
|             # and then iff update curves and shift? | ||||
|             chart.increment_view(steps=i_diff) | ||||
|             # chart.increment_view(steps=i_diff + round(append_diff - uppx)) | ||||
| 
 | ||||
|             if vlm_chart: | ||||
|                 vlm_chart.increment_view(steps=i_diff) | ||||
|  | @ -456,6 +477,7 @@ def graphics_update_cycle( | |||
|         ): | ||||
|             chart.update_graphics_from_flow( | ||||
|                 chart.name, | ||||
|                 # do_append=uppx < update_uppx, | ||||
|                 do_append=do_append, | ||||
|             ) | ||||
| 
 | ||||
|  | @ -786,10 +808,7 @@ async def display_symbol_data( | |||
|         async with trio.open_nursery() as ln: | ||||
| 
 | ||||
|             # if available load volume related built-in display(s) | ||||
|             if ( | ||||
|                 not symbol.broker_info[provider].get('no_vlm', False) | ||||
|                 and has_vlm(ohlcv) | ||||
|             ): | ||||
|             if has_vlm(ohlcv): | ||||
|                 vlm_chart = await ln.start( | ||||
|                     open_vlm_displays, | ||||
|                     linked, | ||||
|  | @ -824,9 +843,6 @@ async def display_symbol_data( | |||
|                     order_mode_started | ||||
|                 ) | ||||
|             ): | ||||
|                 if not vlm_chart: | ||||
|                     chart.default_view() | ||||
| 
 | ||||
|                 # let Qt run to render all widgets and make sure the | ||||
|                 # sidepanes line up vertically. | ||||
|                 await trio.sleep(0) | ||||
|  |  | |||
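The commented-out ``iter_drain_quotes`` above records the intended batching strategy: since everything after the ``async for`` is synchronous draw code, any quote frames already queued on the stream can be merged into the current batch before yielding. A standalone sketch of that drain loop, assuming the stream exposes a ``trio``-style non-blocking ``receive_nowait()``:

    import trio

    async def iter_drain_quotes(stream):
        # yield merged quote batches; greedily drain anything already
        # queued so a slow draw cycle doesn't fall behind the feed
        async for quotes in stream:
            while True:
                try:
                    moar = stream.receive_nowait()
                except trio.WouldBlock:
                    break
                for sym, quote in moar.items():
                    ticks = quote.get('ticks')
                    if ticks:
                        quotes.setdefault(sym, {}).setdefault(
                            'ticks', []
                        ).extend(ticks)
            yield quotes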
|  | @ -140,9 +140,9 @@ class LineEditor: | |||
| 
 | ||||
|     ) -> LevelLine: | ||||
| 
 | ||||
|         # staged_line = self._active_staged_line | ||||
|         # if not staged_line: | ||||
|         #     raise RuntimeError("No line is currently staged!?") | ||||
|         staged_line = self._active_staged_line | ||||
|         if not staged_line: | ||||
|             raise RuntimeError("No line is currently staged!?") | ||||
| 
 | ||||
|         # for now, until submission response arrives | ||||
|         line.hide_labels() | ||||
|  |  | |||
|  | @ -21,6 +21,7 @@ Qt event proxying and processing using ``trio`` mem chans. | |||
| from contextlib import asynccontextmanager, AsyncExitStack | ||||
| from typing import Callable | ||||
| 
 | ||||
| from pydantic import BaseModel | ||||
| import trio | ||||
| from PyQt5 import QtCore | ||||
| from PyQt5.QtCore import QEvent, pyqtBoundSignal | ||||
|  | @ -29,8 +30,6 @@ from PyQt5.QtWidgets import ( | |||
|     QGraphicsSceneMouseEvent as gs_mouse, | ||||
| ) | ||||
| 
 | ||||
| from ..data.types import Struct | ||||
| 
 | ||||
| 
 | ||||
| MOUSE_EVENTS = { | ||||
|     gs_mouse.GraphicsSceneMousePress, | ||||
|  | @ -44,10 +43,13 @@ MOUSE_EVENTS = { | |||
| # TODO: maybe consider some constrained ints down the road? | ||||
| # https://pydantic-docs.helpmanual.io/usage/types/#constrained-types | ||||
| 
 | ||||
| class KeyboardMsg(Struct): | ||||
| class KeyboardMsg(BaseModel): | ||||
|     '''Unpacked Qt keyboard event data. | ||||
| 
 | ||||
|     ''' | ||||
|     class Config: | ||||
|         arbitrary_types_allowed = True | ||||
| 
 | ||||
|     event: QEvent | ||||
|     etype: int | ||||
|     key: int | ||||
|  | @ -55,13 +57,16 @@ class KeyboardMsg(Struct): | |||
|     txt: str | ||||
| 
 | ||||
|     def to_tuple(self) -> tuple: | ||||
|         return tuple(self.to_dict().values()) | ||||
|         return tuple(self.dict().values()) | ||||
| 
 | ||||
| 
 | ||||
| class MouseMsg(Struct): | ||||
| class MouseMsg(BaseModel): | ||||
|     '''Unpacked Qt mouse event data. | ||||
| 
 | ||||
|     ''' | ||||
|     class Config: | ||||
|         arbitrary_types_allowed = True | ||||
| 
 | ||||
|     event: QEvent | ||||
|     etype: int | ||||
|     button: int | ||||
|  |  | |||
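The hunk above swaps the internal ``Struct`` base for ``pydantic.BaseModel``; because fields like ``event: QEvent`` aren't types pydantic knows how to validate, the nested ``Config.arbitrary_types_allowed`` flag is required. A minimal sketch of the pattern using the pydantic v1 API, with a stand-in class instead of a real ``QEvent``:

    from pydantic import BaseModel

    class FakeEvent:
        # stand-in for ``QEvent`` in this sketch
        pass

    class KeyboardMsg(BaseModel):
        class Config:
            # accept field types pydantic has no validator for
            arbitrary_types_allowed = True

        event: FakeEvent
        etype: int
        key: int
        txt: str

    msg = KeyboardMsg(event=FakeEvent(), etype=6, key=65, txt='a')
    # ``.dict()`` preserves field declaration order
    print(tuple(msg.dict().values())[1:])  # (6, 65, 'a')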
|  | @ -337,7 +337,6 @@ class Flow(msgspec.Struct):  # , frozen=True): | |||
|     name: str | ||||
|     plot: pg.PlotItem | ||||
|     graphics: Union[Curve, BarItems] | ||||
|     yrange: tuple[float, float] = None | ||||
| 
 | ||||
|     # in some cases a flow may want to change its | ||||
|     # graphical "type" or, "form" when downsampling, | ||||
|  | @ -387,11 +386,10 @@ class Flow(msgspec.Struct):  # , frozen=True): | |||
|         lbar: int, | ||||
|         rbar: int, | ||||
| 
 | ||||
|     ) -> Optional[tuple[float, float]]: | ||||
|     ) -> tuple[float, float]: | ||||
|         ''' | ||||
|         Compute the cached max and min y-range values for a given | ||||
|         x-range determined by ``lbar`` and ``rbar`` or ``None`` | ||||
|         if no range can be determined (yet). | ||||
|         x-range determined by ``lbar`` and ``rbar``. | ||||
| 
 | ||||
|         ''' | ||||
|         rkey = (lbar, rbar) | ||||
|  | @ -401,8 +399,9 @@ class Flow(msgspec.Struct):  # , frozen=True): | |||
| 
 | ||||
|         shm = self.shm | ||||
|         if shm is None: | ||||
|             return None | ||||
|             mxmn = None | ||||
| 
 | ||||
|         else:  # new block for profiling?.. | ||||
|             arr = shm.array | ||||
| 
 | ||||
|             # build relative indexes into shm array | ||||
|  | @ -415,11 +414,7 @@ class Flow(msgspec.Struct):  # , frozen=True): | |||
|             ] | ||||
| 
 | ||||
|             if not slice_view.size: | ||||
|             return None | ||||
| 
 | ||||
|         elif self.yrange: | ||||
|             mxmn = self.yrange | ||||
|             # print(f'{self.name} M4 maxmin: {mxmn}') | ||||
|                 mxmn = None | ||||
| 
 | ||||
|             else: | ||||
|                 if self.is_ohlc: | ||||
|  | @ -432,10 +427,9 @@ class Flow(msgspec.Struct):  # , frozen=True): | |||
|                     yhigh = np.max(view) | ||||
| 
 | ||||
|                 mxmn = ylow, yhigh | ||||
|             # print(f'{self.name} MANUAL maxmin: {mxmin}') | ||||
| 
 | ||||
|         # cache result for input range | ||||
|         assert mxmn | ||||
|             if mxmn is not None: | ||||
|                 # cache new mxmn result | ||||
|                 self._mxmns[rkey] = mxmn | ||||
| 
 | ||||
|             return mxmn | ||||
|  | @ -634,13 +628,10 @@ class Flow(msgspec.Struct):  # , frozen=True): | |||
|             # source data so we clear our path data in prep | ||||
|             # to generate a new one from original source data. | ||||
|             new_sample_rate = True | ||||
|             showing_src_data = True | ||||
|             should_ds = False | ||||
|             should_redraw = True | ||||
| 
 | ||||
|             showing_src_data = True | ||||
|             # reset yrange to be computed from source data | ||||
|             self.yrange = None | ||||
| 
 | ||||
|         # MAIN RENDER LOGIC: | ||||
|         # - determine in view data and redraw on range change | ||||
|         # - determine downsampling ops if needed | ||||
|  | @ -666,10 +657,6 @@ class Flow(msgspec.Struct):  # , frozen=True): | |||
| 
 | ||||
|             **rkwargs, | ||||
|         ) | ||||
|         if showing_src_data: | ||||
|             # print(f"{self.name} SHOWING SOURCE") | ||||
|             # reset yrange to be computed from source data | ||||
|             self.yrange = None | ||||
| 
 | ||||
|         if not out: | ||||
|             log.warning(f'{self.name} failed to render!?') | ||||
|  | @ -677,9 +664,6 @@ class Flow(msgspec.Struct):  # , frozen=True): | |||
| 
 | ||||
|         path, data, reset = out | ||||
| 
 | ||||
|         # if self.yrange: | ||||
|         #     print(f'flow {self.name} yrange from m4: {self.yrange}') | ||||
| 
 | ||||
|         # XXX: SUPER UGGGHHH... without this we get stale cache | ||||
|         # graphics that don't update until you downsample again.. | ||||
|         if reset: | ||||
|  | @ -1074,7 +1058,6 @@ class Renderer(msgspec.Struct): | |||
|         # xy-path data transform: convert source data to a format | ||||
|         # able to be passed to a `QPainterPath` rendering routine. | ||||
|         if not len(hist): | ||||
|             # XXX: this might be why the profiler only has exits? | ||||
|             return | ||||
| 
 | ||||
|         x_out, y_out, connect = self.format_xy( | ||||
|  | @ -1161,14 +1144,11 @@ class Renderer(msgspec.Struct): | |||
| 
 | ||||
|             elif should_ds and uppx > 1: | ||||
| 
 | ||||
|                 x_out, y_out, ymn, ymx = xy_downsample( | ||||
|                 x_out, y_out = xy_downsample( | ||||
|                     x_out, | ||||
|                     y_out, | ||||
|                     uppx, | ||||
|                 ) | ||||
|                 self.flow.yrange = ymn, ymx | ||||
|                 # print(f'{self.flow.name} post ds: ymn, ymx: {ymn},{ymx}') | ||||
| 
 | ||||
|                 reset = True | ||||
|                 profiler(f'FULL PATH downsample redraw={should_ds}') | ||||
|                 self._in_ds = True | ||||
|  |  | |||
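The ``maxmin`` hunk above moves from early returns back to one nested block, but the memoization idea is unchanged: y-range results are cached in ``self._mxmns`` keyed by the ``(lbar, rbar)`` x-range tuple, and empty views are never cached. A reduced standalone sketch (names here are illustrative, not the real ``Flow`` API):

    import numpy as np
    from typing import Optional

    class RangeMaxMin:
        def __init__(self, array: np.ndarray):
            self.array = array
            self._mxmns: dict[tuple[int, int], tuple[float, float]] = {}

        def maxmin(self, lbar: int, rbar: int) -> Optional[tuple[float, float]]:
            rkey = (lbar, rbar)
            cached = self._mxmns.get(rkey)
            if cached is not None:
                return cached

            view = self.array[lbar:rbar + 1]
            if not view.size:
                # nothing in view yet -> don't cache a bogus range
                return None

            mxmn = float(view.min()), float(view.max())
            self._mxmns[rkey] = mxmn
            return mxmn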
|  | @ -619,7 +619,7 @@ class FillStatusBar(QProgressBar): | |||
|         # color: #19232D; | ||||
|         # width: 10px; | ||||
| 
 | ||||
|         self.setRange(0, int(slots)) | ||||
|         self.setRange(0, slots) | ||||
|         self.setValue(value) | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -27,13 +27,12 @@ from itertools import cycle | |||
| from typing import Optional, AsyncGenerator, Any | ||||
| 
 | ||||
| import numpy as np | ||||
| import msgspec | ||||
| from pydantic import create_model | ||||
| import tractor | ||||
| import pyqtgraph as pg | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| 
 | ||||
| from piker.data.types import Struct | ||||
| from ._axes import PriceAxis | ||||
| from .._cacheables import maybe_open_context | ||||
| from ..calc import humanize | ||||
|  | @ -54,7 +53,7 @@ from ._forms import ( | |||
| from ..fsp._api import maybe_mk_fsp_shm, Fsp | ||||
| from ..fsp import cascade | ||||
| from ..fsp._volume import ( | ||||
|     # tina_vwap, | ||||
|     tina_vwap, | ||||
|     dolla_vlm, | ||||
|     flow_rates, | ||||
| ) | ||||
|  | @ -154,13 +153,12 @@ async def open_fsp_sidepane( | |||
|     ) | ||||
| 
 | ||||
|     # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation | ||||
|     FspConfig = msgspec.defstruct( | ||||
|         "Point", | ||||
|         [('name', name)] + list(params.items()), | ||||
|         bases=(Struct,), | ||||
|     FspConfig = create_model( | ||||
|         'FspConfig', | ||||
|         name=name, | ||||
|         **params, | ||||
|     ) | ||||
|     model = FspConfig(name=name, **params) | ||||
|     sidepane.model = model | ||||
|     sidepane.model = FspConfig() | ||||
| 
 | ||||
|     # just a logger for now until we get fsp configs up and running. | ||||
|     async def settings_change( | ||||
|  | @ -442,9 +440,7 @@ class FspAdmin: | |||
|                         # if the chart isn't hidden try to update | ||||
|                         # the data on screen. | ||||
|                         if not self.linked.isHidden(): | ||||
|                             log.debug( | ||||
|                                 f'Re-syncing graphics for fsp: {ns_path}' | ||||
|                             ) | ||||
|                             log.debug(f'Re-syncing graphics for fsp: {ns_path}') | ||||
|                             self.linked.graphics_cycle( | ||||
|                                 trigger_all=True, | ||||
|                                 prepend_update_index=info['first'], | ||||
|  | @ -473,10 +469,9 @@ class FspAdmin: | |||
|             target=target, | ||||
|             readonly=True, | ||||
|         ) | ||||
|         self._flow_registry[( | ||||
|             self.src_shm._token, | ||||
|             target.name | ||||
|         )] = dst_shm._token | ||||
|         self._flow_registry[ | ||||
|             (self.src_shm._token, target.name) | ||||
|         ] = dst_shm._token | ||||
| 
 | ||||
|         # if not opened: | ||||
|         #     raise RuntimeError( | ||||
|  | @ -644,25 +639,20 @@ async def open_vlm_displays( | |||
|             names: list[str], | ||||
| 
 | ||||
|         ) -> tuple[float, float]: | ||||
|             ''' | ||||
|             Flows "group" maxmin loop; assumes all named flows | ||||
|             are in the same co-domain and thus can be sorted | ||||
|             as one set. | ||||
| 
 | ||||
|             Iterates all the named flows and calls the chart | ||||
|             api to find their range values and return. | ||||
| 
 | ||||
|             TODO: really we should probably have a more built-in API | ||||
|             for this? | ||||
| 
 | ||||
|             ''' | ||||
|             mx = 0 | ||||
|             for name in names: | ||||
|                 ymn, ymx = chart.maxmin(name=name) | ||||
|                 mx = max(mx, ymx) | ||||
| 
 | ||||
|                 mxmn = chart.maxmin(name=name) | ||||
|                 if mxmn: | ||||
|                     ymax = mxmn[1] | ||||
|                     if ymax > mx: | ||||
|                         mx = ymax | ||||
| 
 | ||||
|             return 0, mx | ||||
| 
 | ||||
|         chart.view.maxmin = partial(multi_maxmin, names=['volume']) | ||||
| 
 | ||||
|         # TODO: fix the x-axis label issue where if you put | ||||
|         # the axis on the left it's totally not lined up... | ||||
|         # show volume units value on LHS (for dinkus) | ||||
|  | @ -786,7 +776,6 @@ async def open_vlm_displays( | |||
| 
 | ||||
|             ) -> None: | ||||
|                 for name in names: | ||||
| 
 | ||||
|                     if 'dark' in name: | ||||
|                         color = dark_vlm_color | ||||
|                     elif 'rate' in name: | ||||
|  |  | |||
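``open_fsp_sidepane`` above goes back to pydantic's dynamic-model factory for per-fsp config models. With ``create_model`` (pydantic v1), bare keyword values become field defaults with their types inferred. A hedged sketch with made-up fsp params:

    from pydantic import create_model

    # hypothetical params; the real ones come from the fsp's config dict
    params = {'period': 14, 'smooth': 3}

    FspConfig = create_model(
        'FspConfig',
        name='rsi',   # plain value -> default with inferred ``str`` type
        **params,
    )
    cfg = FspConfig()
    print(cfg.name, cfg.period, cfg.smooth)  # rsi 14 3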
|  | @ -221,7 +221,6 @@ async def handle_viewmode_kb_inputs( | |||
|             # TODO: show pp config mini-params in status bar widget | ||||
|             # mode.pp_config.show() | ||||
| 
 | ||||
|             trigger_type: str = 'dark' | ||||
|             if ( | ||||
|                 # 's' for "submit" to activate "live" order | ||||
|                 Qt.Key_S in pressed or | ||||
|  | @ -229,6 +228,9 @@ async def handle_viewmode_kb_inputs( | |||
|             ): | ||||
|                 trigger_type: str = 'live' | ||||
| 
 | ||||
|             else: | ||||
|                 trigger_type: str = 'dark' | ||||
| 
 | ||||
|             # order mode trigger "actions" | ||||
|             if Qt.Key_D in pressed:  # for "damp eet" | ||||
|                 action = 'sell' | ||||
|  | @ -395,11 +397,8 @@ class ChartView(ViewBox): | |||
| 
 | ||||
|         ''' | ||||
|         if self._ic is None: | ||||
|             try: | ||||
|             self.chart.pause_all_feeds() | ||||
|             self._ic = trio.Event() | ||||
|             except RuntimeError: | ||||
|                 pass | ||||
| 
 | ||||
|     def signal_ic( | ||||
|         self, | ||||
|  | @ -412,12 +411,9 @@ class ChartView(ViewBox): | |||
| 
 | ||||
|         ''' | ||||
|         if self._ic: | ||||
|             try: | ||||
|             self._ic.set() | ||||
|             self._ic = None | ||||
|             self.chart.resume_all_feeds() | ||||
|             except RuntimeError: | ||||
|                 pass | ||||
| 
 | ||||
|     @asynccontextmanager | ||||
|     async def open_async_input_handler( | ||||
|  | @ -673,10 +669,7 @@ class ChartView(ViewBox): | |||
|                 # XXX: WHY | ||||
|                 ev.accept() | ||||
| 
 | ||||
|                 try: | ||||
|                 self.start_ic() | ||||
|                 except RuntimeError: | ||||
|                     pass | ||||
|                 # if self._ic is None: | ||||
|                 #     self.chart.pause_all_feeds() | ||||
|                 #     self._ic = trio.Event() | ||||
|  | @ -930,7 +923,6 @@ class ChartView(ViewBox): | |||
|                     # XXX: super important to be aware of this. | ||||
|                     # or not flow.graphics.isVisible() | ||||
|                 ): | ||||
|                     # print(f'skipping {flow.name}') | ||||
|                     continue | ||||
| 
 | ||||
|                 # pass in no array which will read and render from the last | ||||
|  |  | |||
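The ``start_ic``/``signal_ic`` hunks above drop the ``try/except RuntimeError`` guards around the interaction-cycle gate: a ``trio.Event`` that pauses feeds while the user drags or zooms and resumes them on release. The core pattern reduced to a sketch (class and method names are illustrative):

    from typing import Optional
    import trio

    class InteractionGate:
        def __init__(self):
            self._ic: Optional[trio.Event] = None

        def start_ic(self) -> None:
            # user began interacting: gate closes until released
            if self._ic is None:
                self._ic = trio.Event()

        def signal_ic(self) -> None:
            # interaction done: wake any task blocked in ``wait_released()``
            if self._ic:
                self._ic.set()
                self._ic = None

        async def wait_released(self) -> None:
            if self._ic:
                await self._ic.wait()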
|  | @ -421,10 +421,6 @@ class LevelLine(pg.InfiniteLine): | |||
| 
 | ||||
|         return path | ||||
| 
 | ||||
|     @property | ||||
|     def marker(self) -> LevelMarker: | ||||
|         return self._marker | ||||
| 
 | ||||
|     def hoverEvent(self, ev): | ||||
|         ''' | ||||
|         Mouse hover callback. | ||||
|  |  | |||
|  | @ -22,9 +22,12 @@ from __future__ import annotations | |||
| from typing import ( | ||||
|     Optional, Generic, | ||||
|     TypeVar, Callable, | ||||
|     Literal, | ||||
| ) | ||||
| import enum | ||||
| import sys | ||||
| 
 | ||||
| # from pydantic import BaseModel, validator | ||||
| from pydantic import BaseModel, validator | ||||
| from pydantic.generics import GenericModel | ||||
| from PyQt5.QtWidgets import ( | ||||
|     QWidget, | ||||
|  | @ -35,7 +38,6 @@ from ._forms import ( | |||
|     # FontScaledDelegate, | ||||
|     Edit, | ||||
| ) | ||||
| from ..data.types import Struct | ||||
| 
 | ||||
| 
 | ||||
| DataType = TypeVar('DataType') | ||||
|  | @ -60,7 +62,7 @@ class Selection(Field[DataType], Generic[DataType]): | |||
|     options: dict[str, DataType] | ||||
|     # value: DataType = None | ||||
| 
 | ||||
|     # @validator('value')  # , always=True) | ||||
|     @validator('value')  # , always=True) | ||||
|     def set_value_first( | ||||
|         cls, | ||||
| 
 | ||||
|  | @ -98,7 +100,7 @@ class Edit(Field[DataType], Generic[DataType]): | |||
|     widget_factory = Edit | ||||
| 
 | ||||
| 
 | ||||
| class AllocatorPane(Struct): | ||||
| class AllocatorPane(BaseModel): | ||||
| 
 | ||||
|     account = Selection[str]( | ||||
|         options=dict.fromkeys( | ||||
|  |  | |||
|  | @ -80,8 +80,8 @@ class ComposedGridLayout: | |||
|     ``<axis_name>i`` in the layout. | ||||
| 
 | ||||
|     The ``item: PlotItem`` passed to the constructor's grid layout is | ||||
|     used verbatim as the "main plot" who's view box is given precedence | ||||
|     for input handling. The main plot's axes are removed from its | ||||
|     used verbatim as the "main plot" who's view box is give precedence | ||||
|     for input handling. The main plot's axes are removed from it's | ||||
|     layout and placed in the surrounding exterior layouts to allow for | ||||
|     re-ordering if desired. | ||||
| 
 | ||||
|  |  | |||
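The ``Selection`` hunk re-enables a pydantic ``@validator`` on ``value``; in pydantic v1 a validator can read already-validated sibling fields via ``values`` and so fill an unset ``value`` from the first ``options`` entry. A reduced sketch without the ``Field``/``Generic`` machinery:

    from typing import Optional
    from pydantic import BaseModel, validator

    class Selection(BaseModel):
        options: dict[str, str]
        value: Optional[str] = None

        @validator('value', always=True)
        def set_value_first(cls, v, values):
            # default to the first option when no explicit value is given
            opts = values.get('options') or {}
            return v if v is not None else next(iter(opts.values()), None)

    sel = Selection(options={'alpha': 'a', 'beta': 'b'})
    print(sel.value)  # a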
|  | @ -49,17 +49,12 @@ def xy_downsample( | |||
| 
 | ||||
|     x_spacer: float = 0.5, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     np.ndarray, | ||||
|     np.ndarray, | ||||
|     float, | ||||
|     float, | ||||
| ]: | ||||
| ) -> tuple[np.ndarray, np.ndarray]: | ||||
| 
 | ||||
|     # downsample whenever more than 1 pixel per datum can be shown. | ||||
|     # always refresh data bounds until we get diffing | ||||
|     # working properly, see above.. | ||||
|     bins, x, y, ymn, ymx = ds_m4( | ||||
|     bins, x, y = ds_m4( | ||||
|         x, | ||||
|         y, | ||||
|         uppx, | ||||
|  | @ -72,7 +67,7 @@ def xy_downsample( | |||
|     )).flatten() | ||||
|     y = y.flatten() | ||||
| 
 | ||||
|     return x, y, ymn, ymx | ||||
|     return x, y | ||||
| 
 | ||||
| 
 | ||||
| @njit( | ||||
|  |  | |||
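After ``ds_m4`` returns the ``(frames, 4)`` bins, ``xy_downsample`` (above) broadcasts each bin's single x value against a ``[-x_spacer, 0, 0, x_spacer]`` offset row so the four y-values land at distinct x positions, then flattens both arrays for path generation. That shaping step in isolation:

    import numpy as np

    x = np.array([0., 1., 2.])          # one x per downsampled bin
    y = np.arange(12.).reshape(3, 4)    # (frames, 4) first/min/max/last
    x_spacer = 0.5

    x4 = (
        np.broadcast_to(x[:, None], y.shape)
        + np.array([-x_spacer, 0, 0, x_spacer])
    ).flatten()
    y4 = y.flatten()

    assert x4.shape == y4.shape == (12,)
    print(x4[:4])  # [-0.5  0.   0.   0.5]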
|  | @ -19,7 +19,6 @@ Position info and display | |||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from copy import copy | ||||
| from dataclasses import dataclass | ||||
| from functools import partial | ||||
| from math import floor, copysign | ||||
|  | @ -106,8 +105,8 @@ async def update_pnl_from_feed( | |||
|                             # compute and display pnl status | ||||
|                             order_mode.pane.pnl_label.format( | ||||
|                                 pnl=copysign(1, size) * pnl( | ||||
|                                     # live.ppu, | ||||
|                                     order_mode.current_pp.live_pp.ppu, | ||||
|                                     # live.avg_price, | ||||
|                                     order_mode.current_pp.live_pp.avg_price, | ||||
|                                     tick['price'], | ||||
|                                 ), | ||||
|                             ) | ||||
|  | @ -166,29 +165,12 @@ class SettingsPane: | |||
|         key: str, | ||||
|         value: str, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Try to apply some input setting (by the user); revert to the | ||||
|         previous setting if it fails and display the new value if applied. | ||||
| 
 | ||||
|         ''' | ||||
|         self.apply_setting(key, value) | ||||
|         self.update_status_ui(pp=self.order_mode.current_pp) | ||||
| 
 | ||||
|     def apply_setting( | ||||
|         self, | ||||
| 
 | ||||
|         key: str, | ||||
|         value: str, | ||||
| 
 | ||||
|     ) -> bool: | ||||
|         ''' | ||||
|         Called on any order pane edit field value change. | ||||
| 
 | ||||
|         ''' | ||||
|         mode = self.order_mode | ||||
|         tracker = mode.current_pp | ||||
|         alloc = tracker.alloc | ||||
| 
 | ||||
|         # an account switch request | ||||
|         if key == 'account': | ||||
|  | @ -224,28 +206,25 @@ class SettingsPane: | |||
|             # load the new account's allocator | ||||
|             alloc = tracker.alloc | ||||
| 
 | ||||
|         else: | ||||
|             tracker = mode.current_pp | ||||
|             alloc = tracker.alloc | ||||
| 
 | ||||
|         size_unit = alloc.size_unit | ||||
| 
 | ||||
|         # WRITE any settings to current pp's allocator | ||||
|         try: | ||||
|             if key == 'size_unit': | ||||
|                 # implicit re-write of value if input | ||||
|                 # is the "text name" of the units. | ||||
|                 # yah yah, i know this is badd.. | ||||
|                 alloc.size_unit = value | ||||
| 
 | ||||
|         elif key != 'account':  # numeric fields entry | ||||
|             try: | ||||
|             else: | ||||
|                 value = puterize(value) | ||||
|             except ValueError as err: | ||||
|                 log.error(err.args[0]) | ||||
|                 return False | ||||
| 
 | ||||
|                 if key == 'limit': | ||||
|                 if value <= 0: | ||||
|                     log.error('limit must be > 0') | ||||
|                     return False | ||||
| 
 | ||||
|                     pp = mode.current_pp.live_pp | ||||
| 
 | ||||
|                 if alloc.size_unit == 'currency': | ||||
|                     if size_unit == 'currency': | ||||
|                         dsize = pp.dsize | ||||
|                         if dsize > value: | ||||
|                             log.error( | ||||
|  | @ -267,42 +246,29 @@ class SettingsPane: | |||
| 
 | ||||
|                 elif key == 'slots': | ||||
|                     if value <= 0: | ||||
|                     # raise ValueError('slots must be > 0') | ||||
|                     log.error('slots must be > 0') | ||||
|                     return False | ||||
| 
 | ||||
|                         raise ValueError('slots must be > 0') | ||||
|                     alloc.slots = int(value) | ||||
| 
 | ||||
|                 else: | ||||
|                     log.error(f'Unknown setting {key}') | ||||
|                     raise ValueError | ||||
| 
 | ||||
|             # don't log account "change" case since it'll be submitted | ||||
|             # on every mouse interaction. | ||||
|             log.info(f'settings change: {key}: {value}') | ||||
| 
 | ||||
|         # TODO: maybe return a diff of settings so if we get an error we | ||||
|         # can have general input handling code to report it through the | ||||
|         # UI in some way? | ||||
|         return True | ||||
| 
 | ||||
|     def update_status_ui( | ||||
|         self, | ||||
|         pp: PositionTracker, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         alloc = pp.alloc | ||||
|         slots = alloc.slots | ||||
|         used = alloc.slots_used(pp.live_pp) | ||||
|         except ValueError: | ||||
|             log.error(f'Invalid value for `{key}`: {value}') | ||||
| 
 | ||||
|         # READ out settings and update the status UI / settings widgets | ||||
|         suffix = {'currency': ' $', 'units': ' u'}[alloc.size_unit] | ||||
|         suffix = {'currency': ' $', 'units': ' u'}[size_unit] | ||||
|         limit = alloc.limit() | ||||
| 
 | ||||
|         # TODO: a reverse look up from the position to the equivalent | ||||
|         # account(s), if none then look to user config for default? | ||||
|         self.update_status_ui(pp=tracker) | ||||
| 
 | ||||
|         step_size, currency_per_slot = alloc.step_sizes() | ||||
| 
 | ||||
|         if alloc.size_unit == 'currency': | ||||
|         if size_unit == 'currency': | ||||
|             step_size = currency_per_slot | ||||
| 
 | ||||
|         self.step_label.format( | ||||
|  | @ -320,7 +286,23 @@ class SettingsPane: | |||
|         self.form.fields['limit'].setText(str(limit)) | ||||
| 
 | ||||
|         # update of level marker size label based on any new settings | ||||
|         pp.update_from_pp() | ||||
|         tracker.update_from_pp() | ||||
| 
 | ||||
|         # TODO: maybe return a diff of settings so if we get an error we | ||||
|         # can have general input handling code to report it through the | ||||
|         # UI in some way? | ||||
|         return True | ||||
| 
 | ||||
|     def update_status_ui( | ||||
|         self, | ||||
| 
 | ||||
|         pp: PositionTracker, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         alloc = pp.alloc | ||||
|         slots = alloc.slots | ||||
|         used = alloc.slots_used(pp.live_pp) | ||||
| 
 | ||||
|         # calculate proportion of position size limit | ||||
|         # that exists and display in fill bar | ||||
|  | @ -374,7 +356,7 @@ class SettingsPane: | |||
|             # last historical close price | ||||
|             last = feed.shm.array[-1][['close']][0] | ||||
|             pnl_value = copysign(1, size) * pnl( | ||||
|                 tracker.live_pp.ppu, | ||||
|                 tracker.live_pp.avg_price, | ||||
|                 last, | ||||
|             ) | ||||
| 
 | ||||
|  | @ -458,14 +440,6 @@ def position_line( | |||
|     return line | ||||
| 
 | ||||
| 
 | ||||
| _derivs = ( | ||||
|     'future', | ||||
|     'continuous_future', | ||||
|     'option', | ||||
|     'futures_option', | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| class PositionTracker: | ||||
|     ''' | ||||
|     Track and display real-time positions for a single symbol | ||||
|  | @ -502,7 +476,7 @@ class PositionTracker: | |||
| 
 | ||||
|         self.alloc = alloc | ||||
|         self.startup_pp = startup_pp | ||||
|         self.live_pp = copy(startup_pp) | ||||
|         self.live_pp = startup_pp.copy() | ||||
| 
 | ||||
|         view = chart.getViewBox() | ||||
| 
 | ||||
|  | @ -572,71 +546,31 @@ class PositionTracker: | |||
|     def update_from_pp( | ||||
|         self, | ||||
|         position: Optional[Position] = None, | ||||
|         set_as_startup: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Update graphics and data from average price and size passed in | ||||
|         our EMS ``BrokerdPosition`` msg. | ||||
|         '''Update graphics and data from average price and size passed in our | ||||
|         EMS ``BrokerdPosition`` msg. | ||||
| 
 | ||||
|         ''' | ||||
|         # live pp updates | ||||
|         pp = position or self.live_pp | ||||
|         if set_as_startup: | ||||
|             startup_pp = pp | ||||
|         else: | ||||
|             startup_pp = self.startup_pp | ||||
|         alloc = self.alloc | ||||
| 
 | ||||
|         # update allocator settings | ||||
|         asset_type = pp.symbol.type_key | ||||
| 
 | ||||
|         # specific configs by asset class / type | ||||
|         if asset_type in _derivs: | ||||
|             # since it's harder to know how currency "applies" in this case | ||||
|             # given leverage properties | ||||
|             alloc.size_unit = '# units' | ||||
| 
 | ||||
|             # set units limit to slots size thus making the next | ||||
|             # entry step 1.0 | ||||
|             alloc.units_limit = alloc.slots | ||||
| 
 | ||||
|         else: | ||||
|             alloc.size_unit = 'currency' | ||||
| 
 | ||||
|         # if the current position is already greater then the limit | ||||
|         # settings, increase the limit to the current position | ||||
|         if alloc.size_unit == 'currency': | ||||
|             startup_size = self.startup_pp.size * startup_pp.ppu | ||||
| 
 | ||||
|             if startup_size > alloc.currency_limit: | ||||
|                 alloc.currency_limit = round(startup_size, ndigits=2) | ||||
| 
 | ||||
|         else: | ||||
|             startup_size = abs(startup_pp.size) | ||||
| 
 | ||||
|             if startup_size > alloc.units_limit: | ||||
|                 alloc.units_limit = startup_size | ||||
| 
 | ||||
|                 if asset_type in _derivs: | ||||
|                     alloc.slots = alloc.units_limit | ||||
| 
 | ||||
|         self.update_line( | ||||
|             pp.ppu, | ||||
|             pp.avg_price, | ||||
|             pp.size, | ||||
|             self.chart.linked.symbol.lot_size_digits, | ||||
|         ) | ||||
| 
 | ||||
|         # label updates | ||||
|         self.size_label.fields['slots_used'] = round( | ||||
|             alloc.slots_used(pp), ndigits=1) | ||||
|             self.alloc.slots_used(pp), ndigits=1) | ||||
|         self.size_label.render() | ||||
| 
 | ||||
|         if pp.size == 0: | ||||
|             self.hide() | ||||
| 
 | ||||
|         else: | ||||
|             self._level_marker.level = pp.ppu | ||||
|             self._level_marker.level = pp.avg_price | ||||
| 
 | ||||
|             # these updates are critical to avoid lag on view/scene changes | ||||
|             self._level_marker.update()  # trigger paint | ||||
|  |  | |||
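Both PnL call sites in this file follow the same arithmetic: a fractional-return helper applied to the position's average entry price and the latest price, with the sign flipped for shorts via ``copysign``. A hedged sketch of that math (the real ``piker.calc.pnl`` may differ in detail):

    from math import copysign

    def pnl(avg_price: float, last: float) -> float:
        # hypothetical fractional-return helper matching the call sites
        return (last - avg_price) / avg_price

    size = -10    # short 10 units entered at 100
    print(copysign(1, size) * pnl(100.0, 110.0))  # -0.1 -> down 10%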
|  | @ -27,20 +27,20 @@ import time | |||
| from typing import Optional, Dict, Callable, Any | ||||
| import uuid | ||||
| 
 | ||||
| from pydantic import BaseModel | ||||
| import tractor | ||||
| import trio | ||||
| from PyQt5.QtCore import Qt | ||||
| 
 | ||||
| from .. import config | ||||
| from ..pp import Position | ||||
| from ..clearing._client import open_ems, OrderBook | ||||
| from ..clearing._allocate import ( | ||||
|     mk_allocator, | ||||
|     Position, | ||||
| ) | ||||
| from ._style import _font | ||||
| from ..data._source import Symbol | ||||
| from ..data.feed import Feed | ||||
| from ..data.types import Struct | ||||
| from ..log import get_logger | ||||
| from ._editors import LineEditor, ArrowEditor | ||||
| from ._lines import order_line, LevelLine | ||||
|  | @ -49,23 +49,17 @@ from ._position import ( | |||
|     SettingsPane, | ||||
| ) | ||||
| from ._forms import FieldsForm | ||||
| # from ._label import FormatLabel | ||||
| from ._window import MultiStatus | ||||
| from ..clearing._messages import ( | ||||
|     Order, | ||||
|     Status, | ||||
|     # BrokerdOrder, | ||||
|     # BrokerdStatus, | ||||
|     BrokerdPosition, | ||||
| ) | ||||
| from ..clearing._messages import Order, BrokerdPosition | ||||
| from ._forms import open_form_input_handling | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| class Dialog(Struct): | ||||
|     ''' | ||||
|     Trade dialogue meta-data describing the lifetime | ||||
| class OrderDialog(BaseModel): | ||||
|     '''Trade dialogue meta-data describing the lifetime | ||||
|     of an order submission to ``emsd`` from a chart. | ||||
| 
 | ||||
|     ''' | ||||
|  | @ -78,6 +72,41 @@ class Dialog(Struct): | |||
|     msgs: dict[str, dict] = {} | ||||
|     fills: Dict[str, Any] = {} | ||||
| 
 | ||||
|     class Config: | ||||
|         arbitrary_types_allowed = True | ||||
|         underscore_attrs_are_private = False | ||||
| 
 | ||||
| 
 | ||||
| def on_level_change_update_next_order_info( | ||||
| 
 | ||||
|     level: float, | ||||
| 
 | ||||
|     # these are all ``partial``-ed in at callback assignment time. | ||||
|     line: LevelLine, | ||||
|     order: Order, | ||||
|     tracker: PositionTracker, | ||||
| 
 | ||||
| ) -> None: | ||||
|     '''A callback applied for each level change to the line | ||||
|     which will recompute the order size based on allocator | ||||
|     settings. This is assigned inside | ||||
|     ``OrderMode.line_from_order()`` | ||||
| 
 | ||||
|     ''' | ||||
|     # NOTE: the ``Order.account`` is set at order stage time | ||||
|     # inside ``OrderMode.line_from_order()``. | ||||
|     order_info = tracker.alloc.next_order_info( | ||||
|         startup_pp=tracker.startup_pp, | ||||
|         live_pp=tracker.live_pp, | ||||
|         price=level, | ||||
|         action=order.action, | ||||
|     ) | ||||
|     line.update_labels(order_info) | ||||
| 
 | ||||
|     # update bound-in staged order | ||||
|     order.price = level | ||||
|     order.size = order_info['size'] | ||||
| 
 | ||||
| 
 | ||||
| @dataclass | ||||
| class OrderMode: | ||||
|  | @ -114,7 +143,7 @@ class OrderMode: | |||
|     current_pp: Optional[PositionTracker] = None | ||||
|     active: bool = False | ||||
|     name: str = 'order' | ||||
|     dialogs: dict[str, Dialog] = field(default_factory=dict) | ||||
|     dialogs: dict[str, OrderDialog] = field(default_factory=dict) | ||||
| 
 | ||||
|     _colors = { | ||||
|         'alert': 'alert_yellow', | ||||
|  | @ -123,45 +152,12 @@ class OrderMode: | |||
|     } | ||||
|     _staged_order: Optional[Order] = None | ||||
| 
 | ||||
|     def on_level_change_update_next_order_info( | ||||
|         self, | ||||
|         level: float, | ||||
| 
 | ||||
|         # these are all ``partial``-ed in at callback assignment time. | ||||
|         line: LevelLine, | ||||
|         order: Order, | ||||
|         tracker: PositionTracker, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         A callback applied for each level change to the line | ||||
|         which will recompute the order size based on allocator | ||||
|         settings. This is assigned inside | ||||
|         ``OrderMode.line_from_order()`` | ||||
| 
 | ||||
|         ''' | ||||
|         # NOTE: the ``Order.account`` is set at order stage time inside | ||||
|         # ``OrderMode.line_from_order()`` or is inside ``Order`` msg | ||||
|         # field for loaded orders. | ||||
|         order_info = tracker.alloc.next_order_info( | ||||
|             startup_pp=tracker.startup_pp, | ||||
|             live_pp=tracker.live_pp, | ||||
|             price=level, | ||||
|             action=order.action, | ||||
|         ) | ||||
|         line.update_labels(order_info) | ||||
| 
 | ||||
|         # update bound-in staged order | ||||
|         order.price = level | ||||
|         order.size = order_info['size'] | ||||
| 
 | ||||
|         # when an order is changed we flip the settings side-pane to | ||||
|         # reflect the corresponding account and pos info. | ||||
|         self.pane.on_ui_settings_change('account', order.account) | ||||
| 
 | ||||
|     def line_from_order( | ||||
|         self, | ||||
| 
 | ||||
|         order: Order, | ||||
|         symbol: Symbol, | ||||
| 
 | ||||
|         **line_kwargs, | ||||
| 
 | ||||
|     ) -> LevelLine: | ||||
|  | @ -179,8 +175,8 @@ class OrderMode: | |||
|             color=self._colors[order.action], | ||||
| 
 | ||||
|             dotted=True if ( | ||||
|                 order.exec_mode == 'dark' | ||||
|                 and order.action != 'alert' | ||||
|                 order.exec_mode == 'dark' and | ||||
|                 order.action != 'alert' | ||||
|             ) else False, | ||||
| 
 | ||||
|             **line_kwargs, | ||||
|  | @ -190,12 +186,10 @@ class OrderMode: | |||
|         # immediately | ||||
|         if order.action != 'alert': | ||||
|             line._on_level_change = partial( | ||||
|                 self.on_level_change_update_next_order_info, | ||||
|                 on_level_change_update_next_order_info, | ||||
|                 line=line, | ||||
|                 order=order, | ||||
|                 # use the corresponding position tracker for the | ||||
|                 # order's account. | ||||
|                 tracker=self.trackers[order.account], | ||||
|                 tracker=self.current_pp, | ||||
|             ) | ||||
| 
 | ||||
|         else: | ||||
|  | @ -244,6 +238,8 @@ class OrderMode: | |||
| 
 | ||||
|         line = self.line_from_order( | ||||
|             order, | ||||
|             symbol, | ||||
| 
 | ||||
|             show_markers=True, | ||||
|             # just for the stage line to avoid | ||||
|             # flickering while moving the cursor | ||||
|  | @ -255,6 +251,7 @@ class OrderMode: | |||
|             # prevent flickering of marker while moving/tracking cursor | ||||
|             only_show_markers_on_hover=False, | ||||
|         ) | ||||
| 
 | ||||
|         line = self.lines.stage_line(line) | ||||
| 
 | ||||
|         # hide crosshair y-line and label | ||||
|  | @ -267,26 +264,28 @@ class OrderMode: | |||
| 
 | ||||
|     def submit_order( | ||||
|         self, | ||||
|         send_msg: bool = True, | ||||
|         order: Optional[Order] = None, | ||||
| 
 | ||||
|     ) -> Dialog: | ||||
|         ''' | ||||
|         Send execution order to EMS and return a level line to | ||||
|     ) -> OrderDialog: | ||||
|         '''Send execution order to EMS and return a level line to | ||||
|         represent the order on a chart. | ||||
| 
 | ||||
|         ''' | ||||
|         if not order: | ||||
|         staged = self._staged_order | ||||
|             # apply order fields for ems | ||||
|         symbol: Symbol = staged.symbol | ||||
|         oid = str(uuid.uuid4()) | ||||
|             order = staged.copy() | ||||
|             order.oid = oid | ||||
| 
 | ||||
|         order.symbol = order.symbol.front_fqsn() | ||||
|         # format order data for ems | ||||
|         fqsn = symbol.front_fqsn() | ||||
|         order = staged.copy( | ||||
|             update={ | ||||
|                 'symbol': fqsn, | ||||
|                 'oid': oid, | ||||
|             } | ||||
|         ) | ||||
| 
 | ||||
|         line = self.line_from_order( | ||||
|             order, | ||||
|             symbol, | ||||
| 
 | ||||
|             show_markers=True, | ||||
|             only_show_markers_on_hover=True, | ||||
|  | @ -304,17 +303,17 @@ class OrderMode: | |||
|         # color once the submission ack arrives. | ||||
|         self.lines.submit_line( | ||||
|             line=line, | ||||
|             uuid=order.oid, | ||||
|             uuid=oid, | ||||
|         ) | ||||
| 
 | ||||
|         dialog = Dialog( | ||||
|             uuid=order.oid, | ||||
|         dialog = OrderDialog( | ||||
|             uuid=oid, | ||||
|             order=order, | ||||
|             symbol=order.symbol, | ||||
|             symbol=symbol, | ||||
|             line=line, | ||||
|             last_status_close=self.multistatus.open_status( | ||||
|                 f'submitting {order.exec_mode}-{order.action}', | ||||
|                 final_msg=f'submitted {order.exec_mode}-{order.action}', | ||||
|                 f'submitting {self._trigger_type}-{order.action}', | ||||
|                 final_msg=f'submitted {self._trigger_type}-{order.action}', | ||||
|                 clear_on_next=True, | ||||
|             ) | ||||
|         ) | ||||
|  | @ -324,21 +323,14 @@ class OrderMode: | |||
| 
 | ||||
|         # enter submission which will be popped once a response | ||||
|         # from the EMS is received to move the order to a different# status | ||||
|         self.dialogs[order.oid] = dialog | ||||
|         self.dialogs[oid] = dialog | ||||
| 
 | ||||
|         # hook up mouse drag handlers | ||||
|         line._on_drag_start = self.order_line_modify_start | ||||
|         line._on_drag_end = self.order_line_modify_complete | ||||
| 
 | ||||
|         # send order cmd to ems | ||||
|         if send_msg: | ||||
|         self.book.send(order) | ||||
|         else: | ||||
|             # just register for control over this order | ||||
|             # TODO: some kind of mini-perms system here based on | ||||
|             # an out-of-band tagging/auth sub-sys for multiplayer | ||||
|             # order control? | ||||
|             self.book._sent_orders[order.oid] = order | ||||
| 
 | ||||
|         return dialog | ||||
| 
 | ||||
|  | @ -376,7 +368,7 @@ class OrderMode: | |||
|         self, | ||||
|         uuid: str | ||||
| 
 | ||||
|     ) -> Dialog: | ||||
|     ) -> OrderDialog: | ||||
|         ''' | ||||
|         Order submitted status event handler. | ||||
| 
 | ||||
|  | @ -431,7 +423,7 @@ class OrderMode: | |||
|         self, | ||||
| 
 | ||||
|         uuid: str, | ||||
|         msg: Status, | ||||
|         msg: Dict[str, Any], | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|  | @ -455,7 +447,7 @@ class OrderMode: | |||
| 
 | ||||
|                 # TODO: add in standard fill/exec info that maybe we | ||||
|                 # pack in a broker independent way? | ||||
|                 f'{msg.resp}: {msg.req.price}', | ||||
|                 f'{msg["resp"]}: {msg["trigger_price"]}', | ||||
|             ], | ||||
|         ) | ||||
|         log.runtime(result) | ||||
|  | @ -515,7 +507,7 @@ class OrderMode: | |||
|                     oid = dialog.uuid | ||||
| 
 | ||||
|                     cancel_status_close = self.multistatus.open_status( | ||||
|                         f'cancelling order {oid}', | ||||
|                         f'cancelling order {oid[:6]}', | ||||
|                         group_key=key, | ||||
|                     ) | ||||
|                     dialog.last_status_close = cancel_status_close | ||||
|  | @ -525,44 +517,6 @@ class OrderMode: | |||
| 
 | ||||
|         return ids | ||||
| 
 | ||||
|     def load_unknown_dialog_from_msg( | ||||
|         self, | ||||
|         msg: Status, | ||||
| 
 | ||||
|     ) -> Dialog: | ||||
|         # NOTE: the `.order` attr **must** be set with the | ||||
|         # equivalent order msg in order to be loaded. | ||||
|         order = msg.req | ||||
|         oid = str(msg.oid) | ||||
|         symbol = order.symbol | ||||
| 
 | ||||
|         # TODO: MEGA UGGG ZONEEEE! | ||||
|         src = msg.src | ||||
|         if ( | ||||
|             src | ||||
|             and src not in ('dark', 'paperboi') | ||||
|             and src not in symbol | ||||
|         ): | ||||
|             fqsn = symbol + '.' + src | ||||
|             brokername = src | ||||
|         else: | ||||
|             fqsn = symbol | ||||
|             *head, brokername = fqsn.rsplit('.') | ||||
| 
 | ||||
|         # fill out complex fields | ||||
|         order.oid = str(order.oid) | ||||
|         order.brokers = [brokername] | ||||
|         order.symbol = Symbol.from_fqsn( | ||||
|             fqsn=fqsn, | ||||
|             info={}, | ||||
|         ) | ||||
|         dialog = self.submit_order( | ||||
|             send_msg=False, | ||||
|             order=order, | ||||
|         ) | ||||
|         assert self.dialogs[oid] == dialog | ||||
|         return dialog | ||||
| 
 | ||||
| 
 | ||||
| @asynccontextmanager | ||||
| async def open_order_mode( | ||||
|  | @ -600,7 +554,6 @@ async def open_order_mode( | |||
|             trades_stream, | ||||
|             position_msgs, | ||||
|             brokerd_accounts, | ||||
|             ems_dialog_msgs, | ||||
|         ), | ||||
|         trio.open_nursery() as tn, | ||||
| 
 | ||||
|  | @ -624,9 +577,9 @@ async def open_order_mode( | |||
|             providers=symbol.brokers | ||||
|         ) | ||||
| 
 | ||||
|         # XXX: ``brokerd`` delivers a set of account names that it | ||||
|         # allows use of but the user also can define the accounts they'd | ||||
|         # like to use, in order, in their `brokers.toml` file. | ||||
|         # XXX: ``brokerd`` delivers a set of account names that it allows | ||||
|         # use of but the user also can define the accounts they'd like | ||||
|         # to use, in order, in their `brokers.toml` file. | ||||
|         accounts = {} | ||||
|         for name in brokerd_accounts: | ||||
|             # ensure name is in ``brokers.toml`` | ||||
|  | @ -639,6 +592,11 @@ async def open_order_mode( | |||
|             iter(accounts.keys()) | ||||
|         ) if accounts else 'paper' | ||||
| 
 | ||||
|         # NOTE: requires the backend exactly specifies | ||||
|         # the expected symbol key in its positions msg. | ||||
|         pp_msgs = position_msgs.get(symkey, ()) | ||||
|         pps_by_account = {msg['account']: msg for msg in pp_msgs} | ||||
| 
 | ||||
|         # update pp trackers with data relayed from ``brokerd``. | ||||
|         for account_name in accounts: | ||||
| 
 | ||||
|  | @ -646,11 +604,12 @@ async def open_order_mode( | |||
|             startup_pp = Position( | ||||
|                 symbol=symbol, | ||||
|                 size=0, | ||||
|                 ppu=0, | ||||
| 
 | ||||
|                 # XXX: BLEH, do we care about this on the client side? | ||||
|                 bsuid=symbol, | ||||
|                 avg_price=0, | ||||
|             ) | ||||
|             msg = pps_by_account.get(account_name) | ||||
|             if msg: | ||||
|                 log.info(f'Loading pp for {symkey}:\n{pformat(msg)}') | ||||
|                 startup_pp.update_from_msg(msg) | ||||
| 
 | ||||
|             # allocator config | ||||
|             alloc = mk_allocator( | ||||
|  | @ -685,7 +644,7 @@ async def open_order_mode( | |||
|         # setup order mode sidepane widgets | ||||
|         form: FieldsForm = chart.sidepane | ||||
|         form.vbox.setSpacing( | ||||
|             int((1 + 5 / 8) * _font.px_size) | ||||
|             int((1 + 5/8)*_font.px_size) | ||||
|         ) | ||||
| 
 | ||||
|         from ._feedstatus import mk_feed_label | ||||
|  | @ -735,7 +694,7 @@ async def open_order_mode( | |||
|         order_pane.order_mode = mode | ||||
| 
 | ||||
|         # select a pp to track | ||||
|         tracker: PositionTracker = trackers[pp_account] | ||||
|         tracker = trackers[pp_account] | ||||
|         mode.current_pp = tracker | ||||
|         tracker.show() | ||||
|         tracker.hide_info() | ||||
|  | @ -746,6 +705,7 @@ async def open_order_mode( | |||
|         # to order sync pane handler | ||||
|         for key in ('account', 'size_unit',): | ||||
|             w = form.fields[key] | ||||
| 
 | ||||
|             w.currentTextChanged.connect( | ||||
|                 partial( | ||||
|                     order_pane.on_selection_change, | ||||
|  | @ -768,18 +728,6 @@ async def open_order_mode( | |||
|         # Begin order-response streaming | ||||
|         done() | ||||
| 
 | ||||
|         # Pack position messages by account, should only be one-to-one. | ||||
|         # NOTE: requires the backend exactly specifies | ||||
|         # the expected symbol key in its positions msg. | ||||
|         for (broker, acctid), msgs in position_msgs.items(): | ||||
|             for msg in msgs: | ||||
|                 log.info(f'Loading pp for {symkey}:\n{pformat(msg)}') | ||||
|                 await process_trade_msg( | ||||
|                     mode, | ||||
|                     book, | ||||
|                     msg, | ||||
|                 ) | ||||
| 
 | ||||
|         # start async input handling for chart's view | ||||
|         async with ( | ||||
| 
 | ||||
|  | @ -798,61 +746,38 @@ async def open_order_mode( | |||
|             # to handle input since the ems connection is ready | ||||
|             started.set() | ||||
| 
 | ||||
|             for oid, msg in ems_dialog_msgs.items(): | ||||
| 
 | ||||
|                 # HACK ALERT: ensure a resp field is filled out since | ||||
|                 # technically the call below expects a ``Status``. TODO: | ||||
|                 # parse into proper ``Status`` equivalents ems-side? | ||||
|                 # msg.setdefault('resp', msg['broker_details']['resp']) | ||||
|                 # msg.setdefault('oid', msg['broker_details']['oid']) | ||||
|                 msg['brokerd_msg'] = msg | ||||
| 
 | ||||
|                 await process_trade_msg( | ||||
|                     mode, | ||||
|                     book, | ||||
|                     msg, | ||||
|                 ) | ||||
| 
 | ||||
|             tn.start_soon( | ||||
|                 process_trades_and_update_ui, | ||||
|                 trades_stream, | ||||
|                 tn, | ||||
|                 feed, | ||||
|                 mode, | ||||
|                 trades_stream, | ||||
|                 book, | ||||
|             ) | ||||
| 
 | ||||
|             yield mode | ||||
| 
 | ||||
| 
 | ||||
| async def process_trades_and_update_ui( | ||||
| 
 | ||||
|     trades_stream: tractor.MsgStream, | ||||
|     n: trio.Nursery, | ||||
|     feed: Feed, | ||||
|     mode: OrderMode, | ||||
|     trades_stream: tractor.MsgStream, | ||||
|     book: OrderBook, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     get_index = mode.chart.get_index | ||||
|     global _pnl_tasks | ||||
| 
 | ||||
|     # this is where we receive **back** messages | ||||
|     # about executions **from** the EMS actor | ||||
|     async for msg in trades_stream: | ||||
|         await process_trade_msg( | ||||
|             mode, | ||||
|             book, | ||||
|             msg, | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| async def process_trade_msg( | ||||
|     mode: OrderMode, | ||||
|     book: OrderBook, | ||||
|     msg: dict, | ||||
| 
 | ||||
| ) -> tuple[Dialog, Status]: | ||||
| 
 | ||||
|     get_index = mode.chart.get_index | ||||
|         fmsg = pformat(msg) | ||||
|     log.debug(f'Received order msg:\n{fmsg}') | ||||
|     name = msg['name'] | ||||
|         log.info(f'Received order msg:\n{fmsg}') | ||||
| 
 | ||||
|         name = msg['name'] | ||||
|         if name in ( | ||||
|             'position', | ||||
|         ): | ||||
|  | @ -860,14 +785,19 @@ async def process_trade_msg( | |||
|             pp_msg_symbol = msg['symbol'].lower() | ||||
|             fqsn = sym.front_fqsn() | ||||
|             broker, key = sym.front_feed() | ||||
|             # print( | ||||
|             #     f'pp msg symbol: {pp_msg_symbol}\n', | ||||
|             #     f'fqsn: {fqsn}\n', | ||||
|             #     f'front key: {key}\n', | ||||
|             # ) | ||||
| 
 | ||||
|             if ( | ||||
|             pp_msg_symbol == fqsn | ||||
|             or pp_msg_symbol == fqsn.removesuffix(f'.{broker}') | ||||
|                 pp_msg_symbol == fqsn.replace(f'.{broker}', '') | ||||
|             ): | ||||
|             log.info(f'{fqsn} matched pp msg: {fmsg}') | ||||
|                 tracker = mode.trackers[msg['account']] | ||||
|                 tracker.live_pp.update_from_msg(msg) | ||||
|             tracker.update_from_pp(set_as_startup=True)  # status/pane UI | ||||
|                 # update order pane widgets | ||||
|                 tracker.update_from_pp() | ||||
|                 mode.pane.update_status_ui(tracker) | ||||
| 
 | ||||
|                 if tracker.live_pp.size: | ||||
|  | @ -876,107 +806,84 @@ async def process_trade_msg( | |||
| 
 | ||||
|             # short circuit to next msg to avoid | ||||
|             # unnecessary msg content lookups | ||||
|         return | ||||
|             continue | ||||
| 
 | ||||
|     msg = Status(**msg) | ||||
|     resp = msg.resp | ||||
|     oid = msg.oid | ||||
|     dialog: Dialog = mode.dialogs.get(oid) | ||||
|         resp = msg['resp'] | ||||
|         oid = msg['oid'] | ||||
| 
 | ||||
|     match msg: | ||||
|         case Status(resp='dark_open' | 'open'): | ||||
|         dialog = mode.dialogs.get(oid) | ||||
|         if dialog is None: | ||||
|             log.warning(f'received msg for untracked dialog:\n{fmsg}') | ||||
| 
 | ||||
|             # TODO: enable pure tracking / mirroring of dialogs | ||||
|             # if desired. | ||||
|             continue | ||||
| 
 | ||||
|         # record message to dialog tracking | ||||
|         dialog.msgs[oid] = msg | ||||
| 
 | ||||
|         # response to 'action' request (buy/sell) | ||||
|         if resp in ( | ||||
|             'dark_submitted', | ||||
|             'broker_submitted' | ||||
|         ): | ||||
| 
 | ||||
|             if dialog is not None: | ||||
|             # show line label once order is live | ||||
|             mode.on_submit(oid) | ||||
| 
 | ||||
|             else: | ||||
|                 log.warning( | ||||
|                     f'received msg for untracked dialog:\n{fmsg}' | ||||
|                 ) | ||||
|                 assert msg.resp in ('open', 'dark_open'), f'Unknown msg: {msg}' | ||||
| 
 | ||||
|                 sym = mode.chart.linked.symbol | ||||
|                 fqsn = sym.front_fqsn() | ||||
|                 order = Order(**msg.req) | ||||
|                 if ( | ||||
|                     ((order.symbol + f'.{msg.src}') == fqsn) | ||||
| 
 | ||||
|                     # an existing dark order for the same symbol | ||||
|                     or ( | ||||
|                         order.symbol == fqsn | ||||
|                         and ( | ||||
|                             msg.src in ('dark', 'paperboi') | ||||
|                             or (msg.src in fqsn) | ||||
| 
 | ||||
|                         ) | ||||
|                     ) | ||||
|         # resp to 'cancel' request or error condition | ||||
|         # for action request | ||||
|         elif resp in ( | ||||
|             'broker_cancelled', | ||||
|             'broker_inactive', | ||||
|             'broker_errored', | ||||
|             'dark_cancelled' | ||||
|         ): | ||||
|                     msg.req = order | ||||
|                     dialog = mode.load_unknown_dialog_from_msg(msg) | ||||
|                     mode.on_submit(oid) | ||||
|                     # return dialog, msg | ||||
| 
 | ||||
|         case Status(resp='error'): | ||||
|             # delete level line from view | ||||
|             mode.on_cancel(oid) | ||||
|             broker_msg = msg.brokerd_msg | ||||
|             log.error( | ||||
|                 f'Order {oid}->{resp} with:\n{pformat(broker_msg)}' | ||||
|             ) | ||||
|             broker_msg = msg['brokerd_msg'] | ||||
|             log.warning(f'Order {oid} failed with:\n{pformat(broker_msg)}') | ||||
| 
 | ||||
|         case Status(resp='canceled'): | ||||
|             # delete level line from view | ||||
|             mode.on_cancel(oid) | ||||
|             req = Order(**msg.req) | ||||
|             log.cancel(f'Canceled {req.action}:{oid}') | ||||
| 
 | ||||
|         case Status( | ||||
|             resp='triggered', | ||||
|             # req=Order(exec_mode='dark')  # TODO: | ||||
|             req={'exec_mode': 'dark'}, | ||||
|         elif resp in ( | ||||
|             'dark_triggered' | ||||
|         ): | ||||
|             # TODO: UX for a "pending" clear/live order | ||||
|             log.info(f'Dark order triggered for {fmsg}') | ||||
| 
 | ||||
|         case Status( | ||||
|             resp='triggered', | ||||
|             # req=Order(exec_mode='live', action='alert') as req, # TODO | ||||
|             req={'exec_mode': 'live', 'action': 'alert'} as req, | ||||
|         elif resp in ( | ||||
|             'alert_triggered' | ||||
|         ): | ||||
|             # should only be one "fill" for an alert | ||||
|             # add a triangle and remove the level line | ||||
|             req = Order(**req) | ||||
|             mode.on_fill( | ||||
|                 oid, | ||||
|                 price=req.price, | ||||
|                 price=msg['trigger_price'], | ||||
|                 arrow_index=get_index(time.time()), | ||||
|             ) | ||||
|             mode.lines.remove_line(uuid=oid) | ||||
|             msg.req = req | ||||
|             await mode.on_exec(oid, msg) | ||||
| 
 | ||||
|         # response to completed 'dialog' for order request | ||||
|         case Status( | ||||
|             resp='closed', | ||||
|             # req=Order() as req,  # TODO | ||||
|             req=req, | ||||
|         # response to completed 'action' request for buy/sell | ||||
|         elif resp in ( | ||||
|             'broker_executed', | ||||
|         ): | ||||
|             msg.req = Order(**req) | ||||
|             # right now this is just triggering a system alert | ||||
|             await mode.on_exec(oid, msg) | ||||
| 
 | ||||
|             if msg['brokerd_msg']['remaining'] == 0: | ||||
|                 mode.lines.remove_line(uuid=oid) | ||||
| 
 | ||||
|         # each clearing tick is reported individually | ||||
|         case Status(resp='fill'): | ||||
|         elif resp in ( | ||||
|             'broker_filled', | ||||
|         ): | ||||
| 
 | ||||
|             # handle out-of-piker fills reporting? | ||||
|             known_order = book._sent_orders.get(oid) | ||||
|             if not known_order: | ||||
|                 log.warning(f'order {oid} is unknown') | ||||
|                 return | ||||
|                 continue | ||||
| 
 | ||||
|             action = known_order.action | ||||
|             details = msg.brokerd_msg | ||||
|             details = msg['brokerd_msg'] | ||||
| 
 | ||||
|             # TODO: some kinda progress system | ||||
|             mode.on_fill( | ||||
|  | @ -985,27 +892,9 @@ async def process_trade_msg( | |||
|                 pointing='up' if action == 'buy' else 'down', | ||||
| 
 | ||||
|                 # TODO: put the actual exchange timestamp | ||||
|                 arrow_index=get_index( | ||||
|                     # TODO: note, currently the ``kraken`` openOrders sub | ||||
|                     # doesn't deliver the engine timestamp as part of | ||||
|                     # its schema, so this value is **not** from them | ||||
|                     # (see our backend code). We should probably either | ||||
|                     # include all provider-engine timestamps in the | ||||
|                     # summary 'closed' status msg and/or figure out | ||||
|                     # a way to indicate what is a `brokerd` stamp versus | ||||
|                     # a true backend one? This will require finagling | ||||
|                     # with how each backend tracks/summarizes time | ||||
|                     # stamps for the downstream API. | ||||
|                     details['broker_time'] | ||||
|                 ), | ||||
|                 arrow_index=get_index(details['broker_time']), | ||||
|             ) | ||||
| 
 | ||||
|             # TODO: how should we look this up? | ||||
|             # tracker = mode.trackers[msg['account']] | ||||
|             # tracker.live_pp.fills.append(msg) | ||||
| 
 | ||||
|     # record message to dialog tracking | ||||
|     if dialog: | ||||
|         dialog.msgs[oid] = msg | ||||
| 
 | ||||
|     return dialog, msg | ||||
|  |  | |||
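The central change in the hunk above is replacing chains of string-set ``resp`` membership tests ('dark_submitted', 'broker_filled', ...) with ``match``/``case`` dispatch over typed ``Status`` messages, which is also why the target branch needs Python 3.10+. Below is a minimal, self-contained sketch of that dispatch style; it uses a plain dataclass as a stand-in for piker's msgspec-based ``Status``, and the response codes and fields are illustrative assumptions, not the project's real schema.

# a minimal sketch, assuming a stand-in dataclass (NOT piker's actual
# ``Status`` struct); response codes and fields are illustrative only.
from dataclasses import dataclass, field
from typing import Any


@dataclass
class Status:
    resp: str  # e.g. 'open', 'canceled', 'triggered', 'fill', 'error'
    oid: str   # order dialog id
    req: dict[str, Any] = field(default_factory=dict)


def dispatch(msg: Status) -> str:
    # route one EMS status msg to a UI action (here: a description string)
    match msg:
        case Status(resp='dark_open' | 'open'):
            return f'submitted: {msg.oid}'

        case Status(resp='canceled'):
            return f'canceled: {msg.oid}'

        # mapping patterns match a *subset* of keys, mirroring the
        # ``req={'exec_mode': 'dark'}`` guard in the hunk above
        case Status(resp='triggered', req={'exec_mode': 'dark'}):
            return f'dark order triggered: {msg.oid}'

        case Status(resp='fill'):
            return f'clearing tick for: {msg.oid}'

        case _:
            return f'unhandled resp: {msg.resp}'


assert dispatch(Status(resp='open', oid='a1')) == 'submitted: a1'
assert dispatch(
    Status(resp='triggered', oid='b2', req={'exec_mode': 'dark'})
) == 'dark order triggered: b2'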
|  | @ -1,13 +1,14 @@ | |||
| # we require a pinned dev branch to get some edge features that | ||||
| # are often untested in tractor's CI and/or are being tested by us | ||||
| # first before being committed as core features in tractor's base. | ||||
| -e git+https://github.com/goodboy/tractor.git@reentrant_moc#egg=tractor | ||||
| -e git+https://github.com/goodboy/tractor.git@master#egg=tractor | ||||
| 
 | ||||
| # `pyqtgraph` peeps keep breaking, fixing, improving so might as well | ||||
| # pin this to a dev branch that we have more control over especially | ||||
| # as more graphics stuff gets hashed out. | ||||
| -e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph | ||||
| 
 | ||||
| 
 | ||||
| # our async client for ``marketstore`` (the tsdb) | ||||
| -e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore | ||||
| 
 | ||||
|  | @ -17,7 +18,4 @@ | |||
| 
 | ||||
| 
 | ||||
| # ``asyncvnc`` for sending interactions to ib-gw inside docker | ||||
| -e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc | ||||
| 
 | ||||
| # ``cryptofeed`` for connecting to various crypto exchanges + custom fixes | ||||
| -e git+https://github.com/pikers/cryptofeed.git@date_parsing#egg=cryptofeed | ||||
| -e git+https://github.com/pikers/asyncvnc.git@vid_passthrough#egg=asyncvnc | ||||
|  |  | |||
							
								
								
									
setup.py (5 changed lines)
							|  | @ -41,24 +41,23 @@ setup( | |||
|     }, | ||||
|     install_requires=[ | ||||
|         'toml', | ||||
|         'tomli',  # fastest pure py reader | ||||
|         'click', | ||||
|         'colorlog', | ||||
|         'attrs', | ||||
|         'pygments', | ||||
|         'colorama',  # numba traceback coloring | ||||
|         'msgspec',  # performant IPC messaging and structs | ||||
|         'pydantic',  # structured data | ||||
| 
 | ||||
|         # async | ||||
|         'trio', | ||||
|         'trio-websocket', | ||||
|         'msgspec',  # performant IPC messaging | ||||
|         'async_generator', | ||||
| 
 | ||||
|         # from github currently (see requirements.txt) | ||||
|         # 'trimeter',  # not released yet.. | ||||
|         # 'tractor', | ||||
|         # asyncvnc, | ||||
|         # 'cryptofeed', | ||||
| 
 | ||||
|         # brokers | ||||
|         'asks==2.4.8', | ||||
|  |  | |||
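The ``install_requires`` hunk above swaps ``pydantic`` for ``msgspec`` as the structured-data layer ("performant IPC messaging and structs"). A short hedged sketch of the basic ``msgspec`` struct round-trip follows; the ``Order`` field names are assumptions for illustration, not piker's actual message schema.

# a hedged sketch of the ``msgspec`` pattern replacing ``pydantic``:
# typed structs plus a fast binary round-trip; fields are illustrative.
import msgspec


class Order(msgspec.Struct):
    action: str   # 'buy' | 'sell' | 'alert'
    symbol: str
    price: float
    exec_mode: str = 'live'


# encode to msgpack bytes for IPC, then decode with type validation
buf = msgspec.msgpack.encode(
    Order(action='buy', symbol='xbtusdt', price=20_000.0)
)
order = msgspec.msgpack.decode(buf, type=Order)
assert order.symbol == 'xbtusdt' and order.exec_mode == 'live'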