Compare commits

No commits in common. "gitea_feats" and "historical_breakeven_pp_price" have entirely different histories.

gitea_feats ... historical_breakeven_pp_price

		|  | @ -3,9 +3,10 @@ name: CI | |||
| 
 | ||||
| on: | ||||
|   # Triggers the workflow on push or pull request events but only for the master branch | ||||
|   pull_request: | ||||
|   push: | ||||
|     branches: [ master ] | ||||
|   pull_request: | ||||
|     branches: [ master ] | ||||
| 
 | ||||
|   # Allows you to run this workflow manually from the Actions tab | ||||
|   workflow_dispatch: | ||||
|  | @ -13,49 +14,19 @@ on: | |||
| 
 | ||||
| jobs: | ||||
| 
 | ||||
|   # test that we can generate a software distribution and install it | ||||
|   # thus avoiding missing file issues after packaging. | ||||
|   sdist-linux: | ||||
|     name: 'sdist' | ||||
|     runs-on: ubuntu-latest | ||||
| 
 | ||||
|     steps: | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@v3 | ||||
| 
 | ||||
|       - name: Setup python | ||||
|         uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.10' | ||||
| 
 | ||||
|       - name: Build sdist | ||||
|         run: python setup.py sdist --formats=zip | ||||
| 
 | ||||
|       - name: Install sdist from .zips | ||||
|         run: python -m pip install dist/*.zip | ||||
| 
 | ||||
|   testing: | ||||
|     name: 'install + test-suite' | ||||
|     timeout-minutes: 10 | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
| 
 | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@v3 | ||||
| 
 | ||||
|       # elastic only | ||||
|       # - name: Build DB container | ||||
|       #   run: docker build -t piker:elastic dockering/elastic | ||||
| 
 | ||||
|       - name: Setup python | ||||
|         uses: actions/setup-python@v4 | ||||
|         uses: actions/setup-python@v3 | ||||
|         with: | ||||
|           python-version: '3.10' | ||||
| 
 | ||||
|       # elastic only | ||||
|       # - name: Install dependencies | ||||
|       #   run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager | ||||
| 
 | ||||
|       - name: Install dependencies | ||||
|         run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager | ||||
| 
 | ||||
|  |  | |||

README.rst (296 lines changed)
							|  | @ -1,161 +1,222 @@ | |||
| piker | ||||
| ----- | ||||
| trading gear for hackers | ||||
| trading gear for hackers. | ||||
| 
 | ||||
| |gh_actions| | ||||
| 
 | ||||
| .. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fpikers%2Fpiker%2Fbadge&style=popout-square | ||||
|     :target: https://actions-badge.atrox.dev/pikers/piker/goto | ||||
| 
 | ||||
| ``piker`` is a broker agnostic, next-gen FOSS toolset and runtime for | ||||
| real-time computational trading targeted at `hardcore Linux users | ||||
| <comp_trader>`_ . | ||||
| ``piker`` is a broker agnostic, next-gen FOSS toolset for real-time | ||||
| computational trading targeted at `hardcore Linux users <comp_trader>`_ . | ||||
| 
 | ||||
| we use much bleeding edge tech including (but not limited to): | ||||
| we use as much bleeding edge tech as possible including (but not limited to): | ||||
| 
 | ||||
| - latest python for glue_ | ||||
| - uv_ for packaging and distribution | ||||
| - trio_ & tractor_ for our distributed `structured concurrency`_ runtime | ||||
| - Qt_ for pristine low latency UIs | ||||
| - pyqtgraph_ (which we've extended) for real-time charting and graphics | ||||
| - ``polars`` ``numpy`` and ``numba`` for redic `fast numerics`_ | ||||
| - `apache arrow and parquet`_ for time-series storage | ||||
| - trio_ for `structured concurrency`_ | ||||
| - tractor_ for distributed, multi-core, real-time streaming | ||||
| - marketstore_ for historical and real-time tick data persistence and sharing | ||||
| - techtonicdb_ for L2 book storage | ||||
| - Qt_ for pristine high performance UIs | ||||
| - pyqtgraph_ for real-time charting | ||||
| - ``numpy`` and ``numba`` for `fast numerics`_ | ||||
| 
 | ||||
| potential projects we might integrate with soon, | ||||
| 
 | ||||
| - (already prototyped in ) techtonicdb_ for L2 book storage | ||||
| 
 | ||||
| .. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/ | ||||
| .. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue | ||||
| .. _uv: https://docs.astral.sh/uv/ | ||||
| .. |travis| image:: https://img.shields.io/travis/pikers/piker/master.svg | ||||
|     :target: https://travis-ci.org/pikers/piker | ||||
| .. _trio: https://github.com/python-trio/trio | ||||
| .. _tractor: https://github.com/goodboy/tractor | ||||
| .. _structured concurrency: https://trio.discourse.group/ | ||||
| .. _marketstore: https://github.com/alpacahq/marketstore | ||||
| .. _techtonicdb: https://github.com/0b01/tectonicdb | ||||
| .. _Qt: https://www.qt.io/ | ||||
| .. _pyqtgraph: https://github.com/pyqtgraph/pyqtgraph | ||||
| .. _apache arrow and parquet: https://arrow.apache.org/faq/ | ||||
| .. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue | ||||
| .. _fast numerics: https://zerowithdot.com/python-numpy-and-pandas-performance/ | ||||
| .. _techtonicdb: https://github.com/0b01/tectonicdb | ||||
| .. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/ | ||||
| 
 | ||||
| 
 | ||||
| focus and feats: | ||||
| focus and features: | ||||
| ******************* | ||||
| - 100% federated: your code, your hardware, your data feeds, your broker fills. | ||||
| - zero web: low latency, native software that doesn't try to re-invent the OS | ||||
| - maximal **privacy**: prevent brokers and mms from knowing your | ||||
|   planz; smack their spreads with dark volume. | ||||
| - zero clutter: modal, context oriented UIs that eschew minimalism, reduce | ||||
|   thought noise and encourage un-emotion. | ||||
| - first class parallelism: built from the ground up on next-gen structured concurrency | ||||
|   primitives. | ||||
| - traders first: broker/exchange/asset-class agnostic | ||||
| - systems grounded: real-time financial signal processing that will | ||||
|   make any queuing or DSP eng juice their shorts. | ||||
| - non-tina UX: sleek, powerful keyboard driven interaction with expected use in tiling wms | ||||
| - data collaboration: every process and protocol is multi-host scalable. | ||||
| - fight club ready: zero interest in adoption by suits; no corporate friendly license, ever. | ||||
| 
 | ||||
| fitting with these tenets, we're always open to new framework suggestions and ideas. | ||||
| 
 | ||||
| building the best looking, most reliable, keyboard friendly trading | ||||
| platform is the dream; join the cause. | ||||
| 
 | ||||
| 
 | ||||
| install | ||||
| ******* | ||||
| ``piker`` is currently under heavy pre-alpha development and as such | ||||
| should be cloned from this repo and hacked on directly. | ||||
| 
 | ||||
| for a development install:: | ||||
| 
 | ||||
|     git clone git@github.com:pikers/piker.git | ||||
|     cd piker | ||||
|     virtualenv env | ||||
|     source ./env/bin/activate | ||||
|     pip install -r requirements.txt -e . | ||||
| 
 | ||||
| 
 | ||||
| install for tinas | ||||
| ***************** | ||||
| for windows peeps you can start by installing all the prerequisite software: | ||||
| 
 | ||||
| - install git with all default settings - https://git-scm.com/download/win | ||||
| - install anaconda all default settings - https://www.anaconda.com/products/individual | ||||
| - install microsoft build tools (check the box for Desktop development with C++, you might be able to uncheck some optional downloads)  - https://visualstudio.microsoft.com/visual-cpp-build-tools/ | ||||
| - install visual studio code default settings - https://code.visualstudio.com/download | ||||
| 
 | ||||
| 
 | ||||
| then, `crack a conda shell`_ and run the following commands:: | ||||
| 
 | ||||
|     mkdir code # create code directory | ||||
|     cd code # change directory to code | ||||
|     git clone https://github.com/pikers/piker.git # downloads piker installation package from github | ||||
|     cd piker # change directory to piker | ||||
|      | ||||
|     conda create -n pikonda # creates conda environment named pikonda | ||||
|     conda activate pikonda # activates pikonda | ||||
|      | ||||
|     conda install -c conda-forge python-levenshtein # in case it is not already installed | ||||
|     conda install pip # may already be installed | ||||
|     pip # will show if pip is installed | ||||
|      | ||||
|     pip install -e . -r requirements.txt # install piker in editable mode | ||||
| 
 | ||||
| test Piker to see if it is working:: | ||||
| 
 | ||||
|     piker -b binance chart btcusdt.binance # formatting for loading a chart | ||||
|     piker -b kraken -b binance chart xbtusdt.kraken | ||||
|     piker -b kraken -b binance -b ib chart qqq.nasdaq.ib | ||||
|     piker -b ib chart tsla.nasdaq.ib | ||||
| 
 | ||||
| potential error:: | ||||
|      | ||||
|     FileNotFoundError: [Errno 2] No such file or directory: 'C:\\Users\\user\\AppData\\Roaming\\piker\\brokers.toml' | ||||
|      | ||||
| solution: | ||||
| 
 | ||||
| - navigate to the file directory above (it may be different on your machine; the location should be listed in the error message) | ||||
| - copy and paste the file from 'C:\\Users\\user\\code\\data\\brokers.toml' or create a blank file using notepad at the location above (see the sketch below) | ||||
| 
 | ||||
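| in a cmd shell that fix amounts to roughly the following sketch (the | ||||
| paths are the placeholder ones from the example error above; adjust | ||||
| them to whatever your error message actually reports):: | ||||
| 
 | ||||
|     REM make sure the piker config dir exists | ||||
|     mkdir %APPDATA%\piker | ||||
|     REM copy the template config into place | ||||
|     copy C:\Users\user\code\data\brokers.toml %APPDATA%\piker\brokers.toml | ||||
| 
 | ||||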
| Visual Studio Code setup: | ||||
| 
 | ||||
| - now that piker is installed we can set up vscode as the default terminal for running piker and editing the code | ||||
| - open Visual Studio Code | ||||
| - file --> Add Folder to Workspace --> C:\Users\user\code\piker (adds piker directory where all piker files are located) | ||||
| - file --> Save Workspace As --> save it wherever you want and call it whatever you want, this is going to be your default workspace for running and editing piker code | ||||
| - ctrl + shift + p --> start typing Python: Select Interpreter --> when the option comes up select it --> Select at the workspace level --> select the one that shows ('pikonda') | ||||
| - change the default terminal to cmd.exe instead of powershell (default) | ||||
| - now when you create a new terminal VScode should automatically activate your conda env so that piker can be run as the first command after a new terminal is created | ||||
| 
 | ||||
| also, try out fancyzones as part of powertoyz for a decent tiling windows manager to manage all the cool new software you are going to be running. | ||||
| 
 | ||||
| .. _conda installed: https:// | ||||
| .. _C++ build toolz: https:// | ||||
| .. _crack a conda shell: https:// | ||||
| .. _vscode: https:// | ||||
| 
 | ||||
| .. link to the tina guide | ||||
| .. _setup a coolio tiled wm console: https:// | ||||
| 
 | ||||
| provider support | ||||
| **************** | ||||
| fitting with these tenets, we're always open to new | ||||
| framework/lib/service interop suggestions and ideas! | ||||
| for live data feeds the in-progress set of supported brokers is: | ||||
| 
 | ||||
| - **100% federated**: | ||||
|   your code, your hardware, your data feeds, your broker fills. | ||||
| - IB_ via ``ib_insync``, also see our `container docs`_ | ||||
| - binance_ and kraken_ for crypto over their public websocket API | ||||
| - questrade_ (ish) which comes with effectively free L1 | ||||
| 
 | ||||
| - **zero web**: | ||||
|   low latency as a prime objective, native UIs and modern IPC | ||||
|   protocols without trying to re-invent the "OS-as-an-app".. | ||||
| coming soon... | ||||
| 
 | ||||
| - **maximal privacy**: | ||||
|   prevent brokers and mms from knowing your planz; smack their | ||||
|   spreads with dark volume from a VPN tunnel. | ||||
| - webull_ via the reverse engineered public API | ||||
| - yahoo via yliveticker_ | ||||
| 
 | ||||
| - **zero clutter**: | ||||
|   modal, context oriented UIs that eschew minimalism, reduce thought | ||||
|   noise and encourage un-emotion. | ||||
| if you want your broker supported and they have an API let us know. | ||||
| 
 | ||||
| - **first class parallelism**: | ||||
|   built from the ground up on a next-gen structured concurrency | ||||
|   supervision sys. | ||||
| 
 | ||||
| - **traders first**: | ||||
|   broker/exchange/venue/asset-class/money-sys agnostic | ||||
| 
 | ||||
| - **systems grounded**: | ||||
|   real-time financial signal processing (fsp) that will make any | ||||
|   queuing or DSP eng juice their shorts. | ||||
| 
 | ||||
| - **non-tina UX**: | ||||
|   sleek, powerful keyboard driven interaction with expected use in | ||||
|   tiling wms (or maybe even a DDE). | ||||
| 
 | ||||
| - **data collab at scale**: | ||||
|   every actor-process and protocol is multi-host aware. | ||||
| 
 | ||||
| - **fight club ready**: | ||||
|   zero interest in adoption by suits; no corporate friendly license, | ||||
|   ever. | ||||
| 
 | ||||
| building the hottest looking, fastest, most reliable, keyboard | ||||
| friendly FOSS trading platform is the dream; join the cause. | ||||
| .. _IB: https://interactivebrokers.github.io/tws-api/index.html | ||||
| .. _container docs: https://github.com/pikers/piker/tree/master/dockering/ib | ||||
| .. _questrade: https://www.questrade.com/api/documentation | ||||
| .. _kraken: https://www.kraken.com/features/api#public-market-data | ||||
| .. _binance: https://github.com/pikers/piker/pull/182 | ||||
| .. _webull: https://github.com/tedchou12/webull | ||||
| .. _yliveticker: https://github.com/yahoofinancelive/yliveticker | ||||
| .. _coinbase: https://docs.pro.coinbase.com/#websocket-feed | ||||
| 
 | ||||
| 
 | ||||
| a sane install with `uv` | ||||
| ************************ | ||||
| bc why install with `python` when you can go faster with `rust`:: | ||||
| check out our charts | ||||
| ******************** | ||||
| bet you weren't expecting this from the foss:: | ||||
| 
 | ||||
|     uv lock | ||||
|     piker -l info -b kraken -b binance chart btcusdt.binance --pdb | ||||
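| 
 | ||||
| (an aside on the `uv` flow above: assuming a standard uv-managed | ||||
| project env, with ``piker`` exposed as the project's cli entrypoint, | ||||
| you can then invoke it through that env):: | ||||
| 
 | ||||
|     # run piker from the uv-managed environment | ||||
|     uv run piker --help | ||||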
| 
 | ||||
| 
 | ||||
| hacky install on nixos | ||||
| ********************** | ||||
| ``NixOS`` is our core devs' distro of choice for which we offer | ||||
| a stringently defined development shell environment that can be loaded with:: | ||||
| this runs the main chart (currently with 1m sampled OHLC) in debug | ||||
| mode and you can practice paper trading using the following | ||||
| micro-manual: | ||||
| 
 | ||||
|     nix-shell default.nix | ||||
| ``order_mode`` ( | ||||
|     edge triggered activation by any of the following keys, | ||||
|     ``mouse-click`` on y-level to submit at that price | ||||
|     ): | ||||
| 
 | ||||
|     - ``f``/ ``ctl-f`` to stage buy | ||||
|     - ``d``/ ``ctl-d`` to stage sell | ||||
|     - ``a`` to stage alert | ||||
| 
 | ||||
| 
 | ||||
| start a chart | ||||
| ************* | ||||
| run a realtime OHLCV chart stand-alone:: | ||||
| ``search_mode`` ( | ||||
|     ``ctl-l`` or ``ctl-space`` to open, | ||||
|     ``ctl-c`` or ``ctl-space`` to close | ||||
|     ) : | ||||
| 
 | ||||
|     piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken | ||||
| 
 | ||||
| this runs a chart UI (with 1m sampled OHLCV) and shows 2 spot markets from 2 diff cexes | ||||
| overlaid on the same graph. Use of `piker` without first starting | ||||
| a daemon (`pikerd` - see below) means there is an implicit spawning of the | ||||
| multi-actor-runtime (implemented as a `tractor` app). | ||||
| 
 | ||||
| For additional subsystem feats available through our chart UI see the | ||||
| various sub-readmes: | ||||
| 
 | ||||
| - order control using a mouse-n-keyboard UX B) | ||||
| - cross venue market-pair (what most call "symbol") search, select, overlay Bo | ||||
| - financial-signal-processing (`piker.fsp`) write-n-reload to sub-chart BO | ||||
| - src-asset derivatives scan for anal, like the infamous "max pain" XO | ||||
|     - begin typing to have symbol search automatically lookup | ||||
|       symbols from all loaded backend (broker) providers | ||||
|     - arrow keys and mouse click to navigate selection | ||||
|     - vi-like ``ctl-[hjkl]`` for navigation | ||||
| 
 | ||||
| 
 | ||||
| spawn a daemon standalone | ||||
| ************************* | ||||
| we call the root actor-process the ``pikerd``. it can be (and | ||||
| normally should be) started separately from the ``piker | ||||
| chart`` program:: | ||||
| you can also configure your position allocation limits from the | ||||
| sidepane. | ||||
| 
 | ||||
| 
 | ||||
| run in distributed mode | ||||
| *********************** | ||||
| start the service manager and data feed daemon in the background and | ||||
| connect to it:: | ||||
| 
 | ||||
|     pikerd -l info --pdb | ||||
| 
 | ||||
| the daemon does nothing until a ``piker``-client (like ``piker | ||||
| chart``) connects and requests some particular sub-system. for | ||||
| a connecting chart ``pikerd`` will spawn and manage at least, | ||||
| 
 | ||||
| - a data-feed daemon: ``datad`` which does all the work of comms with | ||||
|   the backend provider (in this case the ``binance`` cex). | ||||
| - a paper-trading engine instance, ``paperboi.binance``, (if no live | ||||
|   account has been configured) which allows for auto/manual order | ||||
|   control against the live quote stream. | ||||
| connect your chart:: | ||||
| 
 | ||||
| *using* an actor-service (aka micro-daemon) manager which dynamically | ||||
| supervises various sub-subsystems-as-services throughout the ``piker`` | ||||
| runtime-stack. | ||||
|     piker -l info -b kraken -b binance chart xmrusdt.binance --pdb | ||||
| 
 | ||||
| now you can (implicitly) connect your chart:: | ||||
| 
 | ||||
|     piker chart btcusdt.spot.binance | ||||
| 
 | ||||
| since ``pikerd`` was started separately you can now enjoy a persistent | ||||
| real-time data stream tied to the daemon-tree's lifetime. i.e. the next | ||||
| time you spawn a chart it will obviously not only load much faster | ||||
| (since the underlying ``datad.binance`` is left running with its | ||||
| in-memory IPC data structures) but also the data-feed and any order | ||||
| mgmt states should be persistent until you finally cancel ``pikerd``. | ||||
| enjoy persistent real-time data feeds tied to daemon lifetime. the next | ||||
| time you spawn a chart it will load much faster since the data feed has | ||||
| been cached and is now always running live in the background until you | ||||
| kill ``pikerd``. | ||||
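| 
 | ||||
| for example, the complete two-terminal flow using just the commands | ||||
| shown above:: | ||||
| 
 | ||||
|     # terminal 1: spawn the root service daemon | ||||
|     pikerd -l info --pdb | ||||
| 
 | ||||
|     # terminal 2: attach a chart client to the running daemon-tree | ||||
|     piker chart btcusdt.spot.binance | ||||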
| 
 | ||||
| 
 | ||||
| if anyone asks you what this project is about | ||||
| ********************************************* | ||||
| you don't talk about it; just use it. | ||||
| you don't talk about it. | ||||
| 
 | ||||
| 
 | ||||
| how do i get involved? | ||||
|  | @ -165,15 +226,6 @@ enter the matrix. | |||
| 
 | ||||
| how come there ain't that many docs | ||||
| *********************************** | ||||
| i mean we want/need them but building the core right has been higher | ||||
| prio than marketing (and likely will stay that way Bp). | ||||
| 
 | ||||
| soo, suck it up bc, | ||||
| 
 | ||||
| - no one is trying to sell you on anything | ||||
| - learning the code base is prolly way more valuable | ||||
| - the UI/UXs are intended to be "intuitive" for any hacker.. | ||||
| 
 | ||||
| we obviously need tonz of help so if you want to start somewhere and | ||||
| can't necessarily write "advanced" concurrent python/rust code, then | ||||
| helping to document literally anything might be the place for you! | ||||
| suck it up, learn the code; no one is trying to sell you on anything. | ||||
| also, we need lotsa help so if you want to start somewhere and can't | ||||
| necessarily write serious code, this might be the place for you! | ||||
|  |  | |||
|  | @ -1,52 +1,19 @@ | |||
| ################ | ||||
| # ---- CEXY ---- | ||||
| ################ | ||||
| [binance] | ||||
| accounts.paper = 'paper' | ||||
| 
 | ||||
| accounts.usdtm = 'futes' | ||||
| futes.use_testnet = false | ||||
| futes.api_key = '' | ||||
| futes.api_secret = '' | ||||
| 
 | ||||
| accounts.spot = 'spot' | ||||
| spot.use_testnet = false | ||||
| spot.api_key = '' | ||||
| spot.api_secret = '' | ||||
| 
 | ||||
| 
 | ||||
| [deribit] | ||||
| key_id = '' | ||||
| key_secret = '' | ||||
| 
 | ||||
| 
 | ||||
| [kraken] | ||||
| key_descr = '' | ||||
| api_key = '' | ||||
| secret = '' | ||||
| 
 | ||||
| 
 | ||||
| [kucoin] | ||||
| key_id = '' | ||||
| key_secret = '' | ||||
| key_passphrase = '' | ||||
| 
 | ||||
| 
 | ||||
| ################ | ||||
| # -- BROKERZ --- | ||||
| ################ | ||||
| [questrade] | ||||
| refresh_token = '' | ||||
| access_token = '' | ||||
| api_server = 'https://api06.iq.questrade.com/' | ||||
| refresh_token = "" | ||||
| access_token = "" | ||||
| api_server = "https://api06.iq.questrade.com/" | ||||
| expires_in = 1800 | ||||
| token_type = 'Bearer' | ||||
| token_type = "Bearer" | ||||
| expires_at = 1616095326.355846 | ||||
| 
 | ||||
| [kraken] | ||||
| key_descr = "api_0" | ||||
| api_key = "" | ||||
| secret = "" | ||||
| 
 | ||||
| [ib] | ||||
| hosts = [ | ||||
|     '127.0.0.1', | ||||
|     "127.0.0.1", | ||||
| ] | ||||
| # XXX: the order in which ports will be scanned | ||||
| # (by the `brokerd` daemon-actor) | ||||
|  | @ -63,8 +30,8 @@ ports = [ | |||
| # is not supported so you have to manually download | ||||
| # an XML report and put it in a location that can be | ||||
| # accessed by the ``brokerd.ib`` backend code for parsing. | ||||
| flex_token = '' | ||||
| flex_trades_query_id = ''  # live account | ||||
| flex_token = '666666666666666666666666' | ||||
| flex_trades_query_id = '666666'  # live account | ||||
| 
 | ||||
| # when clients are being scanned this determines | ||||
| # which clients are preferred to be used for data | ||||
|  | @ -80,6 +47,6 @@ prefer_data_account = [ | |||
| # the order in which accounts will be selectable | ||||
| # in the order mode UI (if found via clients during | ||||
| # API-app scanning) when a new symbol is loaded. | ||||
| paper = 'XX0000000' | ||||
| margin = 'X0000000' | ||||
| ira = 'X0000000' | ||||
| paper = "XX0000000" | ||||
| margin = "X0000000" | ||||
| ira = "X0000000" | ||||
|  |  | |||
|  | @ -1,12 +0,0 @@ | |||
| [network] | ||||
| tsdb.backend = 'marketstore' | ||||
| tsdb.host = 'localhost' | ||||
| tsdb.grpc_port = 5995 | ||||
| 
 | ||||
| [ui] | ||||
| # set custom font + size which will scale entire UI | ||||
| # font_size = 16 | ||||
| # font_name = 'Monospaced' | ||||
| 
 | ||||
| # colorscheme = 'default'  # UNUSED | ||||
| # graphics.update_throttle = 60  # Hz  # TODO | ||||

default.nix (134 lines changed)
							|  | @ -1,134 +0,0 @@ | |||
| with (import <nixpkgs> {}); | ||||
| let | ||||
|   glibStorePath = lib.getLib glib; | ||||
|   zlibStorePath = lib.getLib zlib; | ||||
|   zstdStorePath = lib.getLib zstd; | ||||
|   dbusStorePath = lib.getLib dbus; | ||||
|   libGLStorePath = lib.getLib libGL; | ||||
|   freetypeStorePath = lib.getLib freetype; | ||||
|   qt6baseStorePath = lib.getLib qt6.qtbase; | ||||
|   fontconfigStorePath = lib.getLib fontconfig; | ||||
|   libxkbcommonStorePath = lib.getLib libxkbcommon; | ||||
|   xcbutilcursorStorePath = lib.getLib xcb-util-cursor; | ||||
| 
 | ||||
|   qtpyStorePath = lib.getLib python312Packages.qtpy; | ||||
|   pyqt6StorePath = lib.getLib python312Packages.pyqt6; | ||||
|   pyqt6SipStorePath = lib.getLib python312Packages.pyqt6-sip; | ||||
|   rapidfuzzStorePath = lib.getLib python312Packages.rapidfuzz; | ||||
|   qdarkstyleStorePath = lib.getLib python312Packages.qdarkstyle; | ||||
| 
 | ||||
|   xorgLibX11StorePath = lib.getLib xorg.libX11; | ||||
|   xorgLibxcbStorePath = lib.getLib xorg.libxcb; | ||||
|   xorgxcbutilwmStorePath = lib.getLib xorg.xcbutilwm; | ||||
|   xorgxcbutilimageStorePath = lib.getLib xorg.xcbutilimage; | ||||
|   xorgxcbutilerrorsStorePath = lib.getLib xorg.xcbutilerrors; | ||||
|   xorgxcbutilkeysymsStorePath = lib.getLib xorg.xcbutilkeysyms; | ||||
|   xorgxcbutilrenderutilStorePath = lib.getLib xorg.xcbutilrenderutil; | ||||
| in | ||||
| stdenv.mkDerivation { | ||||
|   name = "piker-qt6-uv"; | ||||
|   buildInputs = [ | ||||
|     # System requirements. | ||||
|     glib | ||||
|     zlib | ||||
|     dbus | ||||
|     zstd | ||||
|     libGL | ||||
|     freetype | ||||
|     qt6.qtbase | ||||
|     libgcc.lib | ||||
|     fontconfig | ||||
|     libxkbcommon | ||||
| 
 | ||||
|     # Xorg requirements | ||||
|     xcb-util-cursor | ||||
|     xorg.libxcb | ||||
|     xorg.libX11 | ||||
|     xorg.xcbutilwm | ||||
|     xorg.xcbutilimage | ||||
|     xorg.xcbutilerrors | ||||
|     xorg.xcbutilkeysyms | ||||
|     xorg.xcbutilrenderutil | ||||
| 
 | ||||
|     # Python requirements. | ||||
|     python312Full | ||||
|     python312Packages.uv | ||||
|     python312Packages.qdarkstyle | ||||
|     python312Packages.rapidfuzz | ||||
|     python312Packages.pyqt6 | ||||
|     python312Packages.qtpy | ||||
|   ]; | ||||
|   src = null; | ||||
|   shellHook = '' | ||||
|     set -e | ||||
| 
 | ||||
|     # Set the Qt plugin path | ||||
|     # export QT_DEBUG_PLUGINS=1 | ||||
| 
 | ||||
|     QTBASE_PATH="${qt6baseStorePath}/lib" | ||||
|     QT_PLUGIN_PATH="$QTBASE_PATH/qt-6/plugins" | ||||
|     QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms" | ||||
| 
 | ||||
|     LIB_GCC_PATH="${libgcc.lib}/lib" | ||||
|     GLIB_PATH="${glibStorePath}/lib" | ||||
|     ZSTD_PATH="${zstdStorePath}/lib" | ||||
|     ZLIB_PATH="${zlibStorePath}/lib" | ||||
|     DBUS_PATH="${dbusStorePath}/lib" | ||||
|     LIBGL_PATH="${libGLStorePath}/lib" | ||||
|     FREETYPE_PATH="${freetypeStorePath}/lib" | ||||
|     FONTCONFIG_PATH="${fontconfigStorePath}/lib" | ||||
|     LIB_XKB_COMMON_PATH="${libxkbcommonStorePath}/lib" | ||||
| 
 | ||||
|     XCB_UTIL_CURSOR_PATH="${xcbutilcursorStorePath}/lib" | ||||
|     XORG_LIB_X11_PATH="${xorgLibX11StorePath}/lib" | ||||
|     XORG_LIB_XCB_PATH="${xorgLibxcbStorePath}/lib" | ||||
|     XORG_XCB_UTIL_IMAGE_PATH="${xorgxcbutilimageStorePath}/lib" | ||||
|     XORG_XCB_UTIL_WM_PATH="${xorgxcbutilwmStorePath}/lib" | ||||
|     XORG_XCB_UTIL_RENDER_UTIL_PATH="${xorgxcbutilrenderutilStorePath}/lib" | ||||
|     XORG_XCB_UTIL_KEYSYMS_PATH="${xorgxcbutilkeysymsStorePath}/lib" | ||||
|     XORG_XCB_UTIL_ERRORS_PATH="${xorgxcbutilerrorsStorePath}/lib" | ||||
| 
 | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QTBASE_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_PLUGIN_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_QPA_PLATFORM_PLUGIN_PATH" | ||||
| 
 | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_GCC_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$DBUS_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GLIB_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZLIB_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZSTD_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIBGL_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FONTCONFIG_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FREETYPE_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_XKB_COMMON_PATH" | ||||
| 
 | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XCB_UTIL_CURSOR_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_X11_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_XCB_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_IMAGE_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_WM_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_RENDER_UTIL_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_KEYSYMS_PATH" | ||||
|     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_ERRORS_PATH" | ||||
| 
 | ||||
|     export LD_LIBRARY_PATH | ||||
| 
 | ||||
|     RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages" | ||||
|     QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages" | ||||
|     QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages" | ||||
|     PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages" | ||||
|     PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages" | ||||
| 
 | ||||
|     PATCH="$PATCH:$RPDFUZZ_PATH" | ||||
|     PATCH="$PATCH:$QDRKSTYLE_PATH" | ||||
|     PATCH="$PATCH:$QTPY_PATH" | ||||
|     PATCH="$PATCH:$PYQT6_PATH" | ||||
|     PATCH="$PATCH:$PYQT6_SIP_PATH" | ||||
| 
 | ||||
|     export PATCH | ||||
| 
 | ||||
|     # Install deps | ||||
|     uv lock | ||||
| 
 | ||||
|   ''; | ||||
| } | ||||

develop.nix (47 lines changed)
							|  | @ -1,47 +0,0 @@ | |||
| with (import <nixpkgs> {}); | ||||
| 
 | ||||
| stdenv.mkDerivation { | ||||
|   name = "poetry-env"; | ||||
|   buildInputs = [ | ||||
|     # System requirements. | ||||
|     readline | ||||
| 
 | ||||
|     # TODO: hacky non-poetry install stuff we need to get rid of!! | ||||
|     poetry | ||||
|     # virtualenv | ||||
|     # setuptools | ||||
|     # pip | ||||
| 
 | ||||
|     # Python requirements (enough to get a virtualenv going). | ||||
|     python311Full | ||||
| 
 | ||||
|     # obviously, and see below for hacked linking | ||||
|     python311Packages.pyqt5 | ||||
|     python311Packages.pyqt5_sip | ||||
|     # python311Packages.qtpy | ||||
| 
 | ||||
|     # numerics deps | ||||
|     python311Packages.levenshtein | ||||
|     python311Packages.fastparquet | ||||
|     python311Packages.polars | ||||
| 
 | ||||
|   ]; | ||||
|   # environment.sessionVariables = { | ||||
|   #   LD_LIBRARY_PATH = "${pkgs.stdenv.cc.cc.lib}/lib"; | ||||
|   # }; | ||||
|   src = null; | ||||
|   shellHook = '' | ||||
|     # Allow the use of wheels. | ||||
|     SOURCE_DATE_EPOCH=$(date +%s) | ||||
| 
 | ||||
|     # Augment the dynamic linker path | ||||
|     export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${R}/lib/R/lib:${readline}/lib | ||||
|     export QT_QPA_PLATFORM_PLUGIN_PATH="${qt5.qtbase.bin}/lib/qt-${qt5.qtbase.version}/plugins"; | ||||
| 
 | ||||
|     if [ ! -d ".venv" ]; then | ||||
|         poetry install --with uis | ||||
|     fi | ||||
| 
 | ||||
|     poetry shell | ||||
|   ''; | ||||
| } | ||||
|  | @ -1,11 +0,0 @@ | |||
| FROM elasticsearch:7.17.4 | ||||
| 
 | ||||
| ENV ES_JAVA_OPTS "-Xms2g -Xmx2g" | ||||
| ENV ELASTIC_USERNAME "elastic" | ||||
| ENV ELASTIC_PASSWORD "password" | ||||
| 
 | ||||
| COPY elasticsearch.yml /usr/share/elasticsearch/config/ | ||||
| 
 | ||||
| RUN printf "password" | ./bin/elasticsearch-keystore add -f -x "bootstrap.password" | ||||
| 
 | ||||
| EXPOSE 19200 | ||||
|  | @ -1,5 +0,0 @@ | |||
| network.host: 0.0.0.0 | ||||
| 
 | ||||
| http.port: 19200 | ||||
| 
 | ||||
| discovery.type: single-node | ||||
|  | @ -2,27 +2,12 @@ | |||
| # https://github.com/waytrade/ib-gateway-docker/blob/master/docker-compose.yml | ||||
| version: "3.5" | ||||
| 
 | ||||
| 
 | ||||
| services: | ||||
| 
 | ||||
|   ib_gw_paper: | ||||
| 
 | ||||
|     # apparently java is a mega cuck: | ||||
|     # https://stackoverflow.com/a/56895801 | ||||
|     # https://bugs.openjdk.org/browse/JDK-8150460 | ||||
|     ulimits: | ||||
|       # nproc: 65535 | ||||
|       nproc: 6000 | ||||
|       nofile: | ||||
|         soft: 2000 | ||||
|         hard: 3000 | ||||
| 
 | ||||
|   ib-gateway: | ||||
|     # other image tags available: | ||||
|     # https://github.com/waytrade/ib-gateway-docker#supported-tags | ||||
|     # image: waytrade/ib-gateway:1012.2i | ||||
|     image: ghcr.io/gnzsnz/ib-gateway:latest | ||||
| 
 | ||||
|     restart: 'no'  # restart on boot whenev there's a crash or user clicks | ||||
|     image: waytrade/ib-gateway:981.3j | ||||
|     restart: always | ||||
|     network_mode: 'host' | ||||
| 
 | ||||
|     volumes: | ||||
|  | @ -54,12 +39,14 @@ services: | |||
|     # this compose file which looks something like: | ||||
|     # TWS_USERID='myuser' | ||||
|     # TWS_PASSWORD='guest' | ||||
|     # TRADING_MODE=paper (or live) | ||||
|     # VNC_SERVER_PASSWORD='diggity' | ||||
| 
 | ||||
|     environment: | ||||
|       TWS_USERID: ${TWS_USERID} | ||||
|       TWS_PASSWORD: ${TWS_PASSWORD} | ||||
|       TRADING_MODE: 'paper' | ||||
|       VNC_SERVER_PASSWORD: 'doggy' | ||||
|       VNC_SERVER_PORT: '3003' | ||||
|       TRADING_MODE: ${TRADING_MODE:-paper} | ||||
|       VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-} | ||||
| 
 | ||||
|     # ports: | ||||
|     #   - target: 4002 | ||||
|  | @ -75,40 +62,3 @@ services: | |||
|       # - "127.0.0.1:4001:4001" | ||||
|       # - "127.0.0.1:4002:4002" | ||||
|       # - "127.0.0.1:5900:5900" | ||||
| 
 | ||||
|   # ib_gw_live: | ||||
|   #   image: waytrade/ib-gateway:1012.2i | ||||
|   #   restart: no | ||||
|   #   network_mode: 'host' | ||||
| 
 | ||||
|   #   volumes: | ||||
|   #     - type: bind | ||||
|   #       source: ./jts_live.ini | ||||
|   #       target: /root/jts/jts.ini | ||||
|   #       # don't let ibc clobber this file for | ||||
|   #       # the main reason of not having a stupid | ||||
|   #       # timezone set.. | ||||
|   #       read_only: true | ||||
| 
 | ||||
|   #     # force our own ibc config | ||||
|   #     - type: bind | ||||
|   #       source: ./ibc.ini | ||||
|   #       target: /root/ibc/config.ini | ||||
| 
 | ||||
|   #     # force our noop script - socat isn't needed in host mode. | ||||
|   #     - type: bind | ||||
|   #       source: ./fork_ports_delayed.sh | ||||
|   #       target: /root/scripts/fork_ports_delayed.sh | ||||
| 
 | ||||
|   #     # force our noop script - socat isn't needed in host mode. | ||||
|   #     - type: bind | ||||
|   #       source: ./run_x11_vnc.sh | ||||
|   #       target: /root/scripts/run_x11_vnc.sh | ||||
|   #       read_only: true | ||||
| 
 | ||||
|   #   # NOTE: to fill these out, define an `.env` file in the same dir as | ||||
|   #   # this compose file which looks something like: | ||||
|   #   environment: | ||||
|   #     TRADING_MODE: 'live' | ||||
|   #     VNC_SERVER_PASSWORD: 'doggy' | ||||
|   #     VNC_SERVER_PORT: '3004' | ||||
|  |  | |||
|  | @ -117,57 +117,9 @@ SecondFactorDevice= | |||
| 
 | ||||
| # If you use the IBKR Mobile app for second factor authentication, | ||||
| # and you fail to complete the process before the time limit imposed | ||||
| # by IBKR, this setting tells IBC whether to automatically restart | ||||
| # the login sequence, giving you another opportunity to complete | ||||
| # second factor authentication.  | ||||
| # | ||||
| # Permitted values are 'yes' and 'no'. | ||||
| # | ||||
| # If this setting is not present or has no value, then the value | ||||
| # of the deprecated ExitAfterSecondFactorAuthenticationTimeout is | ||||
| # used instead. If this also has no value, then this setting defaults | ||||
| # to 'no'. | ||||
| # | ||||
| # NB: you must be using IBC v3.14.0 or later to use this setting: | ||||
| # earlier versions ignore it. | ||||
| 
 | ||||
| ReloginAfterSecondFactorAuthenticationTimeout= | ||||
| 
 | ||||
| 
 | ||||
| # This setting is only relevant if | ||||
| # ReloginAfterSecondFactorAuthenticationTimeout is set to 'yes', | ||||
| # or if ExitAfterSecondFactorAuthenticationTimeout is set to 'yes'. | ||||
| # | ||||
| # It controls how long (in seconds) IBC waits for login to complete | ||||
| # after the user acknowledges the second factor authentication | ||||
| # alert at the IBKR Mobile app. If login has not completed after | ||||
| # this time, IBC terminates. | ||||
| # The default value is 60. | ||||
| 
 | ||||
| SecondFactorAuthenticationExitInterval= | ||||
| 
 | ||||
| 
 | ||||
| # This setting specifies the timeout for second factor authentication | ||||
| # imposed by IB. The value is in seconds. You should not change this | ||||
| # setting unless you have reason to believe that IB has changed the | ||||
| # timeout. The default value is 180. | ||||
| 
 | ||||
| SecondFactorAuthenticationTimeout=180 | ||||
| 
 | ||||
| 
 | ||||
| # DEPRECATED SETTING | ||||
| # ------------------ | ||||
| # | ||||
| # ExitAfterSecondFactorAuthenticationTimeout - THIS SETTING WILL BE | ||||
| # REMOVED IN A FUTURE RELEASE. For IBC version 3.14.0 and later, see | ||||
| # the notes for ReloginAfterSecondFactorAuthenticationTimeout above. | ||||
| # | ||||
| # For IBC versions earlier than 3.14.0: If you use the IBKR Mobile | ||||
| # app for second factor authentication, and you fail to complete the | ||||
| # process before the time limit imposed by IBKR, you can use this | ||||
| # setting to tell IBC to exit: arrangements can then be made to | ||||
| # automatically restart IBC in order to initiate the login sequence | ||||
| # afresh. Otherwise, manual intervention at TWS's | ||||
| # by IBKR, you can use this setting to tell IBC to exit: arrangements | ||||
| # can then be made to automatically restart IBC in order to initiate | ||||
| # the login sequence afresh. Otherwise, manual intervention at TWS's | ||||
| # Second Factor Authentication dialog is needed to complete the | ||||
| # login. | ||||
| # | ||||
|  | @ -180,18 +132,29 @@ SecondFactorAuthenticationTimeout=180 | |||
| ExitAfterSecondFactorAuthenticationTimeout=no | ||||
| 
 | ||||
| 
 | ||||
| # This setting is only relevant if | ||||
| # ExitAfterSecondFactorAuthenticationTimeout is set to 'yes'. | ||||
| # | ||||
| # It controls how long (in seconds) IBC waits for login to complete | ||||
| # after the user acknowledges the second factor authentication | ||||
| # alert at the IBKR Mobile app. If login has not completed after | ||||
| # this time, IBC terminates. | ||||
| # The default value is 40. | ||||
| 
 | ||||
| SecondFactorAuthenticationExitInterval= | ||||
| 
 | ||||
| 
 | ||||
| # Trading Mode | ||||
| # ------------ | ||||
| # | ||||
| # This indicates whether the live account or the paper trading | ||||
| # account corresponding to the supplied credentials is to be used. | ||||
| # The allowed values are 'live' (the default) and 'paper'. | ||||
| # | ||||
| # If this is set to 'live', then the credentials for the live | ||||
| # account must be supplied. If it is set to 'paper', then either | ||||
| # the live or the paper-trading credentials may be supplied. | ||||
| # TWS 955 introduced a new Trading Mode combo box on its login | ||||
| # dialog. This indicates whether the live account or the paper | ||||
| # trading account corresponding to the supplied credentials is | ||||
| # to be used. The allowed values are 'live' (the default) and | ||||
| # 'paper'. For earlier versions of TWS this setting has no | ||||
| # effect. | ||||
| 
 | ||||
| TradingMode=paper | ||||
| TradingMode= | ||||
| 
 | ||||
| 
 | ||||
| # Paper-trading Account Warning | ||||
|  | @ -225,7 +188,7 @@ AcceptNonBrokerageAccountWarning=yes | |||
| # | ||||
| # The default value is 60. | ||||
| 
 | ||||
| LoginDialogDisplayTimeout=60 | ||||
| LoginDialogDisplayTimeout = 60 | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|  | @ -254,15 +217,7 @@ LoginDialogDisplayTimeout=60 | |||
| # but they are acceptable. | ||||
| # | ||||
| # The default is the current working directory when IBC is | ||||
| # started, unless the TWS_SETTINGS_PATH setting in the relevant | ||||
| # start script is set. | ||||
| # | ||||
| # If both this setting and TWS_SETTINGS_PATH are set, then this | ||||
| # setting takes priority. Note that if they have different values, | ||||
| # auto-restart will not work. | ||||
| # | ||||
| # NB: this setting is now DEPRECATED. You should use the | ||||
| # TWS_SETTINGS_PATH setting in the relevant start script. | ||||
| # started. | ||||
| 
 | ||||
| IbDir=/root/Jts | ||||
| 
 | ||||
|  | @ -329,32 +284,15 @@ ExistingSessionDetectedAction=primary | |||
| # Override TWS API Port Number | ||||
| # ---------------------------- | ||||
| # | ||||
| # If OverrideTwsApiPort is set to an integer, IBC changes the  | ||||
| # 'Socket port' in TWS's API configuration to that number shortly  | ||||
| # after startup (but note that for the FIX Gateway, this setting is | ||||
| # actually stored in jts.ini rather than the Gateway's settings | ||||
| # file). Leaving the setting blank will make no change to  | ||||
| # the current setting. This setting is only intended for use in  | ||||
| # certain specialized situations where the port number needs to  | ||||
| # be set dynamically at run-time, and for the FIX Gateway: most | ||||
| # non-FIX users will never need it, so don't use it unless you know | ||||
| # you need it. | ||||
| 
 | ||||
| OverrideTwsApiPort=4000 | ||||
| 
 | ||||
| 
 | ||||
| # Override TWS Master Client ID | ||||
| # ----------------------------- | ||||
| # | ||||
| # If OverrideTwsMasterClientID is set to an integer, IBC changes the | ||||
| # 'Master Client ID' value in TWS's API configuration to that  | ||||
| # value shortly after startup. Leaving the setting blank will make | ||||
| # no change to the current setting. This setting is only intended  | ||||
| # for use in certain specialized situations where the value needs to | ||||
| # If OverrideTwsApiPort is set to an integer, IBC changes the | ||||
| # 'Socket port' in TWS's API configuration to that number shortly | ||||
| # after startup. Leaving the setting blank will make no change to | ||||
| # the current setting. This setting is only intended for use in | ||||
| # certain specialized situations where the port number needs to | ||||
| # be set dynamically at run-time: most users will never need it, | ||||
| # so don't use it unless you know you need it. | ||||
| 
 | ||||
| OverrideTwsMasterClientID= | ||||
| OverrideTwsApiPort=4002 | ||||
| 
 | ||||
| 
 | ||||
| # Read-only Login | ||||
|  | @ -364,13 +302,11 @@ OverrideTwsMasterClientID= | |||
| # account security programme, the user will not be asked to perform | ||||
| # the second factor authentication action, and login to TWS will | ||||
| # occur automatically in read-only mode: in this mode, placing or | ||||
| # managing orders is not allowed.  | ||||
| # | ||||
| # If set to 'no', and the user is enrolled in IB's account security | ||||
| # programme, the second factor authentication process is handled | ||||
| # according to the Second Factor Authentication Settings described | ||||
| # elsewhere in this file. | ||||
| # | ||||
| # managing orders is not allowed. If set to 'no', and the user is | ||||
| # enrolled in IB's account security programme, the user must perform | ||||
| # the relevant second factor authentication action to complete the | ||||
| # login.  | ||||
| 
 | ||||
| # If the user is not enrolled in IB's account security programme, | ||||
| # this setting is ignored. The default is 'no'. | ||||
| 
 | ||||
|  | @ -390,44 +326,7 @@ ReadOnlyLogin=no | |||
| # set the relevant checkbox (this only needs to be done once) and | ||||
| # not provide a value for this setting. | ||||
| 
 | ||||
| ReadOnlyApi= | ||||
| 
 | ||||
| 
 | ||||
| # API Precautions | ||||
| # --------------- | ||||
| #  | ||||
| # These settings relate to the corresponding 'Precautions' checkboxes in the | ||||
| # API section of the Global Configuration dialog. | ||||
| # | ||||
| # For all of these, the accepted values are: | ||||
| # - 'yes' sets the checkbox | ||||
| # - 'no' clears the checkbox | ||||
| # - if not set, the existing TWS/Gateway configuration is unchanged | ||||
| # | ||||
| # NB: these settings are really only supplied for the benefit of new TWS | ||||
| # or Gateway instances that are being automatically installed and | ||||
| # started without user intervention, or where user settings are not preserved | ||||
| # between sessions (eg some Docker containers). Where a user is involved, they | ||||
| # should use the Global Configuration to set the relevant checkboxes and not | ||||
| # provide values for these settings. | ||||
| 
 | ||||
| BypassOrderPrecautions= | ||||
| 
 | ||||
| BypassBondWarning= | ||||
| 
 | ||||
| BypassNegativeYieldToWorstConfirmation= | ||||
| 
 | ||||
| BypassCalledBondWarning= | ||||
| 
 | ||||
| BypassSameActionPairTradeWarning= | ||||
| 
 | ||||
| BypassPriceBasedVolatilityRiskWarning= | ||||
| 
 | ||||
| BypassUSStocksMarketDataInSharesWarning= | ||||
| 
 | ||||
| BypassRedirectOrderWarning= | ||||
| 
 | ||||
| BypassNoOverfillProtectionPrecaution= | ||||
| ReadOnlyApi=no | ||||
| 
 | ||||
| 
 | ||||
| # Market data size for US stocks - lots or shares | ||||
|  | @ -482,145 +381,54 @@ AcceptBidAskLastSizeDisplayUpdateNotification=accept | |||
| SendMarketDataInLotsForUSstocks= | ||||
| 
 | ||||
| 
 | ||||
| # Trusted API Client IPs | ||||
| # ---------------------- | ||||
| # | ||||
| # NB: THIS SETTING IS ONLY RELEVANT FOR THE GATEWAY, AND ONLY WHEN FIX=yes. | ||||
| # In all other cases it is ignored. | ||||
| # | ||||
| # This is a list of IP addresses separated by commas. API clients with IP | ||||
| # addresses in this list are able to connect to the API without Gateway | ||||
| # generating the 'Incoming connection' popup. | ||||
| # | ||||
| # Note that 127.0.0.1 is always permitted to connect, so do not include it | ||||
| # in this setting. | ||||
| 
 | ||||
| TrustedTwsApiClientIPs= | ||||
| 
 | ||||
| 
 | ||||
| # Reset Order ID Sequence | ||||
| # ----------------------- | ||||
| # | ||||
| # The setting resets the order id sequence for orders submitted via the API, so | ||||
| # that the next invocation of the `NextValidId` API callback will return the | ||||
| # value 1. The reset occurs when TWS starts. | ||||
| # | ||||
| # Note that order ids are reset for all API clients, except those that have | ||||
| # outstanding (ie incomplete) orders: their order id sequence carries on as | ||||
| # before. | ||||
| # | ||||
| # Valid values are 'yes', 'true', 'false' and 'no'. The default is 'no'. | ||||
| 
 | ||||
| ResetOrderIdsAtStart= | ||||
| 
 | ||||
| 
 | ||||
| # This setting specifies IBC's action when TWS displays the dialog asking for | ||||
| # confirmation of a request to reset the API order id sequence. | ||||
| # | ||||
| # Note that the Gateway never displays this dialog, so this setting is ignored | ||||
| # for a Gateway session. | ||||
| # | ||||
| # Valid values consist of two strings separated by a solidus '/'. The first | ||||
| # value specifies the action to take when the order id reset request resulted | ||||
| # from setting ResetOrderIdsAtStart=yes. The second specifies the action to | ||||
| # take when the order id reset request is a result of the user clicking the | ||||
| # 'Reset API order ID sequence' button in the API configuration. Each value | ||||
| # must be one of the following: | ||||
| # | ||||
| #    'confirm'  | ||||
| #        order ids will be reset | ||||
| # | ||||
| #    'reject'  | ||||
| #        order ids will not be reset | ||||
| # | ||||
| #    'ignore'  | ||||
| #        IBC will ignore the dialog. The user must take action. | ||||
| # | ||||
| #    The default setting is ignore/ignore | ||||
| 
 | ||||
| # Examples: | ||||
| # | ||||
| #    'confirm/reject' - confirm order id reset only if ResetOrderIdsAtStart=yes | ||||
| #                       and reject any user-initiated requests | ||||
| # | ||||
| #    'ignore/confirm' - user must decide what to do if ResetOrderIdsAtStart=yes | ||||
| #                       and confirm user-initiated requests | ||||
| # | ||||
| #    'reject/ignore'  - reject order id reset if  ResetOrderIdsAtStart=yes but | ||||
| #                       allow user to handle user-initiated requests  | ||||
| 
 | ||||
| ConfirmOrderIdReset= | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| # ============================================================================= | ||||
| # 4.   TWS Auto-Logoff and Auto-Restart | ||||
| # 4.   TWS Auto-Closedown | ||||
| # ============================================================================= | ||||
| # | ||||
| # TWS and Gateway insist on being restarted every day. Two alternative | ||||
| # automatic options are offered:  | ||||
| # IMPORTANT NOTE: Starting with TWS 974, this setting no longer | ||||
| # works properly, because IB have changed the way TWS handles its | ||||
| # autologoff mechanism.  | ||||
| # | ||||
| #    - Auto-Logoff: at a specified time, TWS shuts down tidily, without | ||||
| #      restarting. | ||||
| # You should now configure the TWS autologoff time to something | ||||
| # convenient for you, and restart IBC each day. | ||||
| # | ||||
| #    - Auto-Restart: at a specified time, TWS shuts down and then restarts | ||||
| #      without the user having to re-authenticate. | ||||
| # | ||||
| # The normal way to configure the time at which this happens is via the Lock | ||||
| # and Exit section of the Configuration dialog. Once this time has been | ||||
| # configured in this way, the setting persists until the user changes it again. | ||||
| # | ||||
| # However, there are situations where there is no user available to do this | ||||
| # configuration, or where there is no persistent storage (for example some | ||||
| # Docker images). In such cases, the auto-restart or auto-logoff time can be | ||||
| # set whenever IBC starts with the settings below. | ||||
| # | ||||
| # The value, if specified, must be a time in HH:MM AM/PM format, for example | ||||
| # 08:00 AM or 10:00 PM. Note that there must be a single space between the | ||||
| # two parts of this value; also that midnight is "12:00 AM" and midday is | ||||
| # "12:00 PM". | ||||
| # | ||||
| # If no value is specified for either setting, the currently configured | ||||
| # settings will apply. If a value is supplied for one setting, the other | ||||
| # setting is cleared. If values are supplied for both settings, only the | ||||
| # auto-restart time is set, and the auto-logoff time is cleared. | ||||
| # | ||||
| # Note that for a normal TWS/Gateway installation with persistent storage | ||||
| # (for example on a desktop computer) the value will be persisted as if the | ||||
| # user had set it via the configuration dialog. | ||||
| # | ||||
| # If you choose to auto-restart, you should take note of the considerations | ||||
| # described at the link below. Note that where this information mentions | ||||
| # 'manual authentication', restarting IBC will do the job (IBKR does not | ||||
| # recognise the existence of IBC in its documentation). | ||||
| # | ||||
| #  https://www.interactivebrokers.com/en/software/tws/twsguide.htm#usersguidebook/configuretws/auto_restart_info.htm | ||||
| # | ||||
| # If you use the "RESTART" command via the IBC command server, and IBC is | ||||
| # running any version of the Gateway (or a version of TWS earlier than 1018), | ||||
| # note that this will set the Auto-Restart time in Gateway/TWS's configuration | ||||
| # dialog to the time at which the restart actually happens (which may be up to | ||||
| # a minute after the RESTART command is issued). To prevent future auto- | ||||
| # restarts at this time, you must make sure you have set AutoLogoffTime or | ||||
| # AutoRestartTime to your desired value before running IBC. NB: this does not | ||||
| # apply to TWS from version 1018 onwards. | ||||
| # Alternatively, discontinue use of IBC and use the auto-relogin | ||||
| # mechanism within TWS 974 and later versions (note that the  | ||||
| # auto-relogin mechanism provided by IB is not available if you | ||||
| # use IBC). | ||||
| 
 | ||||
| AutoLogoffTime= | ||||
| # Set to yes or no (lower case). | ||||
| # | ||||
| #   yes   means allow TWS to shut down automatically at its | ||||
| # 	  specified shutdown time, which is set via the TWS | ||||
| #	  configuration menu. | ||||
| # | ||||
| #   no    means TWS never shuts down automatically. | ||||
| # | ||||
| # NB: IB recommends that you do not keep TWS running | ||||
| # continuously. If you set this setting to 'no', you may | ||||
| # experience incorrect TWS operation. | ||||
| # | ||||
| # NB: the default for this setting is 'no'. Since this will | ||||
| # only work properly with TWS versions earlier than 974, you | ||||
| # should explicitly set this to 'yes' for version 974 and later. | ||||
| 
 | ||||
| IbAutoClosedown=yes | ||||
| 
 | ||||
| AutoRestartTime= | ||||
| 
 | ||||
| 
 | ||||
| # ============================================================================= | ||||
| # 5.   TWS Tidy Closedown Time | ||||
| # ============================================================================= | ||||
| # | ||||
| # Specifies a time at which TWS will close down tidily, with no restart. | ||||
| # NB: starting with TWS 974 this is no longer a useful option | ||||
| # because both TWS and Gateway now have the same auto-logoff | ||||
| # mechanism, and IBC can no longer avoid this. | ||||
| # | ||||
| # There is little reason to use this setting. It is similar to AutoLogoffTime, | ||||
| # but can include a day-of-the-week, whereas AutoLogoffTime and AutoRestartTime | ||||
| # apply every day. So for example you could use ClosedownAt in conjunction with | ||||
| # AutoRestartTime to shut down TWS on Friday evenings after the markets | ||||
| # close, without it running on Saturday as well. | ||||
| # Note that giving this setting a value does not change TWS's | ||||
| # auto-logoff in any way: any setting will be additional to the | ||||
| # TWS auto-logoff. | ||||
| # | ||||
| # To tell IBC to tidily close TWS at a specified time every | ||||
| # day, set this value to <hh:mm>, for example: | ||||
|  | @ -679,7 +487,7 @@ AcceptIncomingConnectionAction=reject | |||
| #   no    means the dialog remains on display and must be | ||||
| #         handled by the user. | ||||
| 
 | ||||
| AllowBlindTrading=no | ||||
| AllowBlindTrading=yes | ||||
| 
 | ||||
| 
 | ||||
| # Save Settings on a Schedule | ||||
|  | @ -722,26 +530,6 @@ AllowBlindTrading=no | |||
| SaveTwsSettingsAt= | ||||
| 
 | ||||
| 
 | ||||
| # Confirm Crypto Currency Orders Automatically | ||||
| # -------------------------------------------- | ||||
| # | ||||
| # When you place an order for a cryptocurrency contract, a dialog is displayed | ||||
| # asking you to confirm that you want to place the order, and notifying you | ||||
| # that you are placing an order to trade cryptocurrency with Paxos, a New York | ||||
| # limited trust company, and not at Interactive Brokers. | ||||
| # | ||||
| #   transmit    means that the order will be placed automatically, and the | ||||
| #               dialog will then be closed | ||||
| # | ||||
| #   cancel      means that the order will not be placed, and the dialog will | ||||
| #               then be closed | ||||
| # | ||||
| #   manual      means that IBC will take no action and the user must deal | ||||
| #               with the dialog | ||||
| 
 | ||||
| ConfirmCryptoCurrencyOrders=transmit | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| # ============================================================================= | ||||
| # 7.   Settings Specific to Indian Versions of TWS | ||||
|  | @ -778,17 +566,13 @@ DismissNSEComplianceNotice=yes | |||
| # | ||||
| # The port number that IBC listens on for commands | ||||
| # such as "STOP". DO NOT set this to the port number | ||||
| # used for TWS API connections. | ||||
| # | ||||
| # The convention is to use 7462 for this port, | ||||
| # but it must be set to a different value from any other | ||||
| # IBC instance that might run at the same time. | ||||
| # | ||||
| # The default value is 0, which tells IBC not to start | ||||
| # the command server | ||||
| # used for TWS API connections. There is no good reason | ||||
| # to change this setting unless the port is used by | ||||
| # some other application (typically another instance of | ||||
| # IBC). The default value is 0, which tells IBC not to | ||||
| # start the command server | ||||
| 
 | ||||
| #CommandServerPort=7462 | ||||
| CommandServerPort=0 | ||||
| 
 | ||||
| 
 | ||||
| # Permitted Command Sources | ||||
|  | @ -799,19 +583,19 @@ CommandServerPort=0 | |||
| # IBC.  Commands can always be sent from the | ||||
| # same host as IBC is running on. | ||||
| 
 | ||||
| ControlFrom= | ||||
| ControlFrom=127.0.0.1 | ||||
| 
 | ||||
| 
 | ||||
| # Address for Receiving Commands | ||||
| # ------------------------------ | ||||
| # | ||||
| # Specifies the IP address on which the Command Server | ||||
| # is to listen. For a multi-homed host, this can be used | ||||
| # is to listen. For a multi-homed host, this can be used | ||||
| # to specify that connection requests are only to be | ||||
| # accepted on the specified address. The default is to | ||||
| # accept connection requests on all local addresses. | ||||
| 
 | ||||
| BindAddress= | ||||
| BindAddress=127.0.0.1 | ||||
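| # As a hedged sketch (values assumed, not from the original | ||||
| # file): with the command server enabled on the conventional | ||||
| # port (CommandServerPort=7462) and 127.0.0.1 permitted as | ||||
| # above, a command such as "STOP" could be sent with any | ||||
| # line-oriented TCP client: | ||||
| # | ||||
| #   echo STOP | nc 127.0.0.1 7462 | ||||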
| 
 | ||||
| 
 | ||||
| # Command Prompt | ||||
|  | @ -837,7 +621,7 @@ CommandPrompt= | |||
| # information is sent. The default is that such information | ||||
| # is not sent. | ||||
| 
 | ||||
| SuppressInfoMessages=yes | ||||
| SuppressInfoMessages=no | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|  | @ -867,10 +651,10 @@ SuppressInfoMessages=yes | |||
| # The LogStructureScope setting indicates which windows are | ||||
| # eligible for structure logging: | ||||
| # | ||||
| #    - (default value) if set to 'known', only windows that | ||||
| #      IBC recognizes are eligible - these are windows that | ||||
| #      IBC has some interest in monitoring, usually to take | ||||
| #      some action on the user's behalf; | ||||
| #    - if set to 'known', only windows that IBC recognizes | ||||
| #      are eligible - these are windows that IBC has some | ||||
| #      interest in monitoring, usually to take some action | ||||
| #      on the user's behalf; | ||||
| # | ||||
| #    - if set to 'unknown', only windows that IBC does not | ||||
| #      recognize are eligible. Most windows displayed by | ||||
|  | @ -883,8 +667,9 @@ SuppressInfoMessages=yes | |||
| #    - if set to 'all', then every window displayed by TWS | ||||
| #      is eligible. | ||||
| # | ||||
| # The default value is 'known'. | ||||
| 
 | ||||
| LogStructureScope=known | ||||
| LogStructureScope=all | ||||
| 
 | ||||
| 
 | ||||
| # When to Log Window Structure | ||||
|  | @ -897,15 +682,13 @@ LogStructureScope=known | |||
| #       structure of an eligible window the first time it | ||||
| #       is encountered; | ||||
| # | ||||
| #     - if set to 'openclose', the structure is logged every | ||||
| #       time an eligible window is opened or closed; | ||||
| # | ||||
| #    - if set to 'activate', the structure is logged every | ||||
| #      time an eligible window is made active; | ||||
| # | ||||
| #    - (default value) if set to 'never' or 'no' or 'false', | ||||
| #      structure information is never logged. | ||||
| #    - if set to 'never' or 'no' or 'false', structure | ||||
| #      information is never logged. | ||||
| # | ||||
| # The default value is 'never'. | ||||
| 
 | ||||
| LogStructureWhen=never | ||||
| 
 | ||||
|  | @ -925,3 +708,4 @@ LogStructureWhen=never | |||
| #LogComponents= | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,33 +0,0 @@ | |||
| [IBGateway] | ||||
| ApiOnly=true | ||||
| LocalServerPort=4001 | ||||
| # NOTE: must be set if using IBC's "reject" mode | ||||
| TrustedIPs=127.0.0.1 | ||||
| ; RemoteHostOrderRouting=ndc1.ibllc.com | ||||
| ; WriteDebug=true | ||||
| ; RemotePortOrderRouting=4001 | ||||
| ; useRemoteSettings=false | ||||
| ; tradingMode=p | ||||
| ; Steps=8 | ||||
| ; colorPalletName=dark | ||||
| 
 | ||||
| # window geo, this may be useful for sending `xdotool` commands? | ||||
| ; MainWindow.Width=1986 | ||||
| ; screenHeight=3960 | ||||
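| # a hypothetical sketch of such a command (the window title and | ||||
| # target size are assumptions, not verified): | ||||
| #   xdotool search --name "IB Gateway" windowsize 1986 1080 | ||||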
| 
 | ||||
| 
 | ||||
| [Logon] | ||||
| Locale=en | ||||
| # most markets are oriented around this zone | ||||
| # so might as well hard code it. | ||||
| TimeZone=America/New_York | ||||
| UseSSL=true | ||||
| displayedproxymsg=1 | ||||
| os_titlebar=true | ||||
| s3store=true | ||||
| useRemoteSettings=false | ||||
| 
 | ||||
| [Communication] | ||||
| ctciAutoEncrypt=true | ||||
| Region=usr | ||||
| ; Peer=cdc1.ibllc.com:4001 | ||||
|  | @ -1,35 +1,16 @@ | |||
| #!/bin/sh | ||||
| # start vnc server and listen for connections | ||||
| # on port specced in `$VNC_SERVER_PORT` | ||||
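| # hedged usage sketch (the script path is hypothetical; 3003 | ||||
| # matches the old `-autoport` value further below): | ||||
| # | ||||
| #   VNC_SERVER_PORT=3003 ./run_vnc.sh | ||||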
| 
 | ||||
| # start VNC server | ||||
| x11vnc \ | ||||
|     -listen 127.0.0.1 \ | ||||
|     -allow 127.0.0.1 \ | ||||
|     -rfbport "${VNC_SERVER_PORT}" \ | ||||
|     -ncache_cr \ | ||||
|     -listen localhost \ | ||||
|     -display :1 \ | ||||
|     -forever \ | ||||
|     -shared \ | ||||
|     -logappend /var/log/x11vnc.log \ | ||||
|     -bg \ | ||||
|     -nowf \ | ||||
|     -noxdamage \ | ||||
|     -noxfixes \ | ||||
|     -no6 \ | ||||
|     -noipv6 | ||||
| 
 | ||||
| 
 | ||||
|     # -nowcr \ | ||||
|     # TODO: can't use this because of ``asyncvnc`` issue: | ||||
|     -autoport 3003 \ | ||||
|     # can't use this because of ``asyncvnc`` issue: | ||||
|     # https://github.com/barneygale/asyncvnc/issues/1 | ||||
|     # -passwd 'ibcansmbz' | ||||
| 
 | ||||
|     # XXX: optional graphics caching flags that seem to rekt the overlay | ||||
|     # of the 2 gw windows? When running a single gateway | ||||
|     # this seems to maybe optimize some memory usage? | ||||
|     # -ncache_cr \ | ||||
|     # -ncache \ | ||||
| 
 | ||||
|     # NOTE: this will prevent logs from going to the console. | ||||
|     # -logappend /var/log/x11vnc.log \ | ||||
| 
 | ||||
|     # where to start allocating ports | ||||
|     # -autoport "${VNC_SERVER_PORT}" \ | ||||
|  |  | |||
|  | @ -1,91 +0,0 @@ | |||
| ### NOTE this is likely out of date given it was written some | ||||
| years ago by a user who has since not really partaken in | ||||
| contributing. | ||||
| 
 | ||||
| install for tinas | ||||
| ***************** | ||||
| for windows peeps you can start by installing all the prerequisite software: | ||||
| 
 | ||||
| - install git with all default settings - https://git-scm.com/download/win | ||||
| - install anaconda all default settings - https://www.anaconda.com/products/individual | ||||
| - install microsoft build tools (check the box for Desktop development for C++, you might be able to uncheck some optional downloads)  - https://visualstudio.microsoft.com/visual-cpp-build-tools/ | ||||
| - install visual studio code default settings - https://code.visualstudio.com/download | ||||
| 
 | ||||
| 
 | ||||
| then, `crack a conda shell`_ and run the following commands:: | ||||
| 
 | ||||
|     mkdir code # create code directory | ||||
|     cd code # change directory to code | ||||
|     git clone https://github.com/pikers/piker.git # downloads piker installation package from github | ||||
|     cd piker # change directory to piker | ||||
|      | ||||
|     conda create -n pikonda # creates conda environment named pikonda | ||||
|     conda activate pikonda # activates pikonda | ||||
|      | ||||
|     conda install -c conda-forge python-levenshtein # in case it is not already installed | ||||
|     conda install pip # may already be installed | ||||
|     pip --version # confirms pip is installed | ||||
|      | ||||
|     pip install -e . -r requirements.txt # install piker in editable mode | ||||
| 
 | ||||
| test Piker to see if it is working:: | ||||
| 
 | ||||
|     piker -b binance chart btcusdt.binance # formatting for loading a chart | ||||
|     piker -b kraken -b binance chart xbtusdt.kraken | ||||
|     piker -b kraken -b binance -b ib chart qqq.nasdaq.ib | ||||
|     piker -b ib chart tsla.nasdaq.ib | ||||
| 
 | ||||
| potential error:: | ||||
|      | ||||
|     FileNotFoundError: [Errno 2] No such file or directory: 'C:\\Users\\user\\AppData\\Roaming\\piker\\brokers.toml' | ||||
|      | ||||
| solution: | ||||
| 
 | ||||
| - navigate to the directory above (it may be different on your machine; the location should be listed in the error message) | ||||
| - copy and paste the file from 'C:\\Users\\user\\code\\data\\brokers.toml' or create a blank file using notepad at the location above | ||||
| 
 | ||||
| Visual Studio Code setup: | ||||
| 
 | ||||
| - now that piker is installed we can set up vscode as the default environment for running piker and editing the code | ||||
| - open Visual Studio Code | ||||
| - file --> Add Folder to Workspace --> C:\Users\user\code\piker (adds piker directory where all piker files are located) | ||||
| - file --> Save Workspace As --> save it wherever you want and call it whatever you want, this is going to be your default workspace for running and editing piker code | ||||
| - ctrl + shift + p --> start typing Python: Select Interpreter --> when the option comes up select it --> Select at the workspace level --> select the one that shows ('pikonda') | ||||
| - change the default terminal to cmd.exe instead of powershell (default) | ||||
| - now when you create a new terminal VScode should automatically activate your conda env so that piker can be run as the first command after a new terminal is created | ||||
| 
 | ||||
| also, try out fancyzones as part of powertoyz for a decent tiling window manager to manage all the cool new software you are going to be running. | ||||
| 
 | ||||
| .. _conda installed: https:// | ||||
| .. _C++ build toolz: https:// | ||||
| .. _crack a conda shell: https:// | ||||
| .. _vscode: https:// | ||||
| 
 | ||||
| .. link to the tina guide | ||||
| .. _setup a coolio tiled wm console: https:// | ||||
| 
 | ||||
| provider support | ||||
| **************** | ||||
| for live data feeds the in-progress set of supported brokers is: | ||||
| 
 | ||||
| - IB_ via ``ib_insync``, also see our `container docs`_ | ||||
| - binance_ and kraken_ for crypto over their public websocket API | ||||
| - questrade_ (ish) which comes with effectively free L1 | ||||
| 
 | ||||
| coming soon... | ||||
| 
 | ||||
| - webull_ via the reverse engineered public API | ||||
| - yahoo via yliveticker_ | ||||
| 
 | ||||
| if you want your broker supported and they have an API let us know. | ||||
| 
 | ||||
| .. _IB: https://interactivebrokers.github.io/tws-api/index.html | ||||
| .. _container docs: https://github.com/pikers/piker/tree/master/dockering/ib | ||||
| .. _questrade: https://www.questrade.com/api/documentation | ||||
| .. _kraken: https://www.kraken.com/features/api#public-market-data | ||||
| .. _binance: https://github.com/pikers/piker/pull/182 | ||||
| .. _webull: https://github.com/tedchou12/webull | ||||
| .. _yliveticker: https://github.com/yahoofinancelive/yliveticker | ||||
| .. _coinbase: https://docs.pro.coinbase.com/#websocket-feed | ||||
| 
 | ||||
| 
 | ||||
|  | @ -1,263 +0,0 @@ | |||
| # from pprint import pformat | ||||
| from functools import partial | ||||
| from decimal import Decimal | ||||
| from typing import Callable | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| from uuid import uuid4 | ||||
| 
 | ||||
| from piker.service import maybe_open_pikerd | ||||
| from piker.accounting import dec_digits | ||||
| from piker.clearing import ( | ||||
|     open_ems, | ||||
|     OrderClient, | ||||
| ) | ||||
| # TODO: we should probably expose these top level in this subsys? | ||||
| from piker.clearing._messages import ( | ||||
|     Order, | ||||
|     Status, | ||||
|     BrokerdPosition, | ||||
| ) | ||||
| from piker.data import ( | ||||
|     iterticks, | ||||
|     Flume, | ||||
|     open_feed, | ||||
|     Feed, | ||||
|     # ShmArray, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: handle other statuses: | ||||
| # - fills, errors, and position tracking | ||||
| async def wait_for_order_status( | ||||
|     trades_stream: tractor.MsgStream, | ||||
|     oid: str, | ||||
|     expect_status: str, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     list[Status], | ||||
|     list[BrokerdPosition], | ||||
| ]: | ||||
|     ''' | ||||
|     Wait for a specific order status for a given dialog, return msg flow | ||||
|     up to that msg and any position update msgs in a tuple. | ||||
| 
 | ||||
|     ''' | ||||
|     # Wait for position message before moving on to verify flow(s) | ||||
|     # for the multi-order position entry/exit. | ||||
|     status_msgs: list[Status] = [] | ||||
|     pp_msgs: list[BrokerdPosition] = [] | ||||
| 
 | ||||
|     async for msg in trades_stream: | ||||
|         match msg: | ||||
|             case {'name': 'position'}: | ||||
|                 ppmsg = BrokerdPosition(**msg) | ||||
|                 pp_msgs.append(ppmsg) | ||||
| 
 | ||||
|             case { | ||||
|                 'name': 'status', | ||||
|             }: | ||||
|                 msg = Status(**msg) | ||||
|                 status_msgs.append(msg) | ||||
| 
 | ||||
|                 # if we get the status we expect then return all | ||||
|                 # collected msgs from the brokerd dialog up to the | ||||
|                 # expected msg B) | ||||
|                 if ( | ||||
|                      msg.resp == expect_status | ||||
|                      and msg.oid == oid | ||||
|                 ): | ||||
|                     return status_msgs, pp_msgs | ||||
| 
 | ||||
| 
 | ||||
| async def bot_main(): | ||||
|     ''' | ||||
|     Boot the piker runtime, open an ems connection, submit | ||||
|     and process orders statuses in real-time. | ||||
| 
 | ||||
|     ''' | ||||
|     ll: str = 'info' | ||||
| 
 | ||||
|     # open an order ctl client, live data feed, trio nursery for | ||||
|     # spawning an order trailer task | ||||
|     client: OrderClient | ||||
|     trades_stream: tractor.MsgStream | ||||
|     feed: Feed | ||||
|     accounts: list[str] | ||||
| 
 | ||||
|     fqme: str = 'btcusdt.usdtm.perp.binance' | ||||
| 
 | ||||
|     async with ( | ||||
| 
 | ||||
|         # TODO: do this implicitly inside `open_ems()` ep below? | ||||
|         # init and sync actor-service runtime | ||||
|         maybe_open_pikerd( | ||||
|             loglevel=ll, | ||||
|             debug_mode=True, | ||||
| 
 | ||||
|         ), | ||||
|         open_ems( | ||||
|             fqme, | ||||
|             mode='paper',  # {'live', 'paper'} | ||||
|             # mode='live',  # for real-brokerd submissions | ||||
|             loglevel=ll, | ||||
| 
 | ||||
|         ) as ( | ||||
|             client,  # OrderClient | ||||
|             trades_stream,  # tractor.MsgStream | ||||
|             _,  # positions | ||||
|             accounts, | ||||
|             _,  # dialogs | ||||
|         ), | ||||
| 
 | ||||
|         open_feed( | ||||
|             fqmes=[fqme], | ||||
|             loglevel=ll, | ||||
| 
 | ||||
|             # TODO: if you want to throttle via downsampling | ||||
|             # how many tick updates your feed received on | ||||
|             # quote streams B) | ||||
|             # tick_throttle=10, | ||||
|         ) as feed, | ||||
| 
 | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         assert accounts | ||||
|         print(f'Loaded binance accounts: {accounts}') | ||||
| 
 | ||||
|         flume: Flume = feed.flumes[fqme] | ||||
|         min_tick = Decimal(flume.mkt.price_tick) | ||||
|         min_tick_digits: int = dec_digits(min_tick) | ||||
|         price_round: Callable = partial( | ||||
|             round, | ||||
|             ndigits=min_tick_digits, | ||||
|         ) | ||||
| 
 | ||||
|         quote_stream: trio.abc.ReceiveChannel = feed.streams['binance'] | ||||
| 
 | ||||
| 
 | ||||
|         # always keep the live limit 0.03% below the last | ||||
|         # clearing price | ||||
|         clear_margin: float = 0.9997 | ||||
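|         # e.g. (illustrative numbers only): with a last clear at | ||||
|         # 30_000 the trailing limit sits at 30_000 * 0.9997 | ||||
|         # = 29_991, prior to tick-size rounding. | ||||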
| 
 | ||||
|         async def trailer( | ||||
|             order: Order, | ||||
|         ): | ||||
|             # ref shm OHLCV array history, if you want | ||||
|             # s_shm: ShmArray = flume.rt_shm | ||||
|             # m_shm: ShmArray = flume.hist_shm | ||||
| 
 | ||||
|             # NOTE: if you wanted to frame ticks by type like the | ||||
|             # quote throttler does.. this is probably faster in | ||||
|             # terms of getting the latest embedded value of | ||||
|             # interest for a given tick type: | ||||
|             # from piker.data._sampling import frame_ticks | ||||
| 
 | ||||
|             async for quotes in quote_stream: | ||||
|                 for fqme, quote in quotes.items(): | ||||
|                     # print( | ||||
|                     #     f'{quote["symbol"]} -> {quote["ticks"]}\n' | ||||
|                     #     f'last 1s OHLC:\n{s_shm.array[-1]}\n' | ||||
|                     #     f'last 1m OHLC:\n{m_shm.array[-1]}\n' | ||||
|                     # ) | ||||
| 
 | ||||
|                     for tick in iterticks( | ||||
|                         quote, | ||||
|                         reverse=True, | ||||
|                         # types=('trade', 'dark_trade'), # defaults | ||||
|                     ): | ||||
| 
 | ||||
|                         await client.update( | ||||
|                             uuid=order.oid, | ||||
|                             price=price_round( | ||||
|                                 clear_margin | ||||
|                                 * | ||||
|                                 tick['price'] | ||||
|                             ), | ||||
|                         ) | ||||
|                         msgs, pps = await wait_for_order_status( | ||||
|                             trades_stream, | ||||
|                             order.oid, | ||||
|                             'open' | ||||
|                         ) | ||||
|                         # if multiple clears per quote just | ||||
|                         # skip to the next quote? | ||||
|                         break | ||||
| 
 | ||||
| 
 | ||||
|         # get first live quote to be sure we submit the initial | ||||
|         # live buy limit low enough that it doesn't clear due to | ||||
|         # a stale initial price from the data feed layer! | ||||
|         first_ask_price: float | None = None | ||||
|         async for quotes in quote_stream: | ||||
|             for fqme, quote in quotes.items(): | ||||
|                 # print(quote['symbol']) | ||||
|                 for tick in iterticks(quote, types=('ask',)): | ||||
|                     first_ask_price: float = tick['price'] | ||||
|                     break | ||||
| 
 | ||||
|             if first_ask_price: | ||||
|                 break | ||||
| 
 | ||||
|         # setup order dialog via first msg | ||||
|         price: float = price_round( | ||||
|             clear_margin | ||||
|             * | ||||
|             first_ask_price, | ||||
|         ) | ||||
| 
 | ||||
|         # compute a 1k USD sized pos | ||||
|         size: float = round(1e3/price, ndigits=3) | ||||
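|         # e.g. (illustrative): at a price of 29_991 this yields | ||||
|         # round(1e3 / 29_991, ndigits=3) == 0.033 units. | ||||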
| 
 | ||||
|         order = Order( | ||||
| 
 | ||||
|             # TODO: docs on how this all works, bc even i'm not entirely | ||||
|             # clear XD. also we probably want to figure out how to | ||||
|             # offer both the paper engine running and the brokerd | ||||
|             # order ctl tasks with the ems choosing which stream to | ||||
|             # route msgs on given the account value! | ||||
|             account='paper',  # use built-in paper clearing engine and .accounting | ||||
|             # account='binance.usdtm',  # for live binance futes | ||||
| 
 | ||||
|             oid=str(uuid4()), | ||||
|             exec_mode='live',  # {'dark', 'live', 'alert'} | ||||
| 
 | ||||
|             action='buy',  # TODO: remove this from our schema? | ||||
| 
 | ||||
|             size=size, | ||||
|             symbol=fqme, | ||||
|             price=price, | ||||
|             brokers=['binance'], | ||||
|         ) | ||||
|         await client.send(order) | ||||
| 
 | ||||
|         msgs, pps = await wait_for_order_status( | ||||
|             trades_stream, | ||||
|             order.oid, | ||||
|             'open', | ||||
|         ) | ||||
| 
 | ||||
|         assert not pps | ||||
|         assert msgs[-1].oid == order.oid | ||||
| 
 | ||||
|         # start "trailer task" which tracks rt quote stream | ||||
|         tn.start_soon(trailer, order) | ||||
| 
 | ||||
|         try: | ||||
|             # wait for ctl-c from user.. | ||||
|             await trio.sleep_forever() | ||||
|         except KeyboardInterrupt: | ||||
|             # cancel the open order | ||||
|             await client.cancel(order.oid) | ||||
| 
 | ||||
|             msgs, pps = await wait_for_order_status( | ||||
|                 trades_stream, | ||||
|                 order.oid, | ||||
|                 'canceled' | ||||
|             ) | ||||
|             raise | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(bot_main) | ||||
							
								
								
									
|  |  | |||
flake.lock
|  | @ -1,138 +0,0 @@ |||
| { | ||||
|   "nodes": { | ||||
|     "flake-utils": { | ||||
|       "inputs": { | ||||
|         "systems": "systems" | ||||
|       }, | ||||
|       "locked": { | ||||
|         "lastModified": 1689068808, | ||||
|         "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=", | ||||
|         "owner": "numtide", | ||||
|         "repo": "flake-utils", | ||||
|         "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4", | ||||
|         "type": "github" | ||||
|       }, | ||||
|       "original": { | ||||
|         "owner": "numtide", | ||||
|         "repo": "flake-utils", | ||||
|         "type": "github" | ||||
|       } | ||||
|     }, | ||||
|     "flake-utils_2": { | ||||
|       "inputs": { | ||||
|         "systems": "systems_2" | ||||
|       }, | ||||
|       "locked": { | ||||
|         "lastModified": 1689068808, | ||||
|         "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=", | ||||
|         "owner": "numtide", | ||||
|         "repo": "flake-utils", | ||||
|         "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4", | ||||
|         "type": "github" | ||||
|       }, | ||||
|       "original": { | ||||
|         "owner": "numtide", | ||||
|         "repo": "flake-utils", | ||||
|         "type": "github" | ||||
|       } | ||||
|     }, | ||||
|     "nix-github-actions": { | ||||
|       "inputs": { | ||||
|         "nixpkgs": [ | ||||
|           "poetry2nix", | ||||
|           "nixpkgs" | ||||
|         ] | ||||
|       }, | ||||
|       "locked": { | ||||
|         "lastModified": 1688870561, | ||||
|         "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=", | ||||
|         "owner": "nix-community", | ||||
|         "repo": "nix-github-actions", | ||||
|         "rev": "165b1650b753316aa7f1787f3005a8d2da0f5301", | ||||
|         "type": "github" | ||||
|       }, | ||||
|       "original": { | ||||
|         "owner": "nix-community", | ||||
|         "repo": "nix-github-actions", | ||||
|         "type": "github" | ||||
|       } | ||||
|     }, | ||||
|     "nixpkgs": { | ||||
|       "locked": { | ||||
|         "lastModified": 1692174805, | ||||
|         "narHash": "sha256-xmNPFDi/AUMIxwgOH/IVom55Dks34u1g7sFKKebxUm0=", | ||||
|         "owner": "NixOS", | ||||
|         "repo": "nixpkgs", | ||||
|         "rev": "caac0eb6bdcad0b32cb2522e03e4002c8975c62e", | ||||
|         "type": "github" | ||||
|       }, | ||||
|       "original": { | ||||
|         "owner": "NixOS", | ||||
|         "ref": "nixos-unstable", | ||||
|         "repo": "nixpkgs", | ||||
|         "type": "github" | ||||
|       } | ||||
|     }, | ||||
|     "poetry2nix": { | ||||
|       "inputs": { | ||||
|         "flake-utils": "flake-utils_2", | ||||
|         "nix-github-actions": "nix-github-actions", | ||||
|         "nixpkgs": [ | ||||
|           "nixpkgs" | ||||
|         ] | ||||
|       }, | ||||
|       "locked": { | ||||
|         "lastModified": 1692048894, | ||||
|         "narHash": "sha256-cDw03rso2V4CDc3Mll0cHN+ztzysAvdI8pJ7ybbz714=", | ||||
|         "ref": "refs/heads/pyqt6", | ||||
|         "rev": "b059ad4c3051f45d6c912e17747aae37a9ec1544", | ||||
|         "revCount": 2276, | ||||
|         "type": "git", | ||||
|         "url": "file:///home/lord_fomo/repos/poetry2nix" | ||||
|       }, | ||||
|       "original": { | ||||
|         "type": "git", | ||||
|         "url": "file:///home/lord_fomo/repos/poetry2nix" | ||||
|       } | ||||
|     }, | ||||
|     "root": { | ||||
|       "inputs": { | ||||
|         "flake-utils": "flake-utils", | ||||
|         "nixpkgs": "nixpkgs", | ||||
|         "poetry2nix": "poetry2nix" | ||||
|       } | ||||
|     }, | ||||
|     "systems": { | ||||
|       "locked": { | ||||
|         "lastModified": 1681028828, | ||||
|         "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", | ||||
|         "owner": "nix-systems", | ||||
|         "repo": "default", | ||||
|         "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", | ||||
|         "type": "github" | ||||
|       }, | ||||
|       "original": { | ||||
|         "owner": "nix-systems", | ||||
|         "repo": "default", | ||||
|         "type": "github" | ||||
|       } | ||||
|     }, | ||||
|     "systems_2": { | ||||
|       "locked": { | ||||
|         "lastModified": 1681028828, | ||||
|         "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", | ||||
|         "owner": "nix-systems", | ||||
|         "repo": "default", | ||||
|         "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", | ||||
|         "type": "github" | ||||
|       }, | ||||
|       "original": { | ||||
|         "owner": "nix-systems", | ||||
|         "repo": "default", | ||||
|         "type": "github" | ||||
|       } | ||||
|     } | ||||
|   }, | ||||
|   "root": "root", | ||||
|   "version": 7 | ||||
| } | ||||
							
								
								
									
|  |  | |||
flake.nix
|  | @ -1,180 +0,0 @@ |||
| # NOTE: to convert to a poetry2nix env like this one, the steps | ||||
| # are: | ||||
| # - install poetry in your system nix config | ||||
| # - convert the repo to use poetry using `poetry init`: | ||||
| #   https://python-poetry.org/docs/basic-usage/#initialising-a-pre-existing-project | ||||
| # - then manually ensure all deps are converted over | ||||
| # - add this file to the repo and commit it | ||||
| 
 | ||||
| # GROKin tips: | ||||
| # - CLI eps are (ostensibly) added via an `entry_points.txt`: | ||||
| #   - https://packaging.python.org/en/latest/specifications/entry-points/#file-format | ||||
| #   - https://github.com/nix-community/poetry2nix/blob/master/editable.nix#L49 | ||||
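| #   - a hedged sketch of that file's format (the `piker.cli:cli` | ||||
| #     target is an assumption, not verified here): | ||||
| # | ||||
| #       [console_scripts] | ||||
| #       piker = piker.cli:cli | ||||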
| { | ||||
|   description = "piker: trading gear for hackers (pkged with poetry2nix)"; | ||||
| 
 | ||||
|   inputs.flake-utils.url = "github:numtide/flake-utils"; | ||||
|   inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; | ||||
| 
 | ||||
|   # see https://github.com/nix-community/poetry2nix/tree/master#api | ||||
|   inputs.poetry2nix = { | ||||
|     # url = "github:nix-community/poetry2nix"; | ||||
|     # url = "github:K900/poetry2nix/qt5-explicit-deps"; | ||||
|     url = "/home/lord_fomo/repos/poetry2nix"; | ||||
| 
 | ||||
|     inputs.nixpkgs.follows = "nixpkgs"; | ||||
|   }; | ||||
| 
 | ||||
|   outputs = { | ||||
|     self, | ||||
|     nixpkgs, | ||||
|     flake-utils, | ||||
|     poetry2nix, | ||||
|   }: | ||||
|     # TODO: build cross-OS and use the `${system}` var thingy.. | ||||
|     flake-utils.lib.eachDefaultSystem (system: | ||||
|       let | ||||
|         # use PWD as sources | ||||
|         projectDir = ./.; | ||||
|         pyproject = ./pyproject.toml; | ||||
|         poetrylock = ./poetry.lock; | ||||
| 
 | ||||
|         # TODO: port to 3.11 and support both versions? | ||||
|         python = "python3.10"; | ||||
| 
 | ||||
|         # for more functions and examples. | ||||
|         # inherit | ||||
|         # (poetry2nix.legacyPackages.${system}) | ||||
|         # mkPoetryApplication; | ||||
|         # pkgs = nixpkgs.legacyPackages.${system}; | ||||
| 
 | ||||
|         pkgs = nixpkgs.legacyPackages.x86_64-linux; | ||||
|         lib = pkgs.lib; | ||||
|         p2npkgs = poetry2nix.legacyPackages.x86_64-linux; | ||||
| 
 | ||||
|         # define all pkg overrides per dep, see edgecases.md: | ||||
|         # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md | ||||
|         # TODO: add these into the json file: | ||||
|         # https://github.com/nix-community/poetry2nix/blob/master/overrides/build-systems.json | ||||
|         pypkgs-build-requirements = { | ||||
|           asyncvnc = [ "setuptools" ]; | ||||
|           eventkit = [ "setuptools" ]; | ||||
|           ib-insync = [ "setuptools" "flake8" ]; | ||||
|           msgspec = [ "setuptools"]; | ||||
|           pdbp = [ "setuptools" ]; | ||||
|           pyqt6-sip = [ "setuptools" ]; | ||||
|           tabcompleter = [ "setuptools" ]; | ||||
|           tractor = [ "setuptools" ]; | ||||
|           tricycle = [ "setuptools" ]; | ||||
|           trio-typing = [ "setuptools" ]; | ||||
|           trio-util = [ "setuptools" ]; | ||||
|           xonsh = [ "setuptools" ]; | ||||
|         }; | ||||
| 
 | ||||
|         # auto-generate override entries | ||||
|         p2n-overrides = p2npkgs.defaultPoetryOverrides.extend (self: super: | ||||
|           builtins.mapAttrs (package: build-requirements: | ||||
|             (builtins.getAttr package super).overridePythonAttrs (old: { | ||||
|               buildInputs = ( | ||||
|                 old.buildInputs or [ ] | ||||
|               ) ++ ( | ||||
|                 builtins.map ( | ||||
|                   pkg: if builtins.isString pkg then builtins.getAttr pkg super else pkg | ||||
|                   ) build-requirements | ||||
|               ); | ||||
|             }) | ||||
|           ) pypkgs-build-requirements | ||||
|         ); | ||||
| 
 | ||||
|         # override some ahead-of-time compiled extensions | ||||
|         # to be built with their wheels. | ||||
|         ahot_overrides = p2n-overrides.extend( | ||||
|           final: prev: { | ||||
| 
 | ||||
|             # llvmlite = prev.llvmlite.override { | ||||
|             #   preferWheel = false; | ||||
|             # }; | ||||
| 
 | ||||
|             # TODO: get this workin with p2n and nixpkgs.. | ||||
|             # pyqt6 = prev.pyqt6.override { | ||||
|             #   preferWheel = true; | ||||
|             # }; | ||||
| 
 | ||||
|             # NOTE: this DOESN'T work atm but after a fix | ||||
|             # to poetry2nix, it will and actually this line | ||||
|             # won't be needed - thanks @k900: | ||||
|             # https://github.com/nix-community/poetry2nix/pull/1257 | ||||
|             pyqt5 = prev.pyqt5.override { | ||||
|               # withWebkit = false; | ||||
|               preferWheel = true; | ||||
|             }; | ||||
| 
 | ||||
|             # see PR from @k900: | ||||
|             # https://github.com/nix-community/poetry2nix/pull/1257 | ||||
|             # pyqt5-qt5 = prev.pyqt5-qt5.override { | ||||
|             #   withWebkit = false; | ||||
|             #   preferWheel = true; | ||||
|             # }; | ||||
| 
 | ||||
|             # TODO: patch in an override for polars to build | ||||
|             # from src! See the details likely needed from | ||||
|             # the cryptography entry: | ||||
|             # https://github.com/nix-community/poetry2nix/blob/master/overrides/default.nix#L426-L435 | ||||
|             polars = prev.polars.override { | ||||
|               preferWheel = true; | ||||
|             }; | ||||
|           } | ||||
|       ); | ||||
| 
 | ||||
|       # WHY!? -> output-attrs that `nix develop` scans for: | ||||
|       # https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-develop.html#flake-output-attributes | ||||
|       in | ||||
|       rec { | ||||
|         packages = { | ||||
|           # piker = poetry2nix.legacyPackages.x86_64-linux.mkPoetryEditablePackage { | ||||
|           #   editablePackageSources = { piker = ./piker; }; | ||||
| 
 | ||||
|           piker = p2npkgs.mkPoetryApplication { | ||||
|             projectDir = projectDir; | ||||
| 
 | ||||
|             # SEE ABOVE for auto-genned input set, override | ||||
|             # buncha deps with extras.. like `setuptools` mostly. | ||||
|             # TODO: maybe propose a patch to p2n to show that you | ||||
|             # can even do this in the edgecases docs? | ||||
|             overrides = ahot_overrides; | ||||
| 
 | ||||
|             # XXX: won't work on llvmlite.. | ||||
|             # preferWheels = true; | ||||
|           }; | ||||
|         }; | ||||
| 
 | ||||
|         # devShells.default = pkgs.mkShell { | ||||
|         #   projectDir = projectDir; | ||||
|         #   python = "python3.10"; | ||||
|         #   overrides = ahot_overrides; | ||||
|         #   inputsFrom = [ self.packages.x86_64-linux.piker ]; | ||||
|         #   packages = packages; | ||||
|         #   # packages = [ poetry2nix.packages.${system}.poetry ]; | ||||
|         # }; | ||||
| 
 | ||||
|         # TODO: grok the difference here.. | ||||
|         # - avoid re-cloning git repos on every develop entry.. | ||||
|         # - ideally allow hacking on the src code of some deps | ||||
|         #   (tractor, pyqtgraph, tomlkit, etc.) WITHOUT having to | ||||
|         #   re-install them every time a change is made. | ||||
|         # - boot a usable xonsh inside the poetry virtualenv when | ||||
|         #   defined via a custom entry point? | ||||
|         devShells.default = p2npkgs.mkPoetryEnv { | ||||
|         # env = p2npkgs.mkPoetryEnv { | ||||
|             projectDir = projectDir; | ||||
|             python = pkgs.python310; | ||||
|             overrides = ahot_overrides; | ||||
|             editablePackageSources = packages; | ||||
|               # piker = "./"; | ||||
|               # tractor = "../tractor/"; | ||||
|             # };  # wut? | ||||
|         }; | ||||
|       } | ||||
|     );  # end of .outputs scope | ||||
| } | ||||
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers. | ||||
| # Copyright 2020-eternity Tyler Goodlet (in stewardship for pikers) | ||||
| # Copyright 2020-eternity Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -14,14 +14,7 @@ | |||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| """ | ||||
| piker: trading gear for hackers. | ||||
| 
 | ||||
| ''' | ||||
| from .service import open_piker_runtime | ||||
| from .data.feed import open_feed | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'open_piker_runtime', | ||||
|     'open_feed', | ||||
| ] | ||||
| """ | ||||
|  |  | |||
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -14,71 +14,37 @@ | |||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| """ | ||||
| Cacheing apis and toolz. | ||||
| 
 | ||||
| ''' | ||||
| """ | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| from typing import ( | ||||
|     Awaitable, | ||||
|     Callable, | ||||
|     ParamSpec, | ||||
|     TypeVar, | ||||
| from contextlib import ( | ||||
|     asynccontextmanager, | ||||
| ) | ||||
| 
 | ||||
| from tractor.trionics import maybe_open_context | ||||
| 
 | ||||
| from .brokers import get_brokermod | ||||
| from .log import get_logger | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| T = TypeVar("T") | ||||
| P = ParamSpec("P") | ||||
| 
 | ||||
| 
 | ||||
| # TODO: move this to `tractor.trionics`.. | ||||
| # - egs. to replicate for tests: https://github.com/aio-libs/async-lru#usage | ||||
| # - their suite as well: | ||||
| #   https://github.com/aio-libs/async-lru/tree/master/tests | ||||
| # - asked trio_util about it too: | ||||
| #   https://github.com/groove-x/trio-util/issues/21 | ||||
| def async_lifo_cache( | ||||
|     maxsize=128, | ||||
| 
 | ||||
|     # NOTE: typing style was learned from: | ||||
|     # https://stackoverflow.com/a/71132186 | ||||
| ) -> Callable[ | ||||
|     Callable[P, Awaitable[T]], | ||||
|     Callable[ | ||||
|         Callable[P, Awaitable[T]], | ||||
|         Callable[P, Awaitable[T]], | ||||
|     ], | ||||
| ]: | ||||
|     ''' | ||||
|     Async ``cache`` with a LIFO policy. | ||||
| def async_lifo_cache(maxsize=128): | ||||
|     """Async ``cache`` with a LIFO policy. | ||||
| 
 | ||||
|     Implemented my own since no one else seems to have | ||||
|     a standard. I'll wait for the smarter people to come | ||||
|     up with one, but until then... | ||||
| 
 | ||||
|     NOTE: when decorating, due to this simple/naive implementation, you | ||||
|     MUST call the decorator like, | ||||
| 
 | ||||
|     .. code:: python | ||||
| 
 | ||||
|         @async_lifo_cache() | ||||
|         async def cache_target(): | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     cache = OrderedDict() | ||||
| 
 | ||||
|     def decorator( | ||||
|         fn: Callable[P, Awaitable[T]], | ||||
|     ) -> Callable[P, Awaitable[T]]: | ||||
|     def decorator(fn): | ||||
| 
 | ||||
|         async def decorated( | ||||
|             *args: P.args, | ||||
|             **kwargs: P.kwargs, | ||||
|         ) -> T: | ||||
|         async def wrapper(*args): | ||||
|             key = args | ||||
|             try: | ||||
|                 return cache[key] | ||||
|  | @ -87,13 +53,27 @@ def async_lifo_cache( | |||
|                     # discard last added new entry | ||||
|                     cache.popitem() | ||||
| 
 | ||||
|                 # call underlying | ||||
|                 cache[key] = await fn( | ||||
|                     *args, | ||||
|                     **kwargs, | ||||
|                 ) | ||||
|                 # do it | ||||
|                 cache[key] = await fn(*args) | ||||
|                 return cache[key] | ||||
| 
 | ||||
|         return decorated | ||||
|         return wrapper | ||||
| 
 | ||||
|     return decorator | ||||
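| # hedged behaviour sketch: with ``maxsize=2``, caching results for | ||||
| # keys a then b and then requesting a third key c discards b (the | ||||
| # most recently added entry) before inserting c; an LRU would have | ||||
| # dropped a instead. | ||||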
| 
 | ||||
| 
 | ||||
| @asynccontextmanager | ||||
| async def open_cached_client( | ||||
|     brokername: str, | ||||
| ) -> 'Client':  # noqa | ||||
|     ''' | ||||
|     Get a cached broker client from the current actor's local vars. | ||||
| 
 | ||||
|     If one has not been setup do it and cache it. | ||||
| 
 | ||||
|     ''' | ||||
|     brokermod = get_brokermod(brokername) | ||||
|     async with maybe_open_context( | ||||
|         acm_func=brokermod.get_client, | ||||
|     ) as (cache_hit, client): | ||||
|         yield client | ||||
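| # hedged usage sketch: | ||||
| # | ||||
| #   async with open_cached_client('kraken') as client: | ||||
| #       ...  # the same client instance is shared per-actor | ||||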
|  |  | |||
|  | @ -0,0 +1,558 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Structured, daemon tree service management. | ||||
| 
 | ||||
| """ | ||||
| from typing import Optional, Union, Callable, Any | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from collections import defaultdict | ||||
| 
 | ||||
| from msgspec import Struct | ||||
| import tractor | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| 
 | ||||
| from .log import get_logger, get_console_log | ||||
| from .brokers import get_brokermod | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _root_dname = 'pikerd' | ||||
| 
 | ||||
| _registry_addr = ('127.0.0.1', 6116) | ||||
| _tractor_kwargs: dict[str, Any] = { | ||||
|     # use a different registry addr then tractor's default | ||||
|     'arbiter_addr': _registry_addr | ||||
| } | ||||
| _root_modules = [ | ||||
|     __name__, | ||||
|     'piker.clearing._ems', | ||||
|     'piker.clearing._client', | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| class Services(Struct): | ||||
| 
 | ||||
|     actor_n: tractor._supervise.ActorNursery | ||||
|     service_n: trio.Nursery | ||||
|     debug_mode: bool  # tractor sub-actor debug mode flag | ||||
|     service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {} | ||||
| 
 | ||||
|     async def start_service_task( | ||||
|         self, | ||||
|         name: str, | ||||
|         portal: tractor.Portal, | ||||
|         target: Callable, | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> (trio.CancelScope, tractor.Context): | ||||
|         ''' | ||||
|         Open a context in a service sub-actor, add to a stack | ||||
|         that gets unwound at ``pikerd`` teardown. | ||||
| 
 | ||||
|         This allows for allocating long-running sub-services in our main | ||||
|         daemon and explicitly controlling their lifetimes. | ||||
| 
 | ||||
|         ''' | ||||
|         async def open_context_in_task( | ||||
|             task_status: TaskStatus[ | ||||
|                 trio.CancelScope] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
|         ) -> Any: | ||||
| 
 | ||||
|             with trio.CancelScope() as cs: | ||||
|                 async with portal.open_context( | ||||
|                     target, | ||||
|                     **kwargs, | ||||
| 
 | ||||
|                 ) as (ctx, first): | ||||
| 
 | ||||
|                     # unblock once the remote context has started | ||||
|                     task_status.started((cs, first)) | ||||
|                     log.info( | ||||
|                         f'`pikerd` service {name} started with value {first}' | ||||
|                     ) | ||||
|                     try: | ||||
|                         # wait on any context's return value | ||||
|                         ctx_res = await ctx.result() | ||||
|                     except tractor.ContextCancelled: | ||||
|                         return await self.cancel_service(name) | ||||
|                     else: | ||||
|                         # wait on any error from the sub-actor | ||||
|                         # NOTE: this will block indefinitely until | ||||
|                         # cancelled either by error from the target | ||||
|                         # context function or by being cancelled here by | ||||
|                         # the surrounding cancel scope | ||||
|                         return (await portal.result(), ctx_res) | ||||
| 
 | ||||
|         cs, first = await self.service_n.start(open_context_in_task) | ||||
| 
 | ||||
|         # store the cancel scope and portal for later cancellation or | ||||
|     # restart if needed. | ||||
|         self.service_tasks[name] = (cs, portal) | ||||
| 
 | ||||
|         return cs, first | ||||
| 
 | ||||
|     # TODO: per service cancellation by scope, we aren't using this | ||||
|     # anywhere right? | ||||
|     async def cancel_service( | ||||
|         self, | ||||
|         name: str, | ||||
|     ) -> Any: | ||||
|         log.info(f'Cancelling `pikerd` service {name}') | ||||
|         cs, portal = self.service_tasks[name] | ||||
|         # XXX: not entirely sure why this is required, | ||||
|         # and should probably be better fine tuned in | ||||
|         # ``tractor``? | ||||
|         cs.cancel() | ||||
|         return await portal.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| _services: Optional[Services] = None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_pikerd( | ||||
|     start_method: str = 'trio', | ||||
|     loglevel: Optional[str] = None, | ||||
| 
 | ||||
|     # XXX: you should pretty much never want debug mode | ||||
|     # for data daemons when running in production. | ||||
|     debug_mode: bool = False, | ||||
| 
 | ||||
| ) -> Optional[tractor._portal.Portal]: | ||||
|     ''' | ||||
|     Start a root piker daemon whose lifetime extends indefinitely | ||||
|     until cancelled. | ||||
| 
 | ||||
|     A root actor nursery is created which can be used to create and keep | ||||
|     alive underling services (see below). | ||||
| 
 | ||||
|     ''' | ||||
|     global _services | ||||
|     assert _services is None | ||||
| 
 | ||||
|     # XXX: this may open a root actor as well | ||||
|     async with ( | ||||
|         tractor.open_root_actor( | ||||
| 
 | ||||
|             # passed through to ``open_root_actor`` | ||||
|             arbiter_addr=_registry_addr, | ||||
|             name=_root_dname, | ||||
|             loglevel=loglevel, | ||||
|             debug_mode=debug_mode, | ||||
|             start_method=start_method, | ||||
| 
 | ||||
|             # TODO: eventually we should be able to avoid | ||||
|             # having the root have more than permissions to | ||||
|             # spawn other specialized daemons I think? | ||||
|             enable_modules=_root_modules, | ||||
|         ) as _, | ||||
| 
 | ||||
|         tractor.open_nursery() as actor_nursery, | ||||
|     ): | ||||
|         async with trio.open_nursery() as service_nursery: | ||||
| 
 | ||||
|             # # setup service mngr singleton instance | ||||
|             # async with AsyncExitStack() as stack: | ||||
| 
 | ||||
|             # assign globally for future daemon/task creation | ||||
|             _services = Services( | ||||
|                 actor_n=actor_nursery, | ||||
|                 service_n=service_nursery, | ||||
|                 debug_mode=debug_mode, | ||||
|             ) | ||||
| 
 | ||||
|             yield _services | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_piker_runtime( | ||||
|     name: str, | ||||
|     enable_modules: list[str] = [], | ||||
|     start_method: str = 'trio', | ||||
|     loglevel: Optional[str] = None, | ||||
| 
 | ||||
|     # XXX: you should pretty much never want debug mode | ||||
|     # for data daemons when running in production. | ||||
|     debug_mode: bool = False, | ||||
| 
 | ||||
| ) -> Optional[tractor._portal.Portal]: | ||||
|     ''' | ||||
|     Start a piker actor whose runtime will automatically | ||||
|     sync with existing piker actors in local network | ||||
|     based on configuration. | ||||
| 
 | ||||
|     ''' | ||||
|     global _services | ||||
|     assert _services is None | ||||
| 
 | ||||
|     # XXX: this may open a root actor as well | ||||
|     async with ( | ||||
|         tractor.open_root_actor( | ||||
| 
 | ||||
|             # passed through to ``open_root_actor`` | ||||
|             arbiter_addr=_registry_addr, | ||||
|             name=name, | ||||
|             loglevel=loglevel, | ||||
|             debug_mode=debug_mode, | ||||
|             start_method=start_method, | ||||
| 
 | ||||
|             # TODO: eventually we should be able to avoid | ||||
|             # having the root have more than permissions to | ||||
|             # spawn other specialized daemons I think? | ||||
|             enable_modules=_root_modules, | ||||
|         ) as _, | ||||
|     ): | ||||
|         yield tractor.current_actor() | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_runtime( | ||||
|     loglevel: Optional[str] = None, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     """ | ||||
|     Start the ``tractor`` runtime (a root actor) if none exists. | ||||
| 
 | ||||
|     """ | ||||
|     settings = _tractor_kwargs | ||||
|     settings.update(kwargs) | ||||
| 
 | ||||
|     if not tractor.current_actor(err_on_no_runtime=False): | ||||
|         async with tractor.open_root_actor( | ||||
|             loglevel=loglevel, | ||||
|             **settings, | ||||
|         ): | ||||
|             yield | ||||
|     else: | ||||
|         yield | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_pikerd( | ||||
|     loglevel: Optional[str] = None, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> Union[tractor._portal.Portal, Services]: | ||||
|     """If no ``pikerd`` daemon-root-actor can be found, start one and | ||||
|     yield up (we should probably figure out how to return a portal | ||||
|     to self though). | ||||
| 
 | ||||
|     """ | ||||
|     if loglevel: | ||||
|         get_console_log(loglevel) | ||||
| 
 | ||||
|     # subtle, we must have the runtime up here or portal lookup will fail | ||||
|     async with maybe_open_runtime(loglevel, **kwargs): | ||||
| 
 | ||||
|         async with tractor.find_actor(_root_dname) as portal: | ||||
|             # assert portal is not None | ||||
|             if portal is not None: | ||||
|                 yield portal | ||||
|                 return | ||||
| 
 | ||||
|     # presume pikerd role since no daemon could be found at | ||||
|     # configured address | ||||
|     async with open_pikerd( | ||||
| 
 | ||||
|         loglevel=loglevel, | ||||
|         debug_mode=kwargs.get('debug_mode', False), | ||||
| 
 | ||||
|     ) as _: | ||||
|         # in the case where we're starting up the | ||||
|         # tractor-piker runtime stack in **this** process | ||||
|         # we return no portal to self. | ||||
|         yield None | ||||
| 
 | ||||
| 
 | ||||
| # brokerd enabled modules | ||||
| _data_mods = [ | ||||
|     'piker.brokers.core', | ||||
|     'piker.brokers.data', | ||||
|     'piker.data', | ||||
|     'piker.data.feed', | ||||
|     'piker.data._sampling' | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| class Brokerd: | ||||
|     locks = defaultdict(trio.Lock) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def find_service( | ||||
|     service_name: str, | ||||
| ) -> Optional[tractor.Portal]: | ||||
| 
 | ||||
|     log.info(f'Scanning for service `{service_name}`') | ||||
|     # attach to existing daemon by name if possible | ||||
|     async with tractor.find_actor( | ||||
|         service_name, | ||||
|         arbiter_sockaddr=_registry_addr, | ||||
|     ) as maybe_portal: | ||||
|         yield maybe_portal | ||||
| 
 | ||||
| 
 | ||||
| async def check_for_service( | ||||
|     service_name: str, | ||||
| 
 | ||||
| ) -> bool: | ||||
|     ''' | ||||
|     Service daemon "liveness" predicate. | ||||
| 
 | ||||
|     ''' | ||||
|     async with tractor.query_actor( | ||||
|         service_name, | ||||
|         arbiter_sockaddr=_registry_addr, | ||||
|     ) as sockaddr: | ||||
|         return sockaddr | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_spawn_daemon( | ||||
| 
 | ||||
|     service_name: str, | ||||
|     service_task_target: Callable, | ||||
|     spawn_args: dict[str, Any], | ||||
|     loglevel: Optional[str] = None, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> tractor.Portal: | ||||
|     ''' | ||||
|     If no ``service_name`` daemon-actor can be found, | ||||
|     spawn one in a local subactor and return a portal to it. | ||||
| 
 | ||||
|     If this function is called from a non-pikerd actor, the | ||||
|     spawned service will persist for as long as pikerd does, or | ||||
|     until it is explicitly requested to be cancelled. | ||||
| 
 | ||||
|     This can be seen as a service starting api for remote-actor | ||||
|     clients. | ||||
| 
 | ||||
|     ''' | ||||
|     if loglevel: | ||||
|         get_console_log(loglevel) | ||||
| 
 | ||||
|     # serialize access to this section to avoid | ||||
|     # 2 or more tasks racing to create a daemon | ||||
|     lock = Brokerd.locks[service_name] | ||||
|     await lock.acquire() | ||||
| 
 | ||||
|     async with find_service(service_name) as portal: | ||||
|         if portal is not None: | ||||
|             lock.release() | ||||
|             yield portal | ||||
|             return | ||||
| 
 | ||||
|     log.warning(f"Couldn't find any existing {service_name}") | ||||
| 
 | ||||
|     # ask the root ``pikerd`` daemon to spawn the daemon we need; | ||||
|     # if pikerd is not live we now become the root of the | ||||
|     # process tree | ||||
|     async with maybe_open_pikerd( | ||||
| 
 | ||||
|         loglevel=loglevel, | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) as pikerd_portal: | ||||
| 
 | ||||
|         if pikerd_portal is None: | ||||
|             # we are the root and thus are `pikerd` | ||||
|             # so spawn the target service directly by calling | ||||
|             # the provided target routine. | ||||
|             # XXX: this assumes that the target is well formed and will | ||||
|             # do the right things to setup both a sub-actor **and** call | ||||
|             # the ``_Services`` api from above to start the top level | ||||
|             # service task for that actor. | ||||
|             await service_task_target(**spawn_args) | ||||
| 
 | ||||
|         else: | ||||
|             # tell the remote `pikerd` to start the target, | ||||
|             # the target can't return a non-serializable value | ||||
|             # since it is expected that service starting is | ||||
|             # non-blocking and the target task will persist running | ||||
|             # on `pikerd` after the client requesting it's start | ||||
|             # disconnects. | ||||
|             await pikerd_portal.run( | ||||
|                 service_task_target, | ||||
|                 **spawn_args, | ||||
|             ) | ||||
| 
 | ||||
|         async with tractor.wait_for_actor(service_name) as portal: | ||||
|             lock.release() | ||||
|             yield portal | ||||
|             await portal.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| async def spawn_brokerd( | ||||
| 
 | ||||
|     brokername: str, | ||||
|     loglevel: Optional[str] = None, | ||||
|     **tractor_kwargs, | ||||
| 
 | ||||
| ) -> bool: | ||||
| 
 | ||||
|     log.info(f'Spawning {brokername} broker daemon') | ||||
| 
 | ||||
|     brokermod = get_brokermod(brokername) | ||||
|     dname = f'brokerd.{brokername}' | ||||
| 
 | ||||
|     extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {}) | ||||
|     tractor_kwargs.update(extra_tractor_kwargs) | ||||
| 
 | ||||
|     global _services | ||||
|     assert _services | ||||
| 
 | ||||
|     # ask `pikerd` to spawn a new sub-actor and manage it under its | ||||
|     # actor nursery | ||||
|     modpath = brokermod.__name__ | ||||
|     broker_enable = [modpath] | ||||
|     for submodname in getattr( | ||||
|         brokermod, | ||||
|         '__enable_modules__', | ||||
|         [], | ||||
|     ): | ||||
|         subpath = f'{modpath}.{submodname}' | ||||
|         broker_enable.append(subpath) | ||||
| 
 | ||||
|     portal = await _services.actor_n.start_actor( | ||||
|         dname, | ||||
|         enable_modules=_data_mods + broker_enable, | ||||
|         loglevel=loglevel, | ||||
|         debug_mode=_services.debug_mode, | ||||
|         **tractor_kwargs | ||||
|     ) | ||||
| 
 | ||||
|     # non-blocking setup of brokerd service nursery | ||||
|     from .data import _setup_persistent_brokerd | ||||
| 
 | ||||
|     await _services.start_service_task( | ||||
|         dname, | ||||
|         portal, | ||||
|         _setup_persistent_brokerd, | ||||
|         brokername=brokername, | ||||
|     ) | ||||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_spawn_brokerd( | ||||
| 
 | ||||
|     brokername: str, | ||||
|     loglevel: Optional[str] = None, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> tractor.Portal: | ||||
|     ''' | ||||
|     Helper to spawn a brokerd service *from* a client | ||||
|     who wishes to use the sub-actor-daemon. | ||||
| 
 | ||||
|     ''' | ||||
|     async with maybe_spawn_daemon( | ||||
| 
 | ||||
|         f'brokerd.{brokername}', | ||||
|         service_task_target=spawn_brokerd, | ||||
|         spawn_args={'brokername': brokername, 'loglevel': loglevel}, | ||||
|         loglevel=loglevel, | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) as portal: | ||||
|         yield portal | ||||
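| 
 | ||||
| 
 | ||||
| # a minimal usage sketch of the above helper (illustration only): | ||||
| # assumes an active `trio`/`tractor` runtime and a configured | ||||
| # `binance` backend - the broker name is just an example. | ||||
| async def _example_use_brokerd() -> None: | ||||
|     async with maybe_spawn_brokerd( | ||||
|         'binance', | ||||
|         loglevel='info', | ||||
|     ) as portal: | ||||
|         # the yielded `tractor.Portal` addresses the (possibly | ||||
|         # pre-existing) `brokerd.binance` sub-actor daemon. | ||||
|         assert portal is not None | ||||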
| 
 | ||||
| 
 | ||||
| async def spawn_emsd( | ||||
| 
 | ||||
|     loglevel: Optional[str] = None, | ||||
|     **extra_tractor_kwargs | ||||
| 
 | ||||
| ) -> bool: | ||||
|     """ | ||||
|     Start the clearing engine under ``pikerd``. | ||||
| 
 | ||||
|     """ | ||||
|     log.info('Spawning emsd') | ||||
| 
 | ||||
|     global _services | ||||
|     assert _services | ||||
| 
 | ||||
|     portal = await _services.actor_n.start_actor( | ||||
|         'emsd', | ||||
|         enable_modules=[ | ||||
|             'piker.clearing._ems', | ||||
|             'piker.clearing._client', | ||||
|         ], | ||||
|         loglevel=loglevel, | ||||
|         debug_mode=_services.debug_mode,  # set by pikerd flag | ||||
|         **extra_tractor_kwargs | ||||
|     ) | ||||
| 
 | ||||
|     # non-blocking setup of clearing service | ||||
|     from .clearing._ems import _setup_persistent_emsd | ||||
| 
 | ||||
|     await _services.start_service_task( | ||||
|         'emsd', | ||||
|         portal, | ||||
|         _setup_persistent_emsd, | ||||
|     ) | ||||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_emsd( | ||||
| 
 | ||||
|     brokername: str, | ||||
|     loglevel: Optional[str] = None, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> tractor._portal.Portal:  # noqa | ||||
| 
 | ||||
|     async with maybe_spawn_daemon( | ||||
| 
 | ||||
|         'emsd', | ||||
|         service_task_target=spawn_emsd, | ||||
|         spawn_args={'loglevel': loglevel}, | ||||
|         loglevel=loglevel, | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) as portal: | ||||
|         yield portal | ||||
| 
 | ||||
| 
 | ||||
| # TODO: ideally we can start the tsdb "on demand" but it's | ||||
| # probably going to require "rootless" docker, at least if we don't | ||||
| # want to expect the user to start ``pikerd`` with root perms all the | ||||
| # time. | ||||
| # async def maybe_open_marketstored( | ||||
| #     loglevel: Optional[str] = None, | ||||
| #     **kwargs, | ||||
| 
 | ||||
| # ) -> tractor._portal.Portal:  # noqa | ||||
| 
 | ||||
| #     async with maybe_spawn_daemon( | ||||
| 
 | ||||
| #         'marketstored', | ||||
| #         service_task_target=spawn_emsd, | ||||
| #         spawn_args={'loglevel': loglevel}, | ||||
| #         loglevel=loglevel, | ||||
| #         **kwargs, | ||||
| 
 | ||||
| #     ) as portal: | ||||
| #         yield portal | ||||
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| -# Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| +# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -13,21 +13,34 @@ | |||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| """ | ||||
| Sub-sys module commons. | ||||
| 
 | ||||
| """ | ||||
| from functools import partial | ||||
| Profiling wrappers for internal libs. | ||||
| 
 | ||||
| from ..log import ( | ||||
|     get_logger, | ||||
|     get_console_log, | ||||
| ) | ||||
| subsys: str = 'piker.service' | ||||
| """ | ||||
| import time | ||||
| from functools import wraps | ||||
| 
 | ||||
| log = get_logger(subsys) | ||||
| # NOTE: you can pass a flag to enable this: | ||||
| # ``piker chart <args> --profile``. | ||||
| _pg_profile: bool = False | ||||
| ms_slower_then: float = 0 | ||||
| 
 | ||||
| get_console_log = partial( | ||||
|     get_console_log, | ||||
|     name=subsys, | ||||
| ) | ||||
| 
 | ||||
| def pg_profile_enabled() -> bool: | ||||
|     global _pg_profile | ||||
|     return _pg_profile | ||||
| 
 | ||||
| 
 | ||||
| def timeit(fn): | ||||
|     @wraps(fn) | ||||
|     def wrapper(*args, **kwargs): | ||||
|         t = time.time() | ||||
|         res = fn(*args, **kwargs) | ||||
|         print( | ||||
|             '%s.%s: %.4f sec' | ||||
|             % (fn.__module__, fn.__qualname__, time.time() - t) | ||||
|         ) | ||||
|         return res | ||||
| 
 | ||||
|     return wrapper | ||||
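| 
 | ||||
| 
 | ||||
| # usage sketch (illustration only): wrap any sync callable to print | ||||
| # its runtime; `_demo_sum()` is a hypothetical stand-in. | ||||
| @timeit | ||||
| def _demo_sum(n: int = 1_000_000) -> int: | ||||
|     return sum(range(n)) | ||||
| 
 | ||||
| # calling `_demo_sum()` prints eg. `<module>._demo_sum: 0.0312 sec` | ||||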
|  | @ -1,16 +0,0 @@ | |||
| .accounting | ||||
| ----------- | ||||
| A subsystem for transaction processing, storage and historical | ||||
| measurement. | ||||
| 
 | ||||
| 
 | ||||
| .pnl | ||||
| ---- | ||||
| BEP, the break even price: the price at which liquidating | ||||
| a remaining position results in a zero PnL since the position was | ||||
| "opened" in the destination asset. | ||||
| 
 | ||||
| PPU: price-per-unit: the "average cost" (in cumulative mean terms) | ||||
| of the "entry" transactions which "make a position larger"; taking | ||||
| a profit relative to this price means that you will "make more | ||||
| profit then made prior" since the position was opened. | ||||
|  | @ -1,107 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| "Accounting for degens": count dem numberz that tracks how much you got | ||||
| for tendiez. | ||||
| 
 | ||||
| ''' | ||||
| from ..log import get_logger | ||||
| 
 | ||||
| from .calc import ( | ||||
|     iter_by_dt, | ||||
| ) | ||||
| from ._ledger import ( | ||||
|     Transaction, | ||||
|     TransactionLedger, | ||||
|     open_trade_ledger, | ||||
| ) | ||||
| from ._pos import ( | ||||
|     Account, | ||||
|     load_account, | ||||
|     load_account_from_ledger, | ||||
|     open_pps, | ||||
|     open_account, | ||||
|     Position, | ||||
| ) | ||||
| from ._mktinfo import ( | ||||
|     Asset, | ||||
|     dec_digits, | ||||
|     digits_to_dec, | ||||
|     MktPair, | ||||
|     Symbol, | ||||
|     unpack_fqme, | ||||
|     _derivs as DerivTypes, | ||||
| ) | ||||
| from ._allocate import ( | ||||
|     mk_allocator, | ||||
|     Allocator, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'Account', | ||||
|     'Allocator', | ||||
|     'Asset', | ||||
|     'MktPair', | ||||
|     'Position', | ||||
|     'Symbol', | ||||
|     'Transaction', | ||||
|     'TransactionLedger', | ||||
|     'dec_digits', | ||||
|     'digits_to_dec', | ||||
|     'iter_by_dt', | ||||
|     'load_account', | ||||
|     'load_account_from_ledger', | ||||
|     'mk_allocator', | ||||
|     'open_account', | ||||
|     'open_pps', | ||||
|     'open_trade_ledger', | ||||
|     'unpack_fqme', | ||||
|     'DerivTypes', | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| def get_likely_pair( | ||||
|     src: str, | ||||
|     dst: str, | ||||
|     bs_mktid: str, | ||||
| 
 | ||||
| ) -> str | None: | ||||
|     ''' | ||||
|     Attempt to get the likely trading pair matching a given destination | ||||
|     asset `dst: str`. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         src_name_start: int = bs_mktid.rindex(src) | ||||
|     except ( | ||||
|         ValueError,   # substr not found | ||||
|     ): | ||||
|         # TODO: handle nested positions..(i.e. | ||||
|         # positions where the src fiat was used to | ||||
|         # buy some other dst which was further used | ||||
|         # to buy another dst..) | ||||
|         # log.warning( | ||||
|         #     f'No src fiat {src} found in {bs_mktid}?' | ||||
|         # ) | ||||
|         return None | ||||
| 
 | ||||
|     likely_dst: str = bs_mktid[:src_name_start] | ||||
|     if likely_dst == dst: | ||||
|         return bs_mktid | ||||
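| 
 | ||||
| 
 | ||||
| # a quick sketch (illustration only) with binance-style market ids; | ||||
| # the concrete strings are made up. | ||||
| def _example_likely_pair() -> None: | ||||
|     assert get_likely_pair('USDT', 'XMR', 'XMRUSDT') == 'XMRUSDT' | ||||
|     # src found but dst mismatch -> no match | ||||
|     assert get_likely_pair('USDT', 'XMR', 'BTCUSDT') is None | ||||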
|  | @ -1,421 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Trade and transaction ledger processing. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from collections import UserDict | ||||
| from contextlib import contextmanager as cm | ||||
| from functools import partial | ||||
| from pathlib import Path | ||||
| from pprint import pformat | ||||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     Generator, | ||||
|     Literal, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from pendulum import ( | ||||
|     DateTime, | ||||
| ) | ||||
| import tomli_w  # for fast ledger writing | ||||
| 
 | ||||
| from piker.types import Struct | ||||
| from piker import config | ||||
| from ..log import get_logger | ||||
| from .calc import ( | ||||
|     iter_by_dt, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ..data._symcache import ( | ||||
|         SymbologyCache, | ||||
|     ) | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| TxnType = Literal[ | ||||
|     'clear', | ||||
|     'transfer', | ||||
| 
 | ||||
|     # TODO: see https://github.com/pikers/piker/issues/510 | ||||
|     # 'split', | ||||
|     # 'rename', | ||||
|     # 'resize', | ||||
|     # 'removal', | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| class Transaction(Struct, frozen=True): | ||||
| 
 | ||||
|     # NOTE: this is a unified acronym also used in our `MktPair` | ||||
|     # and can stand for any of a | ||||
|     # "fully qualified <blank> endpoint": | ||||
|     # - "market" in the case of financial trades | ||||
|     #   (btcusdt.spot.binance). | ||||
|     # - "merkel (tree)" aka a blockchain system "wallet tranfers" | ||||
|     #   (btc.blockchain) | ||||
|     # - "money" for tradtitional (digital databases) | ||||
|     #   *bank accounts* (usd.swift, eur.sepa) | ||||
|     fqme: str | ||||
| 
 | ||||
|     tid: str | int  # unique transaction id | ||||
|     size: float | ||||
|     price: float | ||||
|     cost: float  # commissions or other additional costs | ||||
|     dt: DateTime | ||||
| 
 | ||||
|     # the "event type" in terms of "market events" see above and | ||||
|     # https://github.com/pikers/piker/issues/510 | ||||
|     etype: TxnType = 'clear' | ||||
| 
 | ||||
|     # TODO: we can drop this right since we | ||||
|     # can instead expect the backend to provide this | ||||
|     # via the `MktPair`? | ||||
|     expiry: DateTime | None = None | ||||
| 
 | ||||
|     # (optional) key-id defined by the broker-service backend which | ||||
|     # ensures the instrument-symbol market key for this record is unique | ||||
|     # in the "their backend/system" sense; i.e. this uid for the market | ||||
|     # as defined (internally) in some namespace defined by the broker | ||||
|     # service. | ||||
|     bs_mktid: str | int | None = None | ||||
| 
 | ||||
|     def to_dict( | ||||
|         self, | ||||
|         **kwargs, | ||||
|     ) -> dict: | ||||
|         dct: dict[str, Any] = super().to_dict(**kwargs) | ||||
| 
 | ||||
|         # ensure we use a pendulum formatted | ||||
|         # ISO style str here! | ||||
|         dct['dt'] = str(self.dt) | ||||
| 
 | ||||
|         return dct | ||||
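| 
 | ||||
| 
 | ||||
| # construction sketch (illustration only): a single spot "clear"; | ||||
| # every field value below is made up. | ||||
| def _example_clear_txn() -> Transaction: | ||||
|     import pendulum | ||||
|     return Transaction( | ||||
|         fqme='btcusdt.spot.binance', | ||||
|         tid='deadbeef', | ||||
|         size=0.1, | ||||
|         price=26_000.0, | ||||
|         cost=1.3,  # commissions/fees | ||||
|         dt=pendulum.now(), | ||||
|     ) | ||||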
| 
 | ||||
| 
 | ||||
| class TransactionLedger(UserDict): | ||||
|     ''' | ||||
|     Very simple ``dict`` wrapper + ``pathlib.Path`` handle to | ||||
|     a TOML formatted transaction file for enabling file writes | ||||
|     dynamically whilst still looking exactly like a ``dict`` from the | ||||
|     outside. | ||||
| 
 | ||||
|     ''' | ||||
|     # NOTE: see `open_trade_ledger()` for defaults, this should | ||||
|     # never be constructed manually! | ||||
|     def __init__( | ||||
|         self, | ||||
|         ledger_dict: dict, | ||||
|         file_path: Path, | ||||
|         account: str, | ||||
|         mod: ModuleType,  # broker mod | ||||
|         tx_sort: Callable, | ||||
|         symcache: SymbologyCache, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         self.account: str = account | ||||
|         self.file_path: Path = file_path | ||||
|         self.mod: ModuleType = mod | ||||
|         self.tx_sort: Callable = tx_sort | ||||
| 
 | ||||
|         self._symcache: SymbologyCache = symcache | ||||
| 
 | ||||
|         # any added txns we keep in that form for meta-data | ||||
|         # gathering purposes | ||||
|         self._txns: dict[str, Transaction] = {} | ||||
| 
 | ||||
|         super().__init__(ledger_dict) | ||||
| 
 | ||||
|     def __repr__(self) -> str: | ||||
|         return ( | ||||
|             f'TransactionLedger: {len(self)}\n' | ||||
|             f'{pformat(list(self.data))}' | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def symcache(self) -> SymbologyCache: | ||||
|         ''' | ||||
|         Read-only ref to backend's ``SymbologyCache``. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._symcache | ||||
| 
 | ||||
|     def update_from_t( | ||||
|         self, | ||||
|         t: Transaction, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Given an input `Transaction`, cast to `dict` and update | ||||
|         from its transaction id. | ||||
| 
 | ||||
|         ''' | ||||
|         self.data[t.tid] = t.to_dict() | ||||
|         self._txns[t.tid] = t | ||||
| 
 | ||||
|     def iter_txns( | ||||
|         self, | ||||
|         symcache: SymbologyCache | None = None, | ||||
| 
 | ||||
|     ) -> Generator[ | ||||
|         Transaction, | ||||
|         None, | ||||
|         None, | ||||
|     ]: | ||||
|         ''' | ||||
|         Deliver trades records in ``(key: str, t: Transaction)`` | ||||
|         form via generator. | ||||
| 
 | ||||
|         ''' | ||||
|         symcache = symcache or self._symcache | ||||
| 
 | ||||
|         if self.account == 'paper': | ||||
|             from piker.clearing import _paper_engine | ||||
|             norm_trade: Callable = partial( | ||||
|                 _paper_engine.norm_trade, | ||||
|                 brokermod=self.mod, | ||||
|             ) | ||||
| 
 | ||||
|         else: | ||||
|             norm_trade: Callable = self.mod.norm_trade | ||||
| 
 | ||||
|         # datetime-sort and pack into txs | ||||
|         for tid, txdict in self.tx_sort(self.data.items()): | ||||
|             txn: Transaction = norm_trade( | ||||
|                 tid, | ||||
|                 txdict, | ||||
|                 pairs=symcache.pairs, | ||||
|                 symcache=symcache, | ||||
|             ) | ||||
|             yield txn | ||||
| 
 | ||||
|     def to_txns( | ||||
|         self, | ||||
|         symcache: SymbologyCache | None = None, | ||||
| 
 | ||||
|     ) -> dict[str, Transaction]: | ||||
|         ''' | ||||
|         Return entire output from ``.iter_txns()`` in a ``dict``. | ||||
| 
 | ||||
|         ''' | ||||
|         txns: dict[str, Transaction] = {} | ||||
|         for t in self.iter_txns(symcache=symcache): | ||||
| 
 | ||||
|             if not t: | ||||
|                 log.warning(f'{self.mod.name}:{self.account} TXN is -> {t}') | ||||
|                 continue | ||||
| 
 | ||||
|             txns[t.tid] = t | ||||
| 
 | ||||
|         return txns | ||||
| 
 | ||||
|     def write_config(self) -> None: | ||||
|         ''' | ||||
|         Render the self.data ledger dict to its TOML file form. | ||||
| 
 | ||||
|         ALWAYS order datetime sorted! | ||||
| 
 | ||||
|         ''' | ||||
|         is_paper: bool = self.account == 'paper' | ||||
| 
 | ||||
|         symcache: SymbologyCache = self._symcache | ||||
|         towrite: dict[str, Any] = {} | ||||
|         for tid, txdict in self.tx_sort(self.data.copy()): | ||||
|             # write blank-str expiry for non-expiring assets | ||||
|             if ( | ||||
|                 'expiry' in txdict | ||||
|                 and txdict['expiry'] is None | ||||
|             ): | ||||
|                 txdict['expiry'] = '' | ||||
| 
 | ||||
|             # (maybe) re-write old acro-key | ||||
|             if ( | ||||
|                 is_paper | ||||
|                 # if symcache is empty/not supported (yet), don't | ||||
|                 # bother xD | ||||
|                 and symcache.mktmaps | ||||
|             ): | ||||
|                 fqme: str = txdict.pop('fqsn', None) or txdict['fqme'] | ||||
|                 bs_mktid: str | None = txdict.get('bs_mktid') | ||||
| 
 | ||||
|                 if ( | ||||
| 
 | ||||
|                     fqme not in symcache.mktmaps | ||||
|                     or ( | ||||
|                         # also try to see if this is maybe a paper | ||||
|                         # engine ledger in which case the bs_mktid | ||||
|                         # should be the fqme as well! | ||||
|                         bs_mktid | ||||
|                         and fqme != bs_mktid | ||||
|                     ) | ||||
|                 ): | ||||
|                     # always take any (paper) bs_mktid if defined and | ||||
|                     # in the backend's cache key set. | ||||
|                     if bs_mktid in symcache.mktmaps: | ||||
|                         fqme: str = bs_mktid | ||||
|                     else: | ||||
|                         best_fqme: str = list(symcache.search(fqme))[0] | ||||
|                         log.warning( | ||||
|                             f'Could not find FQME: {fqme} in qualified set?\n' | ||||
|                             f'Qualifying and expanding {fqme} -> {best_fqme}' | ||||
|                         ) | ||||
|                         fqme = best_fqme | ||||
| 
 | ||||
|                 if ( | ||||
|                     bs_mktid | ||||
|                     and bs_mktid != fqme | ||||
|                 ): | ||||
|                     # in paper account case always make sure both the | ||||
|                     # fqme and bs_mktid are fully qualified.. | ||||
|                     txdict['bs_mktid'] = fqme | ||||
| 
 | ||||
|                 # in paper ledgers always write the latest | ||||
|                 # symbology key field: an FQME. | ||||
|                 txdict['fqme'] = fqme | ||||
| 
 | ||||
|             towrite[tid] = txdict | ||||
| 
 | ||||
|         with self.file_path.open(mode='wb') as fp: | ||||
|             tomli_w.dump(towrite, fp) | ||||
| 
 | ||||
| 
 | ||||
| def load_ledger( | ||||
|     brokername: str, | ||||
|     acctid: str, | ||||
| 
 | ||||
|     # for testing or manual load from file | ||||
|     dirpath: Path | None = None, | ||||
| 
 | ||||
| ) -> tuple[dict, Path]: | ||||
|     ''' | ||||
|     Load a ledger (TOML) file from user's config directory: | ||||
|     $CONFIG_DIR/accounting/ledgers/trades_<brokername>_<acctid>.toml | ||||
| 
 | ||||
|     Return its `dict`-content and file path. | ||||
| 
 | ||||
|     ''' | ||||
|     import time | ||||
|     try: | ||||
|         import tomllib | ||||
|     except ModuleNotFoundError: | ||||
|         import tomli as tomllib | ||||
| 
 | ||||
|     ldir: Path = ( | ||||
|         dirpath | ||||
|         or | ||||
|         config._config_dir / 'accounting' / 'ledgers' | ||||
|     ) | ||||
|     if not ldir.is_dir(): | ||||
|         ldir.mkdir() | ||||
| 
 | ||||
|     fname = f'trades_{brokername}_{acctid}.toml' | ||||
|     fpath: Path = ldir / fname | ||||
| 
 | ||||
|     if not fpath.is_file(): | ||||
|         log.info( | ||||
|             f'Creating new local trades ledger: {fpath}' | ||||
|         ) | ||||
|         fpath.touch() | ||||
| 
 | ||||
|     with fpath.open(mode='rb') as cf: | ||||
|         start = time.time() | ||||
|         ledger_dict = tomllib.load(cf) | ||||
|         log.debug(f'Ledger load took {time.time() - start}s') | ||||
| 
 | ||||
|     return ledger_dict, fpath | ||||
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def open_trade_ledger( | ||||
|     broker: str, | ||||
|     account: str, | ||||
| 
 | ||||
|     allow_from_sync_code: bool = False, | ||||
|     symcache: SymbologyCache | None = None, | ||||
| 
 | ||||
|     # default is to sort by detected datetime-ish field | ||||
|     tx_sort: Callable = iter_by_dt, | ||||
|     rewrite: bool = False, | ||||
| 
 | ||||
|     # for testing or manual load from file | ||||
|     _fp: Path | None = None, | ||||
| 
 | ||||
| ) -> Generator[TransactionLedger, None, None]: | ||||
|     ''' | ||||
|     Idempotently create and read in a trade log file from the | ||||
|     ``<configuration_dir>/ledgers/`` directory. | ||||
| 
 | ||||
|     Files are named per broker account of the form | ||||
|     ``<brokername>_<accountname>.toml``. The ``accountname`` here is the | ||||
|     name as defined in the user's ``brokers.toml`` config. | ||||
| 
 | ||||
|     ''' | ||||
|     from ..brokers import get_brokermod | ||||
|     mod: ModuleType = get_brokermod(broker) | ||||
| 
 | ||||
|     ledger_dict, fpath = load_ledger( | ||||
|         broker, | ||||
|         account, | ||||
|         dirpath=_fp, | ||||
|     ) | ||||
|     cpy = ledger_dict.copy() | ||||
| 
 | ||||
|     # XXX NOTE: if not provided presume we are being called from | ||||
|     # sync code and need to maybe run `trio` to generate.. | ||||
|     if symcache is None: | ||||
| 
 | ||||
|         # XXX: be mega pedantic and ensure the caller knows what | ||||
|         # they're doing! | ||||
|         if not allow_from_sync_code: | ||||
|             raise RuntimeError( | ||||
|                 'You MUST set `allow_from_sync_code=True` when ' | ||||
|                 'calling `open_trade_ledger()` from sync code! ' | ||||
|                 'If you are calling from async code you MUST ' | ||||
|                 'instead pass a `symcache: SymbologyCache`!' | ||||
|             ) | ||||
| 
 | ||||
|         from ..data._symcache import ( | ||||
|             get_symcache, | ||||
|         ) | ||||
|         symcache: SymbologyCache = get_symcache(broker) | ||||
| 
 | ||||
|     assert symcache | ||||
| 
 | ||||
|     ledger = TransactionLedger( | ||||
|         ledger_dict=cpy, | ||||
|         file_path=fpath, | ||||
|         account=account, | ||||
|         mod=mod, | ||||
|         symcache=symcache, | ||||
|         tx_sort=getattr(mod, 'tx_sort', tx_sort), | ||||
|     ) | ||||
|     try: | ||||
|         yield ledger | ||||
|     finally: | ||||
|         if ( | ||||
|             ledger.data != ledger_dict | ||||
|             or rewrite | ||||
|         ): | ||||
|             # TODO: show diff output? | ||||
|             # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries | ||||
|             log.info(f'Updating ledger for {fpath}:\n') | ||||
|             ledger.write_config() | ||||
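| 
 | ||||
| 
 | ||||
| # usage sketch (illustration only): a sync-code read of a paper | ||||
| # ledger; assumes a configured `binance` backend and a `paper` | ||||
| # account - both names are examples, not requirements. | ||||
| def _example_read_ledger() -> None: | ||||
|     with open_trade_ledger( | ||||
|         'binance', | ||||
|         'paper', | ||||
|         allow_from_sync_code=True, | ||||
|     ) as ledger: | ||||
|         for tid, txn in ledger.to_txns().items(): | ||||
|             print(tid, txn.size, txn.price) | ||||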
|  | @ -1,766 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Market (pair) meta-info layer: sane addressing semantics and meta-data | ||||
| for cross-provider marketplaces. | ||||
| 
 | ||||
| We introduce the concepts of, | ||||
| 
 | ||||
| - a FQMA: fully qualified market address, | ||||
| - a sane schema for FQMAs including derivatives, | ||||
| - a msg-serializable description of markets for | ||||
|   easy sharing with other pikers B) | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from decimal import ( | ||||
|     Decimal, | ||||
|     ROUND_HALF_EVEN, | ||||
| ) | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Literal, | ||||
| ) | ||||
| 
 | ||||
| from piker.types import Struct | ||||
| 
 | ||||
| 
 | ||||
| # TODO: make these literals.. | ||||
| _underlyings: list[str] = [ | ||||
|     'stock', | ||||
|     'bond', | ||||
|     'crypto', | ||||
|     'fiat', | ||||
|     'commodity', | ||||
| ] | ||||
| 
 | ||||
| _crypto_derivs: list[str] = [ | ||||
|     'perpetual_future', | ||||
|     'crypto_future', | ||||
| ] | ||||
| 
 | ||||
| _derivs: list[str] = [ | ||||
|     'swap', | ||||
|     'future', | ||||
|     'continuous_future', | ||||
|     'option', | ||||
|     'futures_option', | ||||
| 
 | ||||
|     # if we can't figure it out, presume the worst XD | ||||
|     'unknown', | ||||
| ] | ||||
| 
 | ||||
| # NOTE: a tag for other subsystems to try | ||||
| # and do default settings for certain things: | ||||
| # - allocator does unit vs. dolla size limiting. | ||||
| AssetTypeName: Literal[ | ||||
|     _underlyings | ||||
|     + | ||||
|     _derivs | ||||
|     + | ||||
|     _crypto_derivs | ||||
| ] | ||||
| 
 | ||||
| # egs. stock, future, option, bond etc. | ||||
| 
 | ||||
| 
 | ||||
| def dec_digits( | ||||
|     value: float | str | Decimal, | ||||
| 
 | ||||
| ) -> int: | ||||
|     ''' | ||||
|     Return the number of precision digits read from a decimal or float | ||||
|     value. | ||||
| 
 | ||||
|     ''' | ||||
|     if value == 0: | ||||
|         return 0 | ||||
| 
 | ||||
|     return int( | ||||
|         -Decimal(str(value)).as_tuple().exponent | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| float_digits = dec_digits | ||||
| 
 | ||||
| 
 | ||||
| def digits_to_dec( | ||||
|     ndigits: int, | ||||
| ) -> Decimal: | ||||
|     ''' | ||||
|     Return the minimum (tick) value as a ``Decimal`` for an input digit count. | ||||
| 
 | ||||
|     eg. 3 -> 0.001 | ||||
| 
 | ||||
|     ''' | ||||
|     if ndigits == 0: | ||||
|         return Decimal('0') | ||||
| 
 | ||||
|     return Decimal('0.' + '0'*(ndigits-1) + '1') | ||||
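| 
 | ||||
| 
 | ||||
| # round-trip sketch (illustration only): precision digit counts map | ||||
| # to (and from) their minimum tick values. | ||||
| def _example_tick_digits() -> None: | ||||
|     assert dec_digits('0.001') == 3 | ||||
|     assert digits_to_dec(3) == Decimal('0.001') | ||||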
| 
 | ||||
| 
 | ||||
| class Asset(Struct, frozen=True): | ||||
|     ''' | ||||
|     Container type describing any transactable asset and its | ||||
|     contract-like and/or underlying technology meta-info. | ||||
| 
 | ||||
|     ''' | ||||
|     name: str | ||||
|     atype: str  # AssetTypeName | ||||
| 
 | ||||
|     # minimum transaction size / precision. | ||||
|     # eg. for buttcoin this is a "satoshi". | ||||
|     tx_tick: Decimal | ||||
| 
 | ||||
|     # NOTE: additional info optionally packed in by the backend, but | ||||
|     # should not be explicitly required in our generic API. | ||||
|     info: dict | None = None | ||||
| 
 | ||||
|     # `None` is not toml-compat so drop info | ||||
|     # if no extra data added.. | ||||
|     def to_dict( | ||||
|         self, | ||||
|         **kwargs, | ||||
|     ) -> dict: | ||||
|         dct = super().to_dict(**kwargs) | ||||
|         if (info := dct.pop('info', None)): | ||||
|             dct['info'] = info | ||||
| 
 | ||||
|         assert dct['tx_tick'] | ||||
|         return dct | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_msg( | ||||
|         cls, | ||||
|         msg: dict[str, Any], | ||||
|     ) -> Asset: | ||||
|         return cls( | ||||
|             tx_tick=Decimal(str(msg.pop('tx_tick'))), | ||||
|             info=msg.pop('info', None), | ||||
|             **msg, | ||||
|         ) | ||||
| 
 | ||||
|     def __str__(self) -> str: | ||||
|         return self.name | ||||
| 
 | ||||
|     def quantize( | ||||
|         self, | ||||
|         size: float, | ||||
| 
 | ||||
|     ) -> Decimal: | ||||
|         ''' | ||||
|         Truncate input ``size: float`` using ``Decimal`` | ||||
|         quantized form of the digit precision defined | ||||
|         by ``self.tx_tick``. | ||||
| 
 | ||||
|         ''' | ||||
|         digits = float_digits(self.tx_tick) | ||||
|         return Decimal(size).quantize( | ||||
|             Decimal(f'1.{"0".ljust(digits, "0")}'), | ||||
|             rounding=ROUND_HALF_EVEN | ||||
|         ) | ||||
| 
 | ||||
|     @classmethod | ||||
|     def guess_from_mkt_ep_key( | ||||
|         cls, | ||||
|         mkt_ep_key: str, | ||||
|         atype: str | None = None, | ||||
| 
 | ||||
|     ) -> Asset: | ||||
|         ''' | ||||
|         A hacky guess method for presuming a (target) asset's properties | ||||
|         based on either the actual market endpoint key, or config settings | ||||
|         from the user. | ||||
| 
 | ||||
|         ''' | ||||
|         # attempt to strip off any source asset | ||||
|         # via presumed syntax of: | ||||
|         # - <dst>/<src> | ||||
|         # - <dst>.<src> | ||||
|         # - etc. | ||||
|         for char in ['/', '.']: | ||||
|             dst, _, src = mkt_ep_key.partition(char) | ||||
|             if src: | ||||
|                 # a delimited src token (and no explicit type from | ||||
|                 # the caller) implies a fiat-settled pair. | ||||
|                 if not atype: | ||||
|                     atype = 'fiat' | ||||
|                 break | ||||
| 
 | ||||
|         return Asset( | ||||
|             name=dst, | ||||
|             # if we still can't tell, presume the worst XD | ||||
|             atype=atype or 'unknown', | ||||
|             tx_tick=Decimal('0.01'), | ||||
|         ) | ||||
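| 
 | ||||
| 
 | ||||
| # usage sketch (illustration only, values made up): an asset with | ||||
| # satoshi precision plus the hacky ep-key guess from above. | ||||
| def _example_asset() -> None: | ||||
|     btc = Asset( | ||||
|         name='btc', | ||||
|         atype='crypto', | ||||
|         tx_tick=Decimal('0.00000001'), | ||||
|     ) | ||||
|     assert btc.quantize(0.123456789) == Decimal('0.12345679') | ||||
| 
 | ||||
|     # a delimited ep-key implies a (dst, src) split | ||||
|     assert Asset.guess_from_mkt_ep_key('xmr/usdt').name == 'xmr' | ||||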
| 
 | ||||
| 
 | ||||
| def maybe_cons_tokens( | ||||
|     tokens: list[Any], | ||||
|     delim_char: str = '.', | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Construct `str` output from a maybe-concatenation of an input | ||||
|     sequence of elements in ``tokens``. | ||||
| 
 | ||||
|     ''' | ||||
|     return delim_char.join(filter(bool, tokens)).lower() | ||||
| 
 | ||||
| 
 | ||||
| class MktPair(Struct, frozen=True): | ||||
|     ''' | ||||
|     Market description for a pair of assets which are tradeable: | ||||
|     a market which enables transactions of the form, | ||||
|         buy: source asset -> destination asset | ||||
|         sell: destination asset -> source asset | ||||
| 
 | ||||
|     The main intention of this type is for a **simple** cross-asset | ||||
|     venue/broker normalized description type from which all | ||||
|     market-auctions can be mapped from FQME identifiers. | ||||
| 
 | ||||
|     TODO: our eventual target fqme format/schema is: | ||||
|     <dst>/<src>.<expiry>.<con_info_1>.<con_info_2>. -> .<venue>.<broker> | ||||
|           ^ -- optional tokens ------------------------------- ^ | ||||
| 
 | ||||
| 
 | ||||
|     Notes: | ||||
|     ------ | ||||
| 
 | ||||
|     Some venues provide a different semantic (which we frankly find | ||||
|     confusing and non-general) such as "base" and "quote" asset. | ||||
|     For example this is how `binance` defines the terms: | ||||
| 
 | ||||
|     https://binance-docs.github.io/apidocs/websocket_api/en/#public-api-definitions | ||||
|     https://binance-docs.github.io/apidocs/futures/en/#public-endpoints-info | ||||
| 
 | ||||
|     - *base* asset refers to the asset that is the *quantity* of a symbol. | ||||
|     - *quote* asset refers to the asset that is the *price* of a symbol. | ||||
| 
 | ||||
|     In other words the "quote" asset is the asset that the market | ||||
|     is pricing "buys" *in*, and the *base* asset it the one that the market | ||||
|     allows you to "buy" an *amount of*. Put more simply the *quote* | ||||
|     asset is our "source" asset and the *base* asset is our "destination" | ||||
|     asset. | ||||
| 
 | ||||
|     This definition can be further understood reading our | ||||
|     `.brokers.binance.api.Pair` type wherein the | ||||
|     `Pair.[quote/base]AssetPrecision` field determines the (transfer) | ||||
|     transaction precision available per asset; i.e. the satoshis | ||||
|     unit in bitcoin for representing the minimum size of a | ||||
|     transaction that can take place on the blockchain. | ||||
| 
 | ||||
|     ''' | ||||
|     dst: str | Asset | ||||
|     # "destination asset" (name) used to buy *to* | ||||
|     # (or used to sell *from*) | ||||
| 
 | ||||
|     price_tick: Decimal  # minimum price increment | ||||
|     size_tick: Decimal  # minimum size (aka vlm) increment | ||||
|     # the tick size is the number describing the smallest step in value | ||||
|     # available in this market between the source and destination | ||||
|     # assets. | ||||
|     # https://en.wikipedia.org/wiki/Tick_size | ||||
|     # https://en.wikipedia.org/wiki/Commodity_tick | ||||
|     # https://en.wikipedia.org/wiki/Percentage_in_point | ||||
| 
 | ||||
|     # unique "broker id" since every market endpoint provider | ||||
|     # has their own nomenclature and schema for market maps. | ||||
|     bs_mktid: str | ||||
|     broker: str  # the middle man giving access | ||||
| 
 | ||||
|     # NOTE: to start this field is optional but should eventually be | ||||
|     # required; the reason is for backward compat since more positioning | ||||
|     # calculations were not originally stored with a src asset.. | ||||
| 
 | ||||
|     src: str | Asset = '' | ||||
|     # "source asset" (name) used to buy *from* | ||||
|     # (or used to sell *to*). | ||||
| 
 | ||||
|     venue: str = ''  # market venue provider name | ||||
|     expiry: str = ''  # for derivs, expiry datetime parseable str | ||||
| 
 | ||||
|     # destination asset's financial type/classification name | ||||
|     # NOTE: this is required for the order size allocator system, | ||||
|     # since we use different default settings based on the type | ||||
|     # of the destination asset, eg. futes use a units limits vs. | ||||
|     # equities a $limit. | ||||
|     # dst_type: AssetTypeName | None = None | ||||
| 
 | ||||
|     # source asset's financial type/classification name | ||||
|     # TODO: is a src type required for trading? | ||||
|     # there's no reason to need any more then the one-way alloc-limiter | ||||
|     # config right? | ||||
|     # src_type: AssetTypeName | ||||
| 
 | ||||
|     # for derivs, info describing contract, egs. | ||||
|     # strike price, call or put, swap type, exercise model, etc. | ||||
|     contract_info: list[str] | None = None | ||||
| 
 | ||||
|     # TODO: rename to sectype since all of these can | ||||
|     # be considered "securities"? | ||||
|     _atype: str = '' | ||||
| 
 | ||||
|     # allow explicit disable of the src part of the market | ||||
|     # pair name -> useful for legacy markets like qqq.nasdaq.ib | ||||
|     _fqme_without_src: bool = False | ||||
| 
 | ||||
|     # NOTE: when cast to `str` return fqme | ||||
|     def __str__(self) -> str: | ||||
|         return self.fqme | ||||
| 
 | ||||
|     def to_dict( | ||||
|         self, | ||||
|         **kwargs, | ||||
|     ) -> dict: | ||||
|         d = super().to_dict(**kwargs) | ||||
| 
 | ||||
|         # mirror the `dst` handling: `src` may still be an (unresolved) | ||||
|         # bare `str` in which case serialize it as such. | ||||
|         if not isinstance(self.src, str): | ||||
|             d['src'] = self.src.to_dict(**kwargs) | ||||
|         else: | ||||
|             d['src'] = str(self.src) | ||||
| 
 | ||||
|         if not isinstance(self.dst, str): | ||||
|             d['dst'] = self.dst.to_dict(**kwargs) | ||||
|         else: | ||||
|             d['dst'] = str(self.dst) | ||||
| 
 | ||||
|         d['price_tick'] = str(self.price_tick) | ||||
|         d['size_tick'] = str(self.size_tick) | ||||
| 
 | ||||
|         if self.contract_info is None: | ||||
|             d.pop('contract_info') | ||||
| 
 | ||||
|         # d.pop('_fqme_without_src') | ||||
| 
 | ||||
|         return d | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_msg( | ||||
|         cls, | ||||
|         msg: dict[str, Any], | ||||
| 
 | ||||
|     ) -> MktPair: | ||||
|         ''' | ||||
|         Constructor for a received msg-dict normally received over IPC. | ||||
| 
 | ||||
|         ''' | ||||
|         if not isinstance( | ||||
|             dst_asset_msg := msg.pop('dst'), | ||||
|             str, | ||||
|         ): | ||||
|             dst: Asset = Asset.from_msg(dst_asset_msg)  # .copy() | ||||
|         else: | ||||
|             dst: str = dst_asset_msg | ||||
| 
 | ||||
|         src_asset_msg: dict = msg.pop('src') | ||||
|         src: Asset = Asset.from_msg(src_asset_msg)  # .copy() | ||||
| 
 | ||||
|         # XXX NOTE: ``msgspec`` can encode `Decimal` but it doesn't | ||||
|         # decode it by default since we aren't spec-ing these | ||||
|         # msgs as proper structs to get them to decode implicitly | ||||
|         # (yet) as per, | ||||
|         # - https://github.com/pikers/piker/pull/354 | ||||
|         # - https://github.com/goodboy/tractor/pull/311 | ||||
|         # SO we have to ensure we do a struct type | ||||
|         # cast (which `.copy()` does) to ensure we get the right | ||||
|         # type! | ||||
|         return cls( | ||||
|             dst=dst, | ||||
|             src=src, | ||||
|             price_tick=Decimal(msg.pop('price_tick')), | ||||
|             size_tick=Decimal(msg.pop('size_tick')), | ||||
|             **msg, | ||||
|         ).copy() | ||||
| 
 | ||||
|     @property | ||||
|     def resolved(self) -> bool: | ||||
|         return isinstance(self.dst, Asset) | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_fqme( | ||||
|         cls, | ||||
|         fqme: str, | ||||
| 
 | ||||
|         price_tick: float | str, | ||||
|         size_tick: float | str, | ||||
|         bs_mktid: str, | ||||
| 
 | ||||
|         broker: str | None = None, | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> MktPair: | ||||
| 
 | ||||
|         _fqme: str = fqme | ||||
|         if ( | ||||
|             broker | ||||
|             and broker not in fqme | ||||
|         ): | ||||
|             _fqme = f'{fqme}.{broker}' | ||||
| 
 | ||||
|         broker, mkt_ep_key, venue, expiry = unpack_fqme(_fqme) | ||||
| 
 | ||||
|         kven: str = kwargs.pop('venue', venue) | ||||
|         if venue: | ||||
|             assert venue == kven | ||||
|         else: | ||||
|             venue = kven | ||||
| 
 | ||||
|         exp: str = kwargs.pop('expiry', expiry) | ||||
|         if expiry: | ||||
|             assert exp == expiry | ||||
|         else: | ||||
|             expiry = exp | ||||
| 
 | ||||
|         dst: Asset = Asset.guess_from_mkt_ep_key( | ||||
|             mkt_ep_key, | ||||
|             atype=kwargs.get('_atype'), | ||||
|         ) | ||||
| 
 | ||||
|         # XXX: loading from a fqme string will | ||||
|         # leave this pair as "un resolved" meaning | ||||
|         # we don't yet have `.dst` set as an `Asset` | ||||
|         # which we expect to be filled in by some | ||||
|         # backend client with access to that data-info. | ||||
|         return cls( | ||||
|             dst=dst, | ||||
|             # XXX: not resolved to ``Asset`` :( | ||||
|             #src=src, | ||||
| 
 | ||||
|             broker=broker, | ||||
|             venue=venue, | ||||
|             # XXX NOTE: we presume this token | ||||
|             # is the expiry for now! | ||||
|             expiry=expiry, | ||||
| 
 | ||||
|             price_tick=price_tick, | ||||
|             size_tick=size_tick, | ||||
|             bs_mktid=bs_mktid, | ||||
| 
 | ||||
|             **kwargs, | ||||
| 
 | ||||
|         ).copy() | ||||
| 
 | ||||
|     @property | ||||
|     def key(self) -> str: | ||||
|         ''' | ||||
|         The "endpoint key" for this market. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.pair() | ||||
| 
 | ||||
|     def pair( | ||||
|         self, | ||||
|         delim_char: str | None = None, | ||||
|     ) -> str: | ||||
|         ''' | ||||
|         The "endpoint asset pair key" for this market. | ||||
|         Eg. mnq/usd or btc/usdt or xmr/btc | ||||
| 
 | ||||
|         In most other tina platforms this is referred to as the | ||||
|         "symbol". | ||||
| 
 | ||||
|         ''' | ||||
|         return maybe_cons_tokens( | ||||
|             [str(self.dst), | ||||
|              str(self.src)], | ||||
|             # TODO: make the default '/' | ||||
|             delim_char=delim_char or '', | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def suffix(self) -> str: | ||||
|         ''' | ||||
|         The "contract suffix" for this market. | ||||
| 
 | ||||
|         Eg. mnq/usd.20230616.cme.ib | ||||
|                     ^ ----- ^ | ||||
|         or tsla/usd.20230324.200c.cboe.ib | ||||
|                     ^ ---------- ^ | ||||
| 
 | ||||
|         In most other tina platforms they only show you these details in | ||||
|         some kinda "meta data" format, we have FQMEs so we do this up | ||||
|         front and explicit. | ||||
| 
 | ||||
|         ''' | ||||
|         field_strs = [self.expiry] | ||||
|         con_info = self.contract_info | ||||
|         if con_info is not None: | ||||
|             field_strs.extend(con_info) | ||||
| 
 | ||||
|         return maybe_cons_tokens(field_strs) | ||||
| 
 | ||||
|     def get_fqme( | ||||
|         self, | ||||
| 
 | ||||
|         # NOTE: allow dropping the source asset from the | ||||
|         # market endpoint's pair key. Eg. to change | ||||
|         # mnq/usd.<> -> mnq.<> which is useful when | ||||
|         # searching (legacy) stock exchanges. | ||||
|         without_src: bool = False, | ||||
|         delim_char: str | None = None, | ||||
| 
 | ||||
|     ) -> str: | ||||
|         ''' | ||||
|         Return the fully qualified market endpoint-address for the | ||||
|         pair of transacting assets. | ||||
| 
 | ||||
|         fqme = "fully qualified market endpoint" | ||||
| 
 | ||||
|         And yes, you pronounce it colloquially as read.. | ||||
| 
 | ||||
|         Basically the idea here is that all client code (consumers of piker's | ||||
|         APIs which query the data/broker-provider agnostic layer(s)) should be | ||||
|         able to tell which backend / venue / derivative each data feed/flow is | ||||
|         from by an explicit string-key of the current form: | ||||
| 
 | ||||
|         <market-instrument-name> | ||||
|             .<venue> | ||||
|             .<expiry> | ||||
|             .<derivative-suffix-info> | ||||
|             .<brokerbackendname> | ||||
| 
 | ||||
|         eg. for an explicit nasdaq-mini futes contract: mnq.cme.20230317.ib | ||||
| 
 | ||||
|         TODO: I have thoughts that we should actually change this to be | ||||
|         more like an "attr lookup" (like how the web should have done | ||||
|         urls, but marketing peeps ruined it etc. etc.) | ||||
| 
 | ||||
|         <broker>.<venue>.<instrumentname>.<suffixwithmetadata> | ||||
| 
 | ||||
|         TODO: | ||||
|         See community discussion on naming and nomenclature, order | ||||
|         of addressing hierarchy, general schema, internal representation: | ||||
| 
 | ||||
|         https://github.com/pikers/piker/issues/467 | ||||
| 
 | ||||
|         ''' | ||||
|         key: str = ( | ||||
|             self.pair(delim_char=delim_char) | ||||
|             if not (without_src or self._fqme_without_src) | ||||
|             else str(self.dst) | ||||
|         ) | ||||
| 
 | ||||
|         return maybe_cons_tokens([ | ||||
|             key,  # final "pair name" (eg. qqq[/usd], btcusdt) | ||||
|             self.venue, | ||||
|             self.suffix,  # includes expiry and other con info | ||||
|             self.broker, | ||||
|         ]) | ||||
| 
 | ||||
|     # NOTE: the main idea behind an fqme is to map a "market address" | ||||
|     # to some endpoint from a transaction provider (eg. a broker) such | ||||
|     # that we build a table of `fqme: str -> bs_mktid: Any` where any "piker | ||||
|     # market address" maps 1-to-1 to some broker trading endpoint. | ||||
|     # @cached_property | ||||
|     fqme = property(get_fqme) | ||||
| 
 | ||||
|     def get_bs_fqme( | ||||
|         self, | ||||
|         **kwargs, | ||||
|     ) -> str: | ||||
|         ''' | ||||
|         FQME sin broker part XD | ||||
| 
 | ||||
|         ''' | ||||
|         sin_broker, *_ = self.get_fqme(**kwargs).rpartition('.') | ||||
|         return sin_broker | ||||
| 
 | ||||
|     bs_fqme = property(get_bs_fqme) | ||||
| 
 | ||||
|     @property | ||||
|     def fqsn(self) -> str: | ||||
|         return self.fqme | ||||
| 
 | ||||
|     def quantize( | ||||
|         self, | ||||
|         size: float, | ||||
| 
 | ||||
|         quantity_type: Literal['price', 'size'] = 'size', | ||||
| 
 | ||||
|     ) -> Decimal: | ||||
|         ''' | ||||
|         Truncate input ``size: float`` using ``Decimal`` | ||||
|         and ``.size_tick``'s # of digits. | ||||
| 
 | ||||
|         ''' | ||||
|         match quantity_type: | ||||
|             case 'price': | ||||
|                 digits = float_digits(self.price_tick) | ||||
|             case 'size': | ||||
|                 digits = float_digits(self.size_tick) | ||||
| 
 | ||||
|         return Decimal(size).quantize( | ||||
|             Decimal(f'1.{"0".ljust(digits, "0")}'), | ||||
|             rounding=ROUND_HALF_EVEN | ||||
|         ) | ||||
| 
 | ||||
|     # TODO: BACKWARD COMPAT, TO REMOVE? | ||||
|     @property | ||||
|     def type_key(self) -> str: | ||||
| 
 | ||||
|         # if set explicitly then use it! | ||||
|         if self._atype: | ||||
|             return self._atype | ||||
| 
 | ||||
|         if isinstance(self.dst, Asset): | ||||
|             return str(self.dst.atype) | ||||
| 
 | ||||
|         return 'UNKNOWN' | ||||
| 
 | ||||
|     @property | ||||
|     def price_tick_digits(self) -> int: | ||||
|         return float_digits(self.price_tick) | ||||
| 
 | ||||
|     @property | ||||
|     def size_tick_digits(self) -> int: | ||||
|         return float_digits(self.size_tick) | ||||
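| 
 | ||||
| 
 | ||||
| # usage sketch (illustration only): a spot market description with | ||||
| # made-up ticks; `.fqme` assembles the dotted endpoint address. | ||||
| def _example_mkt() -> MktPair: | ||||
|     mkt = MktPair( | ||||
|         dst='btc', | ||||
|         src='usdt', | ||||
|         price_tick=Decimal('0.01'), | ||||
|         size_tick=Decimal('0.00001'), | ||||
|         bs_mktid='BTCUSDT', | ||||
|         broker='binance', | ||||
|         venue='spot', | ||||
|     ) | ||||
|     assert mkt.fqme == 'btcusdt.spot.binance' | ||||
|     return mkt | ||||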
| 
 | ||||
| 
 | ||||
| def unpack_fqme( | ||||
|     fqme: str, | ||||
| 
 | ||||
|     broker: str | None = None | ||||
| 
 | ||||
| ) -> tuple[str, ...]: | ||||
|     ''' | ||||
|     Unpack a fully-qualified-market-endpoint (fqme) name into a ``tuple``. | ||||
| 
 | ||||
|     ''' | ||||
|     venue = '' | ||||
|     suffix = '' | ||||
| 
 | ||||
|     # TODO: probably reverse the order of all this XD | ||||
|     tokens = fqme.split('.') | ||||
| 
 | ||||
|     match tokens: | ||||
|         case [mkt_ep, broker]: | ||||
|             # probably crypto | ||||
|             return ( | ||||
|                 broker, | ||||
|                 mkt_ep, | ||||
|                 '', | ||||
|                 '', | ||||
|             ) | ||||
| 
 | ||||
|         # TODO: swap venue and suffix/deriv-info here? | ||||
|         case [mkt_ep, venue, suffix, broker]: | ||||
|             pass | ||||
| 
 | ||||
|         # handle `bs_mktid` + `broker` input case | ||||
|         case [ | ||||
|             mkt_ep, venue, suffix | ||||
|         ] if ( | ||||
|             broker | ||||
|             and suffix != broker | ||||
|         ): | ||||
|             pass | ||||
| 
 | ||||
|         case [mkt_ep, venue, broker]: | ||||
|             suffix = '' | ||||
| 
 | ||||
|         case _: | ||||
|             raise ValueError(f'Invalid fqme: {fqme}') | ||||
| 
 | ||||
|     return ( | ||||
|         broker, | ||||
|         mkt_ep, | ||||
|         venue, | ||||
|         # '.'.join([mkt_ep, venue]), | ||||
|         suffix, | ||||
|     ) | ||||
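| 
 | ||||
| 
 | ||||
| # a quick sketch (illustration only) of the match rules above: | ||||
| def _example_unpack() -> None: | ||||
|     # 2 tokens: probably crypto, no venue or suffix. | ||||
|     assert unpack_fqme('xbtusd.kraken') == ('kraken', 'xbtusd', '', '') | ||||
|     # 4 tokens: venue + (expiry) suffix + broker. | ||||
|     assert unpack_fqme('mnq.cme.20230616.ib') == ( | ||||
|         'ib', 'mnq', 'cme', '20230616', | ||||
|     ) | ||||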
| 
 | ||||
| 
 | ||||
| class Symbol(Struct): | ||||
|     ''' | ||||
|     I guess this is some kinda container thing for dealing with | ||||
|     all the different meta-data formats from brokers? | ||||
| 
 | ||||
|     ''' | ||||
|     key: str | ||||
| 
 | ||||
|     broker: str = '' | ||||
|     venue: str = '' | ||||
| 
 | ||||
|     # precision descriptors for price and vlm | ||||
|     tick_size: Decimal = Decimal('0.01') | ||||
|     lot_tick_size: Decimal = Decimal('0.0') | ||||
| 
 | ||||
|     suffix: str = '' | ||||
|     broker_info: dict[str, dict[str, Any]] = {} | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_fqme( | ||||
|         cls, | ||||
|         fqsn: str, | ||||
|         info: dict[str, Any], | ||||
| 
 | ||||
|     ) -> Symbol: | ||||
|         broker, mktep, venue, suffix = unpack_fqme(fqsn) | ||||
|         tick_size = info.get('price_tick_size', 0.01) | ||||
|         lot_size = info.get('lot_tick_size', 0.0) | ||||
| 
 | ||||
|         return Symbol( | ||||
|             broker=broker, | ||||
|             key=mktep, | ||||
|             tick_size=tick_size, | ||||
|             lot_tick_size=lot_size, | ||||
|             venue=venue, | ||||
|             suffix=suffix, | ||||
|             broker_info={broker: info}, | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def type_key(self) -> str: | ||||
|         return list(self.broker_info.values())[0]['asset_type'] | ||||
| 
 | ||||
|     @property | ||||
|     def tick_size_digits(self) -> int: | ||||
|         return float_digits(self.tick_size) | ||||
| 
 | ||||
|     @property | ||||
|     def lot_size_digits(self) -> int: | ||||
|         return float_digits(self.lot_tick_size) | ||||
| 
 | ||||
|     @property | ||||
|     def price_tick(self) -> Decimal: | ||||
|         return Decimal(str(self.tick_size)) | ||||
| 
 | ||||
|     @property | ||||
|     def size_tick(self) -> Decimal: | ||||
|         return Decimal(str(self.lot_tick_size)) | ||||
| 
 | ||||
|     @property | ||||
|     def broker(self) -> str: | ||||
|         return list(self.broker_info.keys())[0] | ||||
| 
 | ||||
|     @property | ||||
|     def fqme(self) -> str: | ||||
|         return maybe_cons_tokens([ | ||||
|             self.key,  # final "pair name" (eg. qqq[/usd], btcusdt) | ||||
|             self.venue, | ||||
|             self.suffix,  # includes expiry and other con info | ||||
|             self.broker, | ||||
|         ]) | ||||
| 
 | ||||
|     def quantize( | ||||
|         self, | ||||
|         size: float, | ||||
|     ) -> Decimal: | ||||
|         digits = float_digits(self.lot_tick_size) | ||||
|         return Decimal(size).quantize( | ||||
|             Decimal(f'1.{"0".ljust(digits, "0")}'), | ||||
|             rounding=ROUND_HALF_EVEN | ||||
|         ) | ||||
| 
 | ||||
|     # NOTE: when cast to `str` return fqme | ||||
|     def __str__(self) -> str: | ||||
|         return self.fqme | ||||
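| 
 | ||||
| 
 | ||||
| # usage sketch (illustration only): hydrate a legacy `Symbol` from an | ||||
| # fqme plus a broker-provided info dict; the values are made up. | ||||
| def _example_symbol() -> Symbol: | ||||
|     sym = Symbol.from_fqme( | ||||
|         'mnq.cme.20230616.ib', | ||||
|         {'price_tick_size': 0.25, 'lot_tick_size': 1}, | ||||
|     ) | ||||
|     assert sym.fqme == 'mnq.cme.20230616.ib' | ||||
|     return sym | ||||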
|  | @ -1,983 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Personal/Private position parsing, calculating, summarizing in a way | ||||
| that doesn't try to cuk most humans who prefer to not lose their moneys.. | ||||
| 
 | ||||
| (looking at you `ib` and dirt-bird friends) | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import contextmanager as cm | ||||
| from decimal import Decimal | ||||
| from pprint import pformat | ||||
| from pathlib import Path | ||||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Iterator, | ||||
|     Generator | ||||
| ) | ||||
| 
 | ||||
| import pendulum | ||||
| from pendulum import ( | ||||
|     datetime, | ||||
|     now, | ||||
| ) | ||||
| import polars as pl | ||||
| import tomlkit | ||||
| 
 | ||||
| from ._ledger import ( | ||||
|     Transaction, | ||||
|     TransactionLedger, | ||||
| ) | ||||
| from ._mktinfo import ( | ||||
|     MktPair, | ||||
|     Asset, | ||||
|     unpack_fqme, | ||||
| ) | ||||
| from .calc import ( | ||||
|     ppu, | ||||
|     # iter_by_dt, | ||||
| ) | ||||
| from .. import config | ||||
| from ..clearing._messages import ( | ||||
|     BrokerdPosition, | ||||
| ) | ||||
| from piker.types import Struct | ||||
| from piker.data._symcache import SymbologyCache | ||||
| from ..log import get_logger | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| class Position(Struct): | ||||
|     ''' | ||||
|     An asset "position" model with attached clearing transaction history. | ||||
| 
 | ||||
|     A financial "position" in `piker` terms is a summary of accounting | ||||
|     metrics computed from a transaction ledger; generally it describes | ||||
|     some accumulative "size" and "average price" from the summarized | ||||
|     underlying transaction set. | ||||
| 
 | ||||
|     In piker we focus on the `.ppu` (price per unit) and the `.bep` | ||||
|     (break even price) including all transaction entries and exits since | ||||
|     the last "net-zero" size of the destination asset's holding. | ||||
| 
 | ||||
|     This interface serves as an object API for computing and | ||||
|     tracking positions as well as supports serialization for | ||||
|     storage in the local file system (in TOML) and to interchange | ||||
|     as a msg over IPC. | ||||
| 
 | ||||
|     ''' | ||||
|     mkt: MktPair | ||||
| 
 | ||||
|     # can be +ve or -ve for long/short | ||||
|     # size: float | ||||
| 
 | ||||
|     # "price-per-unit price" above or below which pnl moves above and | ||||
|     # below zero for the entirety of the current "trade state". The ppu | ||||
|     # is only modified on "increases of" the absolute size of a position | ||||
|     # in one of a long/short "direction" (i.e. abs(.size_i) > 0 after | ||||
|     # the next transaction given .size was > 0 before that tx, and vice | ||||
|     # versa for -ve sized positions). | ||||
|     # ppu: float | ||||
| 
 | ||||
|     # TODO: break-even-price support! | ||||
|     # bep: float | ||||
| 
 | ||||
|     # unique "backend system market id" | ||||
|     bs_mktid: str | ||||
| 
 | ||||
|     split_ratio: int | None = None | ||||
| 
 | ||||
|     # TODO: use a `pl.DataFrame` instead? | ||||
|     _events: dict[str, Transaction | dict] = {} | ||||
| 
 | ||||
|     @property | ||||
|     def expiry(self) -> datetime | None: | ||||
|         ''' | ||||
|         Security expiry if it has a limited lifetime. | ||||
| 
 | ||||
|         For non-derivative markets this is normally `None`. | ||||
| 
 | ||||
|         ''' | ||||
|         exp: str | None = self.mkt.expiry | ||||
|         if exp is None: | ||||
|             return None | ||||
| 
 | ||||
|         match exp.lower(): | ||||
|             # empty str, 'perp' (contract) or simply a null | ||||
|             # signifies instrument with NO expiry. | ||||
|             case 'perp' | '' | None: | ||||
|                 return None | ||||
| 
 | ||||
|             case str(): | ||||
|                 return pendulum.parse(exp) | ||||
| 
 | ||||
|             case _: | ||||
|                 raise ValueError( | ||||
|                     f'Unhandled `MktPair.expiry`: `{exp}`' | ||||
|                 ) | ||||
| 
 | ||||
|     # TODO: idea: "real LIFO" dynamic positioning. | ||||
|     # - when a trade takes place where the pnl for | ||||
|     # the (set of) trade(s) is below the breakeven price | ||||
|     # it may be that the trader took a +ve pnl on a short(er) | ||||
|     # term trade in the same account. | ||||
|     # - in this case we could recalc the be price to | ||||
|     # be reverted back to it's prior value before the nearest term | ||||
|     # trade was opened.? | ||||
|     # def bep() -> float: | ||||
|     #     ... | ||||
| 
 | ||||
|     def clears_df(self) -> pl.DataFrame: | ||||
|         ... | ||||
| 
 | ||||
|     def clearsitems(self) -> list[tuple[str, dict]]: | ||||
|         return ppu( | ||||
|             self.iter_by_type('clear'), | ||||
|             as_ledger=True | ||||
|         ) | ||||
| 
 | ||||
|     def iter_by_type( | ||||
|         self, | ||||
|         etype: str, | ||||
| 
 | ||||
|     ) -> Iterator[dict | Transaction]: | ||||
|         ''' | ||||
|         Iterate the internally managed ``._events: dict`` table in | ||||
|         datetime-stamped order. | ||||
| 
 | ||||
|         ''' | ||||
|         # sort on the expected datetime field | ||||
|         # for event in iter_by_dt( | ||||
|         for event in sorted( | ||||
|             self._events.values(), | ||||
|             key=lambda entry: ( | ||||
|                 entry['dt'] if isinstance(entry, dict) | ||||
|                 else entry.dt | ||||
|             ), | ||||
|         ): | ||||
|             match event: | ||||
|                 case ( | ||||
|                     {'etype': _etype} | | ||||
|                     Transaction(etype=str(_etype)) | ||||
|                 ): | ||||
|                     # only deliver entries of the requested type | ||||
|                     if _etype == etype: | ||||
|                         yield event | ||||
| 
 | ||||
| 
 | ||||
|     def minimized_clears(self) -> list[dict]: | ||||
|         ''' | ||||
|         Minimize the position's clears entries by removing | ||||
|         all transactions before the last net-zero size state, | ||||
|         except when a clear event causes a position "side" change | ||||
|         (i.e. long to short via a single fill) in which case we | ||||
|         keep (only) the flipping transaction since it establishes | ||||
|         the new side's state. | ||||
| 
 | ||||
|         This avoids unnecessary history irrelevant to the current | ||||
|         non-net-zero size state when serializing for offline storage. | ||||
| 
 | ||||
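|         E.g. (a hedged illustration) for clear sizes | ||||
|         [1, 1, -2, 3] the cumsize hits zero on the 3rd clear so | ||||
|         only the final +3 entry survives minimization; for sizes | ||||
|         [1, -2] the single -2 fill flips the side so that clear | ||||
|         alone is kept. | ||||
| 
 | ||||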
|         ''' | ||||
|         # scan for the last "net zero" position by iterating | ||||
|         # transactions until the next net-zero cumsize, rinse, | ||||
|         # repeat. | ||||
|         cumsize: float = 0 | ||||
|         clears_since_zero: list[dict] = [] | ||||
| 
 | ||||
|         for tid, cleardict in self.clearsitems(): | ||||
|             cumsize = float( | ||||
|                 # self.mkt.quantize(cumsize + cleardict['tx'].size | ||||
|                 self.mkt.quantize(cleardict['cumsize']) | ||||
|             ) | ||||
|             clears_since_zero.append(cleardict) | ||||
| 
 | ||||
|             # NOTE: always pop sign change since we just use it to | ||||
|             # determine which entry to clear "up to". | ||||
|             sign_change: bool = cleardict.pop('sign_change') | ||||
|             if cumsize == 0: | ||||
|                 # the round trip is fully closed; no prior clears | ||||
|                 # are relevant to the current (flat) state. | ||||
|                 clears_since_zero.clear() | ||||
| 
 | ||||
|             elif sign_change: | ||||
|                 # the "side" flipped within a single fill so only | ||||
|                 # the flipping clear defines the new side's state. | ||||
|                 clears_since_zero = clears_since_zero[-1:] | ||||
| 
 | ||||
|         return clears_since_zero | ||||
| 
 | ||||
|     def to_pretoml(self) -> tuple[str, dict]: | ||||
|         ''' | ||||
|         Prep this position's data contents for export as an entry | ||||
|         in a TOML "account file" (such as | ||||
|         `account.binance.paper.toml`) including re-structuring of | ||||
|         the ``._events`` entries as an array of inline-subtables | ||||
|         for better ``pps.toml`` compactness. | ||||
| 
 | ||||
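|         A hedged sketch of the returned `asdict` once rendered to | ||||
|         TOML (values illustrative; the section naming is applied | ||||
|         upstream in `Account.prep_toml()`): | ||||
| 
 | ||||
|         bs_mktid = "..." | ||||
|         asset_type = "..." | ||||
|         price_tick = 0.01 | ||||
|         size_tick = 0.001 | ||||
|         clears = [ | ||||
|             {ppu = 105.0, cumsize = 2.0, price = 110.0, size = 1.0, cost = 0.1, dt = ..., tid = "..."}, | ||||
|         ] | ||||
| 
 | ||||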
|         ''' | ||||
|         mkt: MktPair = self.mkt | ||||
|         assert isinstance(mkt, MktPair) | ||||
| 
 | ||||
|         # TODO: we need to figure out how to have one top level | ||||
|         # listing venue here even when the backend isn't providing | ||||
|         # it via the trades ledger.. | ||||
|         # drop symbol obj in serialized form | ||||
|         fqme: str = mkt.fqme | ||||
|         broker, mktep, venue, suffix = unpack_fqme(fqme) | ||||
| 
 | ||||
|         # an asset resolved mkt where we have ``Asset`` info about | ||||
|         # each tradeable asset in the market. | ||||
|         asset_type: str = 'n/a' | ||||
|         if mkt.resolved: | ||||
|             dst: Asset = mkt.dst | ||||
|             asset_type = dst.atype | ||||
| 
 | ||||
|         asdict: dict[str, Any] = { | ||||
|             'bs_mktid': self.bs_mktid, | ||||
|             # 'expiry': self.expiry or '', | ||||
|             'asset_type': asset_type, | ||||
|             'price_tick': mkt.price_tick, | ||||
|             'size_tick': mkt.size_tick, | ||||
|         } | ||||
|         if exp := self.expiry: | ||||
|             asdict['expiry'] = exp | ||||
| 
 | ||||
|         clears_since_zero: list[dict] = self.minimized_clears() | ||||
| 
 | ||||
|         # setup a "multi-line array of inline tables" which we call | ||||
|         # the "clears table", contained by each position entry in | ||||
|         # an "account file". | ||||
|         clears_table: tomlkit.Array = tomlkit.array() | ||||
|         clears_table.multiline( | ||||
|             multiline=True, | ||||
|             indent='', | ||||
|         ) | ||||
| 
 | ||||
|         for entry in clears_since_zero: | ||||
|             inline_table = tomlkit.inline_table() | ||||
| 
 | ||||
|             # insert optional clear fields in column order | ||||
|             for k in ['ppu', 'cumsize']: | ||||
|                 if val := entry.get(k): | ||||
|                     inline_table[k] = val | ||||
| 
 | ||||
|             # insert required fields | ||||
|             for k in ['price', 'size', 'cost']: | ||||
|                 inline_table[k] = entry[k] | ||||
| 
 | ||||
|             # NOTE: we don't strictly need to serialize the | ||||
|             # datetime to a parsable `str` since `tomlkit` | ||||
|             # supports a native `DateTime`, though it seems we're | ||||
|             # not using that consistently in clears tables yet? | ||||
|             inline_table['dt'] = entry['dt']  # .isoformat('T') | ||||
| 
 | ||||
|             tid: str = entry['tid'] | ||||
|             inline_table['tid'] = tid | ||||
|             clears_table.append(inline_table) | ||||
| 
 | ||||
|         # assert not events | ||||
|         asdict['clears'] = clears_table | ||||
| 
 | ||||
|         return fqme, asdict | ||||
| 
 | ||||
|     def update_from_msg( | ||||
|         self, | ||||
|         msg: BrokerdPosition, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Hard-set the current position from a remotely-received | ||||
|         (normally via IPC) msg by applying the msg as the one (and | ||||
|         only) txn in the `._events` table thus forcing the current | ||||
|         asset allocation blindly. | ||||
| 
 | ||||
|         ''' | ||||
|         mkt: MktPair = self.mkt | ||||
|         now_dt: pendulum.DateTime = now() | ||||
|         now_str: str = str(now_dt) | ||||
| 
 | ||||
|         # XXX: wipe all prior txn history since if we wanted it we | ||||
|         # wouldn't be using this method to compute our state! | ||||
|         self._events.clear() | ||||
| 
 | ||||
|         # NOTE WARNING XXX: we summarize the pos with a single | ||||
|         # summary transaction (for now) until we either pass THIS | ||||
|         # type as msg directly from emsd or come up with a better | ||||
|         # way? | ||||
|         t = Transaction( | ||||
|             fqme=mkt.fqme, | ||||
|             bs_mktid=mkt.bs_mktid, | ||||
|             size=msg['size'], | ||||
|             price=msg['avg_price'], | ||||
|             cost=0, | ||||
| 
 | ||||
|             # NOTE: special provisions required! | ||||
|             # - tid needs to be unique or this txn will be ignored!! | ||||
|             tid=now_str, | ||||
| 
 | ||||
|             # TODO: also figure out how to avoid this! | ||||
|             dt=now_dt, | ||||
|         ) | ||||
|         self.add_clear(t) | ||||
| 
 | ||||
|     @property | ||||
|     def dsize(self) -> float: | ||||
|         ''' | ||||
|         The "dollar" size of the pp, normally in source asset | ||||
|         (fiat) units. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.ppu * self.cumsize | ||||
| 
 | ||||
|     def expired(self) -> bool: | ||||
|         ''' | ||||
|         Predicate which checks if the contract/instrument is past | ||||
|         its expiry. | ||||
| 
 | ||||
|         ''' | ||||
|         return bool(self.expiry) and self.expiry < now() | ||||
| 
 | ||||
|     def add_clear( | ||||
|         self, | ||||
|         t: Transaction, | ||||
|     ) -> bool: | ||||
|         ''' | ||||
|         Insert a clearing transaction into the position's | ||||
|         ``._events`` table keyed by its (unique) ``tid``. | ||||
| 
 | ||||
|         Duplicate entries (by ``tid``) are skipped such that | ||||
|         rolling ppu/cumsize calcs never "double count" a clear. | ||||
| 
 | ||||
|         ''' | ||||
|         added: bool = False | ||||
|         tid: str = t.tid | ||||
|         if tid in self._events: | ||||
|             log.warning(f'{t} is already added?!') | ||||
|             return added | ||||
| 
 | ||||
|         # TODO: apparently this IS possible with a dict but not | ||||
|         # common and probably not that beneficial unless we're also | ||||
|         # going to do cum-calcs on each insert? | ||||
|         # https://stackoverflow.com/questions/38079171/python-insert-new-element-into-sorted-list-of-dictionaries | ||||
|         # from bisect import insort | ||||
|         # insort( | ||||
|         #     self._clears, | ||||
|         #     clear, | ||||
|         #     key=lambda entry: entry['dt'] | ||||
|         # ) | ||||
|         self._events[tid] = t | ||||
|         return True | ||||
| 
 | ||||
|     # TODO: compute these incrementally instead | ||||
|     # of re-looping through each time resulting in O(n**2) | ||||
|     # behaviour..? Can we have some kinda clears len to cached | ||||
|     # output subsys? | ||||
|     def calc_ppu(self) -> float: | ||||
|         return ppu(self.iter_by_type('clear')) | ||||
| 
 | ||||
|         # # return self.clearsdict() | ||||
|         # # ) | ||||
|         # return list(self.clearsdict())[-1][1]['ppu'] | ||||
| 
 | ||||
|     @property | ||||
|     def ppu(self) -> float: | ||||
|         return round( | ||||
|             self.calc_ppu(), | ||||
|             ndigits=self.mkt.price_tick_digits, | ||||
|         ) | ||||
| 
 | ||||
|     def calc_size(self) -> float: | ||||
|         ''' | ||||
|         Calculate the unit size of this position in the destination | ||||
|         asset using the clears/trade event table; zero if expired. | ||||
| 
 | ||||
|         ''' | ||||
|         # time-expired pps (normally derivatives) are "closed" | ||||
|         # and have a zero size. | ||||
|         if self.expired(): | ||||
|             return 0. | ||||
| 
 | ||||
|         clears: list[tuple[str, dict]] = self.clearsitems() | ||||
|         if clears: | ||||
|             return clears[-1][1]['cumsize'] | ||||
|         else: | ||||
|             return 0. | ||||
| 
 | ||||
|         # if self.split_ratio is not None: | ||||
|         #     size = round(size * self.split_ratio) | ||||
| 
 | ||||
|         # return float( | ||||
|         #     self.mkt.quantize(size), | ||||
|         # ) | ||||
| 
 | ||||
|     # TODO: ideally we don't implicitly recompute the | ||||
|     # full sequence from `.clearsdict()` every read.. | ||||
|     # the writer-updates-local-attr-state was actually kinda nice | ||||
|     # before, but sometimes led to hard to detect bugs when | ||||
|     # state was de-synced. | ||||
|     @property | ||||
|     def cumsize(self) -> float: | ||||
| 
 | ||||
|         if ( | ||||
|             self.expiry | ||||
|             and self.expiry < now() | ||||
|         ): | ||||
|             return 0 | ||||
| 
 | ||||
|         return round( | ||||
|             self.calc_size(), | ||||
|             ndigits=self.mkt.size_tick_digits, | ||||
|         ) | ||||
| 
 | ||||
|     # TODO: once we have an `.events` table with diff | ||||
|     # mkt event types..? | ||||
|     # def suggest_split(self) -> float: | ||||
|     #     ... | ||||
| 
 | ||||
| 
 | ||||
| class Account(Struct): | ||||
|     ''' | ||||
|     The real-time (double-entry accounting) state of | ||||
|     a given **asset ownership tracking system**, normally offered | ||||
|     or measured from some brokerage, CEX or (implied virtual) | ||||
|     summary crypto$ "wallets" aggregated and tracked over some set | ||||
|     of DEX-es. | ||||
| 
 | ||||
|     Both market-mapped and ledger-system-native (aka inter-account | ||||
|     "transfers") transactions are accounted and they pertain to | ||||
|     (implied) PnL relatve to any other accountable asset. | ||||
| 
 | ||||
|     More specifically in piker terms, an account tracks all of: | ||||
| 
 | ||||
|     - the *balances* of all assets currently available for use either | ||||
|       in (future) market or (inter-account/wallet) transfer | ||||
|       transactions. | ||||
|     - a transaction *ledger* from a given brokerd backend which | ||||
|       is a recording of all (known) such transactions from the past. | ||||
|     - a set of financial *positions* as measured from the current | ||||
|       ledger state. | ||||
| 
 | ||||
|     See the semantic origins in double-entry bookkeeping: | ||||
|     https://en.wikipedia.org/wiki/Double-entry_bookkeeping | ||||
| 
 | ||||
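|     A hedged field-access sketch given some loaded `acnt: Account`: | ||||
|     >>> acnt.brokername            # backend broker name | ||||
|     >>> acnt.pps                   # bs_mktid -> Position table | ||||
|     >>> open_pps, closed_pps = acnt.dump_active() | ||||
| 
 | ||||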
|     ''' | ||||
|     mod: ModuleType | ||||
|     acctid: str | ||||
|     pps: dict[str, Position] | ||||
| 
 | ||||
|     conf_path: Path | ||||
|     conf: dict | None = {} | ||||
| 
 | ||||
|     # TODO: track a table of asset balances as `.balances: | ||||
|     # dict[Asset, float]`? | ||||
| 
 | ||||
|     @property | ||||
|     def brokername(self) -> str: | ||||
|         return self.mod.name | ||||
| 
 | ||||
|     def update_from_ledger( | ||||
|         self, | ||||
|         ledger: TransactionLedger | dict[str, Transaction], | ||||
|         cost_scalar: float = 2, | ||||
|         symcache: SymbologyCache | None = None, | ||||
| 
 | ||||
|         _mktmap_table: dict[str, MktPair] | None = None, | ||||
| 
 | ||||
|     ) -> dict[str, Position]: | ||||
|         ''' | ||||
|         Update the internal `.pps[str, Position]` table from input | ||||
|         transactions recomputing the price-per-unit (ppu) and | ||||
|         accumulative size for each entry. | ||||
| 
 | ||||
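|         A hedged call sketch, ids illustrative and `acnt` assumed | ||||
|         already loaded via `open_account()`: | ||||
|         >>> from ._ledger import open_trade_ledger | ||||
|         >>> with open_trade_ledger('kraken', 'paper') as ledger: | ||||
|         ...     updated = acnt.update_from_ledger(ledger) | ||||
| 
 | ||||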
|         ''' | ||||
|         if ( | ||||
|             not isinstance(ledger, TransactionLedger) | ||||
|         ): | ||||
|             if symcache is None: | ||||
|                 raise RuntimeError( | ||||
|                     'No ledger provided!\n' | ||||
|                     'We can not determine the `MktPair`s without a symcache..\n' | ||||
|                     'Please provide `symcache: SymbologyCache` when ' | ||||
|                     'processing NEW positions!' | ||||
|                 ) | ||||
|             itertxns = sorted( | ||||
|                 ledger.values(), | ||||
|                 key=lambda t: t.dt, | ||||
|             ) | ||||
|         else: | ||||
|             itertxns = ledger.iter_txns() | ||||
|             symcache = ledger.symcache | ||||
| 
 | ||||
|         pps = self.pps | ||||
|         updated: dict[str, Position] = {} | ||||
| 
 | ||||
|         # lifo update all pps from records, ensuring | ||||
|         # we compute the PPU and size sorted in time! | ||||
|         for txn in itertxns: | ||||
|             fqme: str = txn.fqme | ||||
|             bs_mktid: str = txn.bs_mktid | ||||
| 
 | ||||
|             # template the mkt-info presuming legacy market ticks | ||||
|             # if no info exists in the transactions.. | ||||
|             try: | ||||
|                 mkt: MktPair = symcache.mktmaps[fqme] | ||||
|             except KeyError: | ||||
|                 if _mktmap_table is None: | ||||
|                     raise | ||||
| 
 | ||||
|                 # XXX: caller is allowed to provide a fallback | ||||
|                 # mktmap table for the case where a new position is | ||||
|                 # being added and the preloaded symcache didn't | ||||
|                 # have this entry prior (eg. with frickin IB..) | ||||
|                 mkt = _mktmap_table[fqme] | ||||
| 
 | ||||
|             if not (pos := pps.get(bs_mktid)): | ||||
| 
 | ||||
|                 assert isinstance( | ||||
|                     mkt, | ||||
|                     MktPair, | ||||
|                 ) | ||||
| 
 | ||||
|                 # if no existing pos, allocate fresh one. | ||||
|                 pos = pps[bs_mktid] = Position( | ||||
|                     mkt=mkt, | ||||
|                     bs_mktid=bs_mktid, | ||||
|                 ) | ||||
|             else: | ||||
|                 # NOTE: if for some reason a "less resolved" mkt pair | ||||
|                 # info has been set (based on the `.fqme` being | ||||
|                 # a shorter string), instead use the one from the | ||||
|                 # transaction since it likely has (more) full | ||||
|                 # information from the provider. | ||||
|                 if len(pos.mkt.fqme) < len(fqme): | ||||
|                     pos.mkt = mkt | ||||
| 
 | ||||
|             # update clearing acnt! | ||||
|             # NOTE: likely you'll see repeats of the same | ||||
|             # ``Transaction`` passed in here if/when you are | ||||
|             # restarting a ``brokerd.ib`` where the API will | ||||
|             # re-report trades from the current session, so we need | ||||
|             # to make sure we don't "double count" these in pp | ||||
|             # calculations; `Position.add_clear()` stores txs in | ||||
|             # a `._events: dict[tid, tx]` which should always | ||||
|             # ensure this is true! | ||||
|             pos.add_clear(txn) | ||||
|             updated[txn.bs_mktid] = pos | ||||
| 
 | ||||
|         # NOTE: deliver only the position entries that were | ||||
|         # actually updated (modified the state) from the input | ||||
|         # transaction set. | ||||
|         return updated | ||||
| 
 | ||||
|     def dump_active( | ||||
|         self, | ||||
|     ) -> tuple[ | ||||
|         dict[str, Position], | ||||
|         dict[str, Position] | ||||
|     ]: | ||||
|         ''' | ||||
|         Iterate all tabulated positions and partition them into | ||||
|         two ``dict``s: those which are "active" (non-net-zero size | ||||
|         and thus serializable via TOML) and those which have | ||||
|         (recently) closed; closed entries are NOT dropped from | ||||
|         ``.pps`` so duplicate clears can still be detected. | ||||
| 
 | ||||
|         ''' | ||||
|         # NOTE: newly closed positions are also important to report/return | ||||
|         # since a consumer, like an order mode UI ;), might want to react | ||||
|         # based on the closure (for example removing the breakeven line | ||||
|         # and clearing the entry from any lists/monitors). | ||||
|         closed_pp_objs: dict[str, Position] = {} | ||||
|         open_pp_objs: dict[str, Position] = {} | ||||
| 
 | ||||
|         pp_objs = self.pps | ||||
|         for bs_mktid in list(pp_objs): | ||||
|             pos = pp_objs[bs_mktid] | ||||
|             # pos.ensure_state() | ||||
| 
 | ||||
|             # "net-zero" is a "closed" position | ||||
|             if pos.cumsize == 0: | ||||
|                 # NOTE: we DO NOT pop the pos here since it can still be | ||||
|                 # used to check for duplicate clears that may come in as | ||||
|                 # new transaction from some backend API and need to be | ||||
|                 # ignored; the closed positions won't be written to | ||||
|                 # the ``pps.toml`` since only the active entries | ||||
|                 # (see `.prep_toml()`) are serialized. | ||||
|                 closed_pp_objs[bs_mktid] = pos | ||||
| 
 | ||||
|             else: | ||||
|                 open_pp_objs[bs_mktid] = pos | ||||
| 
 | ||||
|         return open_pp_objs, closed_pp_objs | ||||
| 
 | ||||
|     def prep_toml( | ||||
|         self, | ||||
|         active: dict[str, Position] | None = None, | ||||
| 
 | ||||
|     ) -> dict[str, Any]: | ||||
| 
 | ||||
|         if active is None: | ||||
|             active, _ = self.dump_active() | ||||
| 
 | ||||
|         # ONLY dict-serialize all active positions; those that are | ||||
|         # closed we don't store in the ``pps.toml``. | ||||
|         to_toml_dict: dict[str, Any] = {} | ||||
| 
 | ||||
|         pos: Position | ||||
|         for bs_mktid, pos in active.items(): | ||||
|             # pos.ensure_state() | ||||
| 
 | ||||
|             # serialize to pre-toml form | ||||
|             # NOTE: we only store the minimal amount of clears that | ||||
|             # make up this position since the last net-zero state, | ||||
|             # see `Position.to_pretoml()` for details | ||||
|             fqme, asdict = pos.to_pretoml() | ||||
| 
 | ||||
|             # clears: list[dict] = asdict['clears'] | ||||
|             # assert 'Datetime' not in clears[0]['dt'] | ||||
|             log.info(f'Updating active pp: {fqme}') | ||||
| 
 | ||||
|             # XXX: ugh, it's cuz we push the section under | ||||
|             # the broker name.. maybe we need to rethink this? | ||||
|             brokerless_key = fqme.removeprefix(f'{self.brokername}.') | ||||
|             to_toml_dict[brokerless_key] = asdict | ||||
| 
 | ||||
|         return to_toml_dict | ||||
| 
 | ||||
|     def write_config(self) -> None: | ||||
|         ''' | ||||
|         Write the current account state to the user's account TOML | ||||
|         file, normally something like ``account.binance.paper.toml``. | ||||
| 
 | ||||
|         ''' | ||||
|         # TODO: show diff output? | ||||
|         # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries | ||||
|         # active, closed_pp_objs = acnt.dump_active() | ||||
| 
 | ||||
|         active, closed = self.dump_active() | ||||
|         pp_entries = self.prep_toml(active=active) | ||||
|         if pp_entries: | ||||
|             log.info( | ||||
|                 f'Updating positions in ``{self.conf_path}``:\n' | ||||
|                 f'\n{pformat(pp_entries)}' | ||||
|             ) | ||||
| 
 | ||||
|             if self.brokername in self.conf: | ||||
|                 log.warning( | ||||
|                     f'Rewriting {self.conf_path} keys to drop <broker.acct>!' | ||||
|                 ) | ||||
|                 # legacy key schema including <brokername.account>, so | ||||
|                 # rewrite all entries to drop those tables since we now | ||||
|                 # put that in the filename! | ||||
|                 accounts = self.conf.pop(self.brokername) | ||||
|                 assert len(accounts) == 1 | ||||
|                 entries = accounts.pop(self.acctid) | ||||
|                 self.conf.update(entries) | ||||
| 
 | ||||
|             self.conf.update(pp_entries) | ||||
| 
 | ||||
|             # drop any entries that are computed as net-zero | ||||
|             # we don't care about storing in the pps file. | ||||
|             if closed: | ||||
|                 bs_mktid: str | ||||
|                 for bs_mktid, pos in closed.items(): | ||||
|                     fqme: str = pos.mkt.fqme | ||||
|                     if fqme in self.conf: | ||||
|                         self.conf.pop(fqme) | ||||
|                     else: | ||||
|                         # TODO: we really need a diff set of | ||||
|                         # loglevels/colors per subsys. | ||||
|                         log.warning( | ||||
|                             f'Recent position for {fqme} was closed!' | ||||
|                         ) | ||||
| 
 | ||||
|         # if there are no active position entries according | ||||
|         # to the toml dump output above, then clear the config | ||||
|         # file of all entries. | ||||
|         elif self.conf: | ||||
|             for entry in list(self.conf): | ||||
|                 del self.conf[entry] | ||||
| 
 | ||||
|         # XXX WTF: if we use a tomlkit.Integer here we get this | ||||
|         # super weird --1 thing going on for cumsize!?1! | ||||
|         # NOTE: the fix was to always float() the size value loaded | ||||
|         # in open_pps() below! | ||||
|         config.write( | ||||
|             config=self.conf, | ||||
|             path=self.conf_path, | ||||
|             fail_empty=False, | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| def load_account( | ||||
|     brokername: str, | ||||
|     acctid: str, | ||||
| 
 | ||||
|     dirpath: Path | None = None, | ||||
| 
 | ||||
| ) -> tuple[dict, Path]: | ||||
|     ''' | ||||
|     Load an accounting (with positions) file from | ||||
|     $CONFIG_DIR/accounting/account.<brokername>.<acctid>.toml | ||||
| 
 | ||||
|     Where normally $CONFIG_DIR = ~/.config/piker/ | ||||
|     and we implicitly create an accounting subdir which should | ||||
|     normally be linked to a git repo managed by the user B) | ||||
| 
 | ||||
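|     e.g. `load_account('binance', 'paper')` will (by default) | ||||
|     load from, | ||||
| 
 | ||||
|     ~/.config/piker/accounting/account.binance.paper.toml | ||||
| 
 | ||||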
|     ''' | ||||
|     legacy_fn: str = f'pps.{brokername}.{acctid}.toml' | ||||
|     fn: str = f'account.{brokername}.{acctid}.toml' | ||||
| 
 | ||||
|     dirpath: Path = dirpath or (config._config_dir / 'accounting') | ||||
|     if not dirpath.is_dir(): | ||||
|         dirpath.mkdir() | ||||
| 
 | ||||
|     conf, path = config.load( | ||||
|         path=dirpath / fn, | ||||
|         decode=tomlkit.parse, | ||||
|         touch_if_dne=True, | ||||
|     ) | ||||
| 
 | ||||
|     if not conf: | ||||
|         legacypath = dirpath / legacy_fn | ||||
|         if legacypath.is_file(): | ||||
|             # only warn when a legacy file actually needs migration | ||||
|             log.warning( | ||||
|                 f'Your account file is using the legacy `pps.` prefix..\n' | ||||
|                 f'Rewriting contents to new name -> {path}\n' | ||||
|                 'Please delete the old file!\n' | ||||
|                 f'|-> {legacypath}\n' | ||||
|             ) | ||||
|             legacy_config, _ = config.load( | ||||
|                 path=legacypath, | ||||
| 
 | ||||
|                 # TODO: move to tomlkit: | ||||
|                 # - needs to be fixed to support bidict? | ||||
|                 #   https://github.com/sdispater/tomlkit/issues/289 | ||||
|                 # - we need to use or fork's fix to do multiline array | ||||
|                 #   indenting. | ||||
|                 decode=tomlkit.parse, | ||||
|             ) | ||||
|             conf.update(legacy_config) | ||||
| 
 | ||||
|             # XXX: override the presumably previously non-existent | ||||
|             # file with legacy's contents. | ||||
|             config.write( | ||||
|                 conf, | ||||
|                 path=path, | ||||
|                 fail_empty=False, | ||||
|             ) | ||||
| 
 | ||||
|     return conf, path | ||||
| 
 | ||||
| 
 | ||||
| # TODO: make this async and offer a `get_account()` that | ||||
| # can be used from sync code which does the same thing as | ||||
| # open_trade_ledger()! | ||||
| @cm | ||||
| def open_account( | ||||
|     brokername: str, | ||||
|     acctid: str, | ||||
|     write_on_exit: bool = False, | ||||
| 
 | ||||
|     # for testing or manual load from file | ||||
|     _fp: Path | None = None, | ||||
| 
 | ||||
| ) -> Generator[Account, None, None]: | ||||
|     ''' | ||||
|     Read out broker-specific position entries from the | ||||
|     incremental update (account) file, normally something like | ||||
|     ``account.binance.paper.toml``. | ||||
| 
 | ||||
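|     A hedged usage sketch (ids illustrative): | ||||
|     >>> with open_account( | ||||
|     ...     'binance', 'paper', | ||||
|     ...     write_on_exit=True,  # flush updates to file on exit | ||||
|     ... ) as acnt: | ||||
|     ...     print(acnt.pps) | ||||
| 
 | ||||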
|     ''' | ||||
|     conf: dict | ||||
|     conf_path: Path | ||||
|     conf, conf_path = load_account( | ||||
|         brokername, | ||||
|         acctid, | ||||
|         dirpath=_fp, | ||||
|     ) | ||||
| 
 | ||||
|     if brokername in conf: | ||||
|         log.warning( | ||||
|             f'Rewriting {conf_path} keys to drop <broker.acct>!' | ||||
|         ) | ||||
|         # legacy key schema including <brokername.account>, so | ||||
|         # rewrite all entries to drop those tables since we now | ||||
|         # put that in the filename! | ||||
|         accounts = conf.pop(brokername) | ||||
|         for _acctid in accounts.copy():  # avoid shadowing input `acctid` | ||||
|             entries = accounts.pop(_acctid) | ||||
|             conf.update(entries) | ||||
| 
 | ||||
|     # TODO: ideally we can pass in an existing | ||||
|     # pps state to this right? such that we | ||||
|     # don't have to do a ledger reload all the | ||||
|     # time.. a couple ideas I can think of, | ||||
|     # - mirror this in some client side actor which | ||||
|     #   does the actual ledger updates (say the paper | ||||
|     #   engine proc if we decide to always spawn it?), | ||||
|     # - do diffs against updates from the ledger writer | ||||
|     #   actor and the in-mem state here? | ||||
|     from ..brokers import get_brokermod | ||||
|     mod: ModuleType = get_brokermod(brokername) | ||||
| 
 | ||||
|     pp_objs: dict[str, Position] = {} | ||||
|     acnt = Account( | ||||
|         mod, | ||||
|         acctid, | ||||
|         pp_objs, | ||||
|         conf_path, | ||||
|         conf=conf, | ||||
|     ) | ||||
| 
 | ||||
|     # unmarshal/load ``pps.toml`` config entries into object form | ||||
|     # and update `Account` obj entries. | ||||
|     for fqme, entry in conf.items(): | ||||
| 
 | ||||
|         # unique broker-backend-system market id | ||||
|         bs_mktid = str( | ||||
|             entry.get('bsuid') | ||||
|             or entry.get('bs_mktid') | ||||
|         ) | ||||
|         price_tick = Decimal(str( | ||||
|             entry.get('price_tick_size') | ||||
|             or entry.get('price_tick') | ||||
|             or '0.01' | ||||
|         )) | ||||
|         size_tick = Decimal(str( | ||||
|             entry.get('lot_tick_size') | ||||
|             or entry.get('size_tick') | ||||
|             or '0.0' | ||||
|         )) | ||||
| 
 | ||||
|         # load the pair using the fqme which | ||||
|         # will make the pair "unresolved" until | ||||
|         # the backend broker actually loads | ||||
|         # the market and position info. | ||||
|         mkt = MktPair.from_fqme( | ||||
|             fqme, | ||||
|             price_tick=price_tick, | ||||
|             size_tick=size_tick, | ||||
|             bs_mktid=bs_mktid, | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: RE: general "events" instead of just "clears": | ||||
|         # - make this an `events` field and support more event types | ||||
|         #   such as 'split', 'name_change', 'mkt_info', etc.. | ||||
|         # - should be make a ``Struct`` for clear/event entries? convert | ||||
|         #   "clear events table" from the toml config (list of a dicts) | ||||
|         #   and load it into object form for use in position processing of | ||||
|         #   new clear events. | ||||
| 
 | ||||
|         # convert clears sub-tables (only in this form | ||||
|         # for toml re-presentation) back into a master table. | ||||
|         toml_clears_list: list[dict[str, Any]] = entry['clears'] | ||||
|         trans: list[Transaction] = [] | ||||
| 
 | ||||
|         for clears_table in toml_clears_list: | ||||
|             tid = clears_table['tid'] | ||||
|             dt: tomlkit.items.DateTime | str = clears_table['dt'] | ||||
| 
 | ||||
|             # woa cool, `tomlkit` will actually load datetimes into | ||||
|             # native form B) | ||||
|             if isinstance(dt, str): | ||||
|                 dt = pendulum.parse(dt) | ||||
| 
 | ||||
|             clears_table['dt'] = dt | ||||
|             trans.append(Transaction( | ||||
|                 fqme=fqme, | ||||
|                 # sym=mkt, | ||||
|                 bs_mktid=bs_mktid, | ||||
|                 tid=tid, | ||||
|                 # XXX: not sure why sometimes these are loaded as | ||||
|                 # `tomlkit.Integer` and are eventually written with | ||||
|                 # an extra `-` in front like `--1`? | ||||
|                 size=float(clears_table['size']), | ||||
|                 price=float(clears_table['price']), | ||||
|                 cost=clears_table['cost'], | ||||
|                 dt=dt, | ||||
|             )) | ||||
| 
 | ||||
|         split_ratio = entry.get('split_ratio') | ||||
| 
 | ||||
|         # if a string-ified expiry field is loaded we try to parse | ||||
|         # it, THO, they should normally be serialized as native | ||||
|         # TOML datetimes, since that's supported. | ||||
|         if ( | ||||
|             (expiry := entry.get('expiry')) | ||||
|             and isinstance(expiry, str) | ||||
|         ): | ||||
|             expiry: pendulum.DateTime = pendulum.parse(expiry) | ||||
| 
 | ||||
|         pp = pp_objs[bs_mktid] = Position( | ||||
|             mkt, | ||||
|             split_ratio=split_ratio, | ||||
|             bs_mktid=bs_mktid, | ||||
|         ) | ||||
| 
 | ||||
|         # XXX: super critical, we need to be sure to include | ||||
|         # all pps.toml clears to avoid reusing clears that were | ||||
|         # already included in the current incremental update | ||||
|         # state, since today's records may have already been | ||||
|         # processed! | ||||
|         for t in trans: | ||||
|             pp.add_clear(t) | ||||
| 
 | ||||
|     try: | ||||
|         yield acnt | ||||
|     finally: | ||||
|         if write_on_exit: | ||||
|             acnt.write_config() | ||||
| 
 | ||||
| 
 | ||||
| # TODO: drop the old name and THIS! | ||||
| @cm | ||||
| def open_pps( | ||||
|     *args, | ||||
|     **kwargs, | ||||
| ) -> Generator[Account, None, None]: | ||||
|     log.warning( | ||||
|         '`open_pps()` is now deprecated!\n' | ||||
|         'Please use `with open_account() as acnt:`' | ||||
|     ) | ||||
|     with open_account(*args, **kwargs) as acnt: | ||||
|         yield acnt | ||||
| 
 | ||||
| 
 | ||||
| def load_account_from_ledger( | ||||
| 
 | ||||
|     brokername: str, | ||||
|     acctname: str, | ||||
| 
 | ||||
|     # post normalization filter on ledger entries to be processed | ||||
|     filter_by_ids: dict[str, list[str]] | None = None, | ||||
| 
 | ||||
|     ledger: TransactionLedger | None = None, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> Account: | ||||
|     ''' | ||||
|     Open a ledger file by broker name and account, read in and | ||||
|     process any trade records into our normalized ``Transaction`` | ||||
|     form, then update the matching ``Account``'s positions (ppu and | ||||
|     cumsize recomputed per ``bs_mktid``) and deliver it. | ||||
| 
 | ||||
|     ''' | ||||
|     acnt: Account | ||||
|     with open_account( | ||||
|         brokername, | ||||
|         acctname, | ||||
|         **kwargs, | ||||
|     ) as acnt: | ||||
|         if ledger is not None: | ||||
|             acnt.update_from_ledger(ledger) | ||||
| 
 | ||||
|     return acnt | ||||
|  | @ -1,698 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Calculation routines for balance and position tracking such that | ||||
| you know when you're losing money (if possible) XD | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from collections.abc import ValuesView | ||||
| from contextlib import contextmanager as cm | ||||
| from math import copysign | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     Iterator, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import polars as pl | ||||
| from pendulum import ( | ||||
|     DateTime, | ||||
|     from_timestamp, | ||||
|     parse, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._ledger import ( | ||||
|         Transaction, | ||||
|         TransactionLedger, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def ppu( | ||||
|     clears: Iterator[Transaction], | ||||
| 
 | ||||
|     # include transaction cost in breakeven price | ||||
|     # and presume the worst case of the same cost | ||||
|     # to exit this transaction (even though in reality | ||||
|     # it will be dynamic based on exit strategy). | ||||
|     cost_scalar: float = 2, | ||||
| 
 | ||||
|     # return the ledger of clears as a (now dt sorted) dict with | ||||
|     # new position fields inserted alongside each entry. | ||||
|     as_ledger: bool = False, | ||||
| 
 | ||||
| ) -> float | list[tuple[str, dict]]: | ||||
|     ''' | ||||
|     Compute the "price-per-unit" price for the given non-zero sized | ||||
|     rolling position. | ||||
| 
 | ||||
|     The recurrence relation which computes this (exponential) mean | ||||
|     per new clear which **increases** the accumulative position size | ||||
|     is: | ||||
| 
 | ||||
|     ppu[-1] = ( | ||||
|         ppu[-2] * accum_size[-2] | ||||
|         + | ||||
|         cost_basis | ||||
|     ) / accum_size[-1] | ||||
| 
 | ||||
|     where `cost_basis` for the current step is simply the price | ||||
|     * size of the most recent clearing transaction. | ||||
| 
 | ||||
|     ----- | ||||
|     TODO: get the BEP computed and working similarly! | ||||
|     ----- | ||||
|     the equivalent "break even price" or bep at each new clear | ||||
|     event step conversely only changes on a "position exiting | ||||
|     clear" which **decreases** the cumulative dst asset size: | ||||
| 
 | ||||
|     bep[-1] = ppu[-1] - (cum_pnl[-1] / cumsize[-1]) | ||||
| 
 | ||||
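|     A hedged worked instance: entering 2 units @ 100 then 2 more | ||||
|     @ 110 (with zero txn costs) gives | ||||
| 
 | ||||
|     ppu = (100*2 + 110*2) / 4 = 105 | ||||
| 
 | ||||
|     and a subsequent 1 unit exit leaves ppu at 105 with a cumsize | ||||
|     of 3, since exits re-weight but never move the mean. | ||||
| 
 | ||||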
|     ''' | ||||
|     asize_h: list[float] = []  # historical accumulative size | ||||
|     ppu_h: list[float] = []  # historical price-per-unit | ||||
|     # ledger: dict[str, dict] = {} | ||||
|     ledger: list[dict] = [] | ||||
| 
 | ||||
|     t: Transaction | ||||
|     for t in clears: | ||||
|         clear_size: float = t.size | ||||
|         clear_price: str | float = t.price | ||||
|         is_clear: bool = not isinstance(clear_price, str) | ||||
| 
 | ||||
|         last_accum_size = asize_h[-1] if asize_h else 0 | ||||
|         accum_size: float = last_accum_size + clear_size | ||||
|         accum_sign = copysign(1, accum_size) | ||||
|         sign_change: bool = False | ||||
| 
 | ||||
|         # on transfers we normally write some non-valid | ||||
|         # price since withdrawal to another account/wallet | ||||
|         # has nothing to do with inter-asset-market prices. | ||||
|         # TODO: this should be better handled via a `type: 'tx'` | ||||
|         # field as per existing issue surrounding all this: | ||||
|         # https://github.com/pikers/piker/issues/510 | ||||
|         if isinstance(clear_price, str): | ||||
|             # TODO: we can't necessarily have this commit to | ||||
|             # the overall pos size since we also need to | ||||
|             # include other positions contributions to this | ||||
|             # balance or we might end up with a -ve balance for | ||||
|             # the position.. | ||||
|             continue | ||||
| 
 | ||||
|         # test if the pp somehow went "passed" a net zero size state | ||||
|         # resulting in a change of the "sign" of the size (+ve for | ||||
|         # long, -ve for short). | ||||
|         sign_change = ( | ||||
|             copysign(1, last_accum_size) + accum_sign == 0 | ||||
|             and last_accum_size != 0 | ||||
|         ) | ||||
| 
 | ||||
|         # since we passed the net-zero-size state the new size | ||||
|         # after sum should be the remaining size the new | ||||
|         # "direction" (aka, long vs. short) for this clear. | ||||
|         if sign_change: | ||||
|             clear_size: float = accum_size | ||||
|             abs_diff: float = abs(accum_size) | ||||
|             asize_h.append(0) | ||||
|             ppu_h.append(0) | ||||
| 
 | ||||
|         else: | ||||
|             # old size minus the new size gives us size diff with | ||||
|             # +ve -> increase in pp size | ||||
|             # -ve -> decrease in pp size | ||||
|             abs_diff = abs(accum_size) - abs(last_accum_size) | ||||
| 
 | ||||
|         # XXX: LIFO breakeven price update. only an increase in size | ||||
|         # of the position contributes to the breakeven price, | ||||
|         # a decrease does not (i.e. the position is being made | ||||
|         # smaller). | ||||
|         # abs_clear_size = abs(clear_size) | ||||
|         abs_new_size: float | int = abs(accum_size) | ||||
| 
 | ||||
|         if ( | ||||
|             abs_diff > 0 | ||||
|             and is_clear | ||||
|         ): | ||||
|             cost_basis = ( | ||||
|                 # cost basis for this clear | ||||
|                 clear_price * abs(clear_size) | ||||
|                 + | ||||
|                 # transaction cost | ||||
|                 accum_sign * cost_scalar * t.cost | ||||
|             ) | ||||
| 
 | ||||
|             if asize_h: | ||||
|                 size_last: float = abs(asize_h[-1]) | ||||
|                 cb_last: float = ppu_h[-1] * size_last | ||||
|                 ppu: float = (cost_basis + cb_last) / abs_new_size | ||||
| 
 | ||||
|             else: | ||||
|                 ppu: float = cost_basis / abs_new_size | ||||
| 
 | ||||
|         else: | ||||
|             # TODO: for PPU we should probably handle txs out | ||||
|             # (aka withdrawals) similarly by simply not having | ||||
|             # them contrib to the running PPU calc and only | ||||
|             # when the next entry clear comes in (which will | ||||
|             # then have a higher weighting on the PPU). | ||||
| 
 | ||||
|             # on "exit" clears from a given direction, | ||||
|             # only the size changes not the price-per-unit | ||||
|             # need to be updated since the ppu remains constant | ||||
|             # and gets weighted by the new size. | ||||
|             ppu: float = ppu_h[-1] if ppu_h else 0  # set to previous value | ||||
| 
 | ||||
|         # extend with new rolling metric for this step | ||||
|         ppu_h.append(ppu) | ||||
|         asize_h.append(accum_size) | ||||
| 
 | ||||
|         # ledger[t.tid] = { | ||||
|             # 'txn': t, | ||||
|         # ledger[t.tid] = t.to_dict() | { | ||||
|         ledger.append(( | ||||
|             t.tid, | ||||
|             t.to_dict() | { | ||||
|                 'ppu': ppu, | ||||
|                 'cumsize': accum_size, | ||||
|                 'sign_change': sign_change, | ||||
| 
 | ||||
|                 # TODO: cum_pnl, bep | ||||
|             } | ||||
|         )) | ||||
| 
 | ||||
|     final_ppu = ppu_h[-1] if ppu_h else 0 | ||||
|     # TODO: once we have etypes in all ledger entries.. | ||||
|     # handle any split info entered (for now) manually by user | ||||
|     # if self.split_ratio is not None: | ||||
|     #     final_ppu /= self.split_ratio | ||||
| 
 | ||||
|     if as_ledger: | ||||
|         return ledger | ||||
| 
 | ||||
|     else: | ||||
|         return final_ppu | ||||
| 
 | ||||
| 
 | ||||
| def iter_by_dt( | ||||
|     records: ( | ||||
|         dict[str, dict[str, Any]] | ||||
|         | ValuesView[dict]  # eg. `Position._events.values()` | ||||
|         | list[dict] | ||||
|         | list[Transaction]  # XXX preferred! | ||||
|     ), | ||||
| 
 | ||||
|     # NOTE: parsers are looked up in the insert order | ||||
|     # so if you know that the record stats show some field | ||||
|     # is more common then others, stick it at the top B) | ||||
|     parsers: dict[str, Callable | None] = { | ||||
|         'dt': parse,  # parity case | ||||
|         'datetime': parse,  # datetime-str | ||||
|         'time': from_timestamp,  # float epoch | ||||
|     }, | ||||
|     key: Callable | None = None, | ||||
| 
 | ||||
| ) -> Iterator[tuple[str, dict]]: | ||||
|     ''' | ||||
|     Iterate entries of a transaction table sorted by entry recorded | ||||
|     datetime presumably set at the ``'dt'`` field in each entry. | ||||
| 
 | ||||
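|     E.g. a hedged sketch over mixed-field records: | ||||
|     >>> recs = [ | ||||
|     ...     {'dt': '2022-01-02T00:00:00Z'}, | ||||
|     ...     {'time': 1640995200.0},  # 2022-01-01 as epoch float | ||||
|     ... ] | ||||
|     >>> list(iter_by_dt(recs))  # the epoch entry sorts first | ||||
| 
 | ||||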
|     ''' | ||||
|     if isinstance(records, dict): | ||||
|         records: list[tuple[str, dict]] = list(records.items()) | ||||
| 
 | ||||
|     def dyn_parse_to_dt( | ||||
|         tx: tuple[str, dict[str, Any]] | Transaction, | ||||
|     ) -> DateTime: | ||||
| 
 | ||||
|         # handle `.items()` inputs | ||||
|         if isinstance(tx, tuple): | ||||
|             tx = tx[1] | ||||
| 
 | ||||
|         # dict or tx object? | ||||
|         isdict: bool = isinstance(tx, dict) | ||||
| 
 | ||||
|         # get best parser for this record.. | ||||
|         for k in parsers: | ||||
|             if ( | ||||
|                 (isdict and k in tx) | ||||
|                 or getattr(tx, k, None) | ||||
|             ): | ||||
|                 v = tx[k] if isdict else getattr(tx, k) | ||||
|                 assert v is not None, f'No valid value for `{k}`!?' | ||||
| 
 | ||||
|                 # only call parser on the value if not None from | ||||
|                 # the `parsers` table above (when NOT using | ||||
|                 # `.get()`), otherwise pass through the value and | ||||
|                 # sort on it directly | ||||
|                 if ( | ||||
|                     not isinstance(v, DateTime) | ||||
|                     and (parser := parsers.get(k)) | ||||
|                 ): | ||||
|                     return parser(v) | ||||
|                 else: | ||||
|                     return v | ||||
| 
 | ||||
|         else: | ||||
|             # XXX: should never get here.. | ||||
|             breakpoint() | ||||
| 
 | ||||
|     entry: tuple[str, dict] | Transaction | ||||
|     for entry in sorted( | ||||
|         records, | ||||
|         key=key or dyn_parse_to_dt, | ||||
|     ): | ||||
|         # NOTE the type sig above; either pairs or txns B) | ||||
|         yield entry | ||||
| 
 | ||||
| 
 | ||||
| # TODO: probably just move this into the test suite or | ||||
| # keep it here for use from as such? | ||||
| # def ensure_state(self) -> None: | ||||
| #     ''' | ||||
| #     Audit either the `.cumsize` and `.ppu` local instance vars against | ||||
| #     the clears table calculations and return the calc-ed values if | ||||
| #     they differ and log warnings to console. | ||||
| 
 | ||||
| #     ''' | ||||
| #     # clears: list[dict] = self._clears | ||||
| 
 | ||||
| #     # self.first_clear_dt = min(clears, key=lambda e: e['dt'])['dt'] | ||||
| #     last_clear: dict = clears[-1] | ||||
| #     csize: float = self.calc_size() | ||||
| #     accum: float = last_clear['accum_size'] | ||||
| 
 | ||||
| #     if not self.expired(): | ||||
| #         if ( | ||||
| #             csize != accum | ||||
| #             and csize != round(accum * (self.split_ratio or 1)) | ||||
| #         ): | ||||
| #             raise ValueError(f'Size mismatch: {csize}') | ||||
| #     else: | ||||
| #         assert csize == 0, 'Contract is expired but non-zero size?' | ||||
| 
 | ||||
| #     if self.cumsize != csize: | ||||
| #         log.warning( | ||||
| #             'Position state mismatch:\n' | ||||
| #             f'{self.cumsize} => {csize}' | ||||
| #         ) | ||||
| #         self.cumsize = csize | ||||
| 
 | ||||
| #     cppu: float = self.calc_ppu() | ||||
| #     ppu: float = last_clear['ppu'] | ||||
| #     if ( | ||||
| #         cppu != ppu | ||||
| #         and self.split_ratio is not None | ||||
| 
 | ||||
| #         # handle any split info entered (for now) manually by user | ||||
| #         and cppu != (ppu / self.split_ratio) | ||||
| #     ): | ||||
| #         raise ValueError(f'PPU mismatch: {cppu}') | ||||
| 
 | ||||
| #     if self.ppu != cppu: | ||||
| #         log.warning( | ||||
| #             'Position state mismatch:\n' | ||||
| #             f'{self.ppu} => {cppu}' | ||||
| #         ) | ||||
| #         self.ppu = cppu | ||||
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def open_ledger_dfs( | ||||
| 
 | ||||
|     brokername: str, | ||||
|     acctname: str, | ||||
| 
 | ||||
|     ledger: TransactionLedger | None = None, | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     dict[str, pl.DataFrame], | ||||
|     TransactionLedger, | ||||
| ]: | ||||
|     ''' | ||||
|     Open a ledger of trade records (presumably from some broker | ||||
|     backend), normalize the records into `Transaction`s via the | ||||
|     backend's declared endpoint and cast them to per-market | ||||
|     `polars.DataFrame`s; the ledger is also delivered so it can | ||||
|     be updated on exit. | ||||
| 
 | ||||
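|     A hedged usage sketch (ids illustrative): | ||||
|     >>> with open_ledger_dfs('kraken', 'paper') as (dfs, ledger): | ||||
|     ...     for bs_mktid, df in dfs.items(): | ||||
|     ...         print(df.select(['dt', 'size', 'cumsize'])) | ||||
| 
 | ||||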
|     ''' | ||||
|     from piker.toolz import open_crash_handler | ||||
|     with open_crash_handler(): | ||||
|         if not ledger: | ||||
|             import time | ||||
|             from ._ledger import open_trade_ledger | ||||
| 
 | ||||
|             now = time.time() | ||||
| 
 | ||||
|             with open_trade_ledger( | ||||
|                     brokername, | ||||
|                     acctname, | ||||
|                     rewrite=True, | ||||
|                     allow_from_sync_code=True, | ||||
| 
 | ||||
|                     # proxied through from caller | ||||
|                     **kwargs, | ||||
| 
 | ||||
|             ) as ledger: | ||||
|                 if not ledger: | ||||
|                     raise ValueError(f'No ledger for {acctname}@{brokername} exists?') | ||||
| 
 | ||||
|                 print(f'LEDGER LOAD TIME: {time.time() - now}') | ||||
| 
 | ||||
|         yield ledger_to_dfs(ledger), ledger | ||||
| 
 | ||||
| 
 | ||||
| def ledger_to_dfs( | ||||
|     ledger: TransactionLedger, | ||||
| 
 | ||||
| ) -> dict[str, pl.DataFrame]: | ||||
| 
 | ||||
|     txns: dict[str, Transaction] = ledger.to_txns() | ||||
| 
 | ||||
|     # ldf = pl.DataFrame( | ||||
|     #     list(txn.to_dict() for txn in txns.values()), | ||||
|     ldf = pl.from_dicts( | ||||
|         list(txn.to_dict() for txn in txns.values()), | ||||
| 
 | ||||
|         # only for ordering the cols | ||||
|         schema=[ | ||||
|             ('fqme', str), | ||||
|             ('tid', str), | ||||
|             ('bs_mktid', str), | ||||
|             ('expiry', str), | ||||
|             ('etype', str), | ||||
|             ('dt', str), | ||||
|             ('size', pl.Float64), | ||||
|             ('price', pl.Float64), | ||||
|             ('cost', pl.Float64), | ||||
|         ], | ||||
|     ).sort(  # chronological order | ||||
|         'dt' | ||||
|     ).with_columns([ | ||||
|         pl.col('dt').str.to_datetime(), | ||||
|         # pl.col('expiry').str.to_datetime(), | ||||
|         # pl.col('expiry').dt.date(), | ||||
|     ]) | ||||
| 
 | ||||
|     # filter out to the columns matching values filter passed | ||||
|     # as input. | ||||
|     # if filter_by_ids: | ||||
|     #     for col, vals in filter_by_ids.items(): | ||||
|     #         str_vals = set(map(str, vals)) | ||||
|     #         pred: pl.Expr = pl.col(col).eq(str_vals.pop()) | ||||
|     #         for val in str_vals: | ||||
|     #             pred |= pl.col(col).eq(val) | ||||
| 
 | ||||
|     #     fdf = df.filter(pred) | ||||
| 
 | ||||
|     # TODO: originally i had tried just using a plain ol' groupby | ||||
|     # + agg here but the issue was re-inserting to the src frame. | ||||
|     # however, learning more about `polars` seems like maybe we can | ||||
|     # use `.over()`? | ||||
|     # https://pola-rs.github.io/polars/py-polars/html/reference/expressions/api/polars.Expr.over.html#polars.Expr.over | ||||
|     # => CURRENTLY we break up into a frame per mkt / fqme | ||||
|     dfs: dict[str, pl.DataFrame] = ldf.partition_by( | ||||
|         'bs_mktid', | ||||
|         as_dict=True, | ||||
|     ) | ||||
| 
 | ||||
|     # TODO: not sure if this is even possible but.. | ||||
|     # - it'd be more ideal to use `ppt = df.groupby('fqme').agg([` | ||||
|     # - ppu and bep calcs! | ||||
|     for key in dfs: | ||||
| 
 | ||||
|         # convert to lazy form (since apparently we might need it | ||||
|         # eventually ...) | ||||
|         df: pl.DataFrame = dfs[key] | ||||
| 
 | ||||
|         ldf: pl.LazyFrame = df.lazy() | ||||
| 
 | ||||
|         df = dfs[key] = ldf.with_columns([ | ||||
| 
 | ||||
|             pl.cumsum('size').alias('cumsize'), | ||||
| 
 | ||||
|             # amount of source asset "sent" (via buy txns in | ||||
|             # the market) to acquire the dst asset, PER txn. | ||||
|             # when this value is -ve (i.e. a sell operation) then | ||||
|             # the amount sent is actually "returned". | ||||
|             ( | ||||
|                 (pl.col('price') * pl.col('size')) | ||||
|                 + | ||||
|                 (pl.col('cost')) # * pl.col('size').sign()) | ||||
|             ).alias('dst_bot'), | ||||
| 
 | ||||
|         ]).with_columns([ | ||||
| 
 | ||||
|             # rolling balance in src asset units | ||||
|             (pl.col('dst_bot').cumsum() * -1).alias('src_balance'), | ||||
| 
 | ||||
|             # "position operation type" in terms of increasing the | ||||
|             # amount in the dst asset (entering) or decreasing the | ||||
|             # amount in the dst asset (exiting). | ||||
|             pl.when( | ||||
|                 pl.col('size').sign() == pl.col('cumsize').sign() | ||||
| 
 | ||||
|             ).then( | ||||
|                 pl.lit('enter')  # txn grows the dst asset (abs) size | ||||
| 
 | ||||
|             ).otherwise( | ||||
|                 pl.when(pl.col('cumsize') == 0) | ||||
|                 .then(pl.lit('exit_to_zero')) | ||||
|                 .otherwise(pl.lit('exit')) | ||||
|             ).alias('descr'), | ||||
| 
 | ||||
|             (pl.col('cumsize').sign() == pl.col('size').sign()) | ||||
|             .alias('is_enter'), | ||||
| 
 | ||||
|         ]).with_columns([ | ||||
| 
 | ||||
|             # pl.lit(0, dtype=pl.Utf8).alias('virt_cost'), | ||||
|             pl.lit(0, dtype=pl.Float64).alias('applied_cost'), | ||||
|             pl.lit(0, dtype=pl.Float64).alias('pos_ppu'), | ||||
|             pl.lit(0, dtype=pl.Float64).alias('per_txn_pnl'), | ||||
|             pl.lit(0, dtype=pl.Float64).alias('cum_pos_pnl'), | ||||
|             pl.lit(0, dtype=pl.Float64).alias('pos_bep'), | ||||
|             pl.lit(0, dtype=pl.Float64).alias('cum_ledger_pnl'), | ||||
|             pl.lit(None, dtype=pl.Float64).alias('ledger_bep'), | ||||
| 
 | ||||
|             # TODO: instead of the iterative loop below i guess we | ||||
|             # could try using embedded lists to track which txns | ||||
|             # are part of which ppu / bep calcs? Not sure this will | ||||
|             # look any better nor be any more performant though xD | ||||
|             # pl.lit([[0]], dtype=pl.List(pl.Float64)).alias('list'), | ||||
| 
 | ||||
|         # choose fields to emit for accounting purposes | ||||
|         ]).select([ | ||||
|             pl.exclude([ | ||||
|                 'tid', | ||||
|                 # 'dt', | ||||
|                 'expiry', | ||||
|                 'bs_mktid', | ||||
|                 'etype', | ||||
|                 # 'is_enter', | ||||
|             ]), | ||||
|         ]).collect() | ||||
| 
 | ||||
|         # compute recurrence relations for ppu and bep | ||||
|         last_ppu: float = 0 | ||||
|         last_cumsize: float = 0 | ||||
|         last_ledger_pnl: float = 0 | ||||
|         last_pos_pnl: float = 0 | ||||
|         # ensure the bep vars are bound for the row-write below even | ||||
|         # if the first txn is a (pathological) exit-to-zero. | ||||
|         ledger_bep: float = 0 | ||||
|         pos_bep: float = 0 | ||||
|         virt_costs: list[float] = [0., 0.] | ||||
| 
 | ||||
|         # imperatively compute the PPU (price per unit) and BEP | ||||
|         # (break even price) iteratively over the ledger, oriented | ||||
|         # around each position state: a state of split balances in | ||||
|         # > 1 asset. | ||||
|         for i, row in enumerate(df.iter_rows(named=True)): | ||||
| 
 | ||||
|             cumsize: float = row['cumsize'] | ||||
|             is_enter: bool = row['is_enter'] | ||||
|             price: float = row['price'] | ||||
|             size: float = row['size'] | ||||
| 
 | ||||
|             # the profit is ALWAYS decreased, aka made a "loss" | ||||
|             # by the constant fee charged by the txn provider! | ||||
|             # see below in final PnL calculation and row element | ||||
|             # set. | ||||
|             txn_cost: float = row['cost'] | ||||
|             pnl: float = 0 | ||||
| 
 | ||||
|             # ALWAYS reset per-position cum PnL | ||||
|             if last_cumsize == 0: | ||||
|                 last_pos_pnl: float = 0 | ||||
| 
 | ||||
|             # a "position size INCREASING" or ENTER transaction | ||||
|             # which "makes larger", in src asset unit terms, the | ||||
|             # trade's side-size of the destination asset: | ||||
|             # - "buying" (more) units of the dst asset | ||||
|             # - "selling" (more short) units of the dst asset | ||||
|             if is_enter: | ||||
| 
 | ||||
|                 # Naively include transaction cost in breakeven | ||||
|                 # price and presume the worst case of the | ||||
|                 # exact-same-cost-to-exit this transaction's worth | ||||
|                 # of size even though in reality it will be dynamic | ||||
|                 # based on exit strategy, price, liquidity, etc.. | ||||
|                 virt_cost: float = txn_cost | ||||
| 
 | ||||
|                 # cpu: float = cost / size | ||||
|                 # cummean of the cost-per-unit used for modelling | ||||
|                 # a projected future exit cost which we immediately | ||||
|                 # include in the costs incorporated to BEP on enters | ||||
|                 last_cum_costs_size, last_cpu = virt_costs | ||||
|                 cum_costs_size: float = last_cum_costs_size + abs(size) | ||||
|                 cumcpu = ( | ||||
|                     (last_cpu * last_cum_costs_size) | ||||
|                     + | ||||
|                     txn_cost | ||||
|                 ) / cum_costs_size | ||||
|                 virt_costs = [cum_costs_size, cumcpu] | ||||
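|                 # e.g. a numeric sketch of this cummean update: | ||||
|                 #   enter size=2, cost=1.0 -> cum_costs_size=2, | ||||
|                 #     cumcpu = (0*0 + 1.0)/2 = 0.5 | ||||
|                 #   enter size=3, cost=0.5 -> cum_costs_size=5, | ||||
|                 #     cumcpu = (0.5*2 + 0.5)/5 = 0.3 | ||||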
| 
 | ||||
|                 txn_cost = txn_cost + virt_cost | ||||
|                 # df[i, 'virt_cost'] = f'{-virt_cost} FROM {cumcpu}@{cum_costs_size}' | ||||
| 
 | ||||
|                 # a cumulative mean of the price-per-unit acquired | ||||
|                 # in the destination asset: | ||||
|                 # https://en.wikipedia.org/wiki/Moving_average#Cumulative_average | ||||
|                 # You could also think of this measure more | ||||
|                 # generally as an exponential mean with `alpha | ||||
|                 # = 1/N` where `N` is the current number of txns | ||||
|                 # included in the "position" defining set: | ||||
|                 # https://en.wikipedia.org/wiki/Exponential_smoothing | ||||
|                 ppu: float = ( | ||||
|                     ( | ||||
|                         (last_ppu * last_cumsize) | ||||
|                         + | ||||
|                         (price * size) | ||||
|                     ) / | ||||
|                     cumsize | ||||
|                 ) | ||||
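|                 # e.g. with made-up numbers: | ||||
|                 #   buy 2 @ 10 -> ppu = (0*0 + 10*2)/2 = 10 | ||||
|                 #   buy 2 @ 20 -> ppu = (10*2 + 20*2)/4 = 15 | ||||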
| 
 | ||||
|             # a "position size DECREASING" or EXIT transaction | ||||
|             # which "makes smaller" the trade's side-size of the | ||||
|             # destination asset: | ||||
|             # - selling previously bought units of the dst asset | ||||
|             #   (aka 'closing' a long position). | ||||
|             # - buying previously borrowed and sold (short) units | ||||
|             #   of the dst asset (aka 'covering'/'closing' a short | ||||
|             #   position). | ||||
|             else: | ||||
|                 # only changes on position size increasing txns | ||||
|                 ppu: float = last_ppu | ||||
| 
 | ||||
|                 # UNWIND IMPLIED COSTS FROM ENTRIES | ||||
|                 # => Reverse the virtual/modelled (2x predicted) txn | ||||
|                 # cost that was included in the least-recently | ||||
|                 # entered txn that is still part of the current CSi | ||||
|                 # set. | ||||
|                 # => we look up the cumulative cost-per-unit, apply | ||||
|                 # it over the current txn size (by multiplication) | ||||
|                 # and then reverse that previously applied cost on | ||||
|                 # the txn_cost for this record. | ||||
|                 # | ||||
|                 # NOTE: the current "model" simply presumes 2x | ||||
|                 # the txn cost for a matching enter-txn's | ||||
|                 # cost-per-unit; we then immediately reverse this | ||||
|                 # prediction and apply the real cost received here. | ||||
|                 last_cum_costs_size, last_cpu = virt_costs | ||||
|                 prev_virt_cost: float = last_cpu * abs(size) | ||||
|                 txn_cost: float = txn_cost - prev_virt_cost  # +ve thus a "reversal" | ||||
|                 cum_costs_size: float = last_cum_costs_size - abs(size) | ||||
|                 virt_costs = [cum_costs_size, last_cpu] | ||||
| 
 | ||||
|                 # df[i, 'virt_cost'] = ( | ||||
|                 #     f'{-prev_virt_cost} FROM {last_cpu}@{cum_costs_size}' | ||||
|                 # ) | ||||
| 
 | ||||
|                 # the per-txn profit or loss (PnL) given we are | ||||
|                 # (partially) "closing"/"exiting" the position via | ||||
|                 # this txn. | ||||
|                 pnl: float = (last_ppu - price) * size | ||||
| 
 | ||||
|             # always subtract txn cost from total txn pnl | ||||
|             txn_pnl: float = pnl - txn_cost | ||||
| 
 | ||||
|             # cumulative PnLs per txn | ||||
|             last_ledger_pnl = ( | ||||
|                 last_ledger_pnl + txn_pnl | ||||
|             ) | ||||
|             last_pos_pnl = ( | ||||
|                 last_pos_pnl + txn_pnl | ||||
|             ) | ||||
| 
 | ||||
|             if cumsize == 0: | ||||
|                 last_ppu = ppu = 0 | ||||
| 
 | ||||
|             # compute the BEP: "break even price", a value that | ||||
|             # determines at what price the remaining cumsize can be | ||||
|             # liquidated such that the net-PnL on the current | ||||
|             # position will result in ZERO gain or loss from open | ||||
|             # to close including all txn costs B) | ||||
|             if ( | ||||
|                 abs(cumsize) > 0  # non-exit-to-zero position txn | ||||
|             ): | ||||
|                 cumsize_sign: float = copysign(1, cumsize) | ||||
|                 ledger_bep: float = ( | ||||
|                     ( | ||||
|                         (ppu * cumsize) | ||||
|                         - | ||||
|                         (last_ledger_pnl * cumsize_sign) | ||||
|                     ) / cumsize | ||||
|                 ) | ||||
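|                 # sanity check: in the long case, liquidating the | ||||
|                 # full `cumsize` at this price gives | ||||
|                 # pnl = (ledger_bep - ppu) * cumsize = -last_ledger_pnl | ||||
|                 # i.e. it exactly cancels the pnl accrued so far; the | ||||
|                 # same algebra holds for `pos_bep` below. | ||||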
| 
 | ||||
|                 # NOTE: when we "enter more" dst asset units (aka | ||||
|                 # increase position state) AFTER having exited some | ||||
|                 # units (aka decreasing the pos size some) the bep | ||||
|                 # needs to be RECOMPUTED based on new ppu such that | ||||
|                 # liquidation of the cumsize at the bep price | ||||
|                 # results in a zero-pnl for the existing position | ||||
|                 # (since the last one). | ||||
|                 # for the position-lifetime BEP we can never have | ||||
|                 # a valid value once the position is "closed" | ||||
|                 # / fully exited Bo | ||||
|                 pos_bep: float = ( | ||||
|                     ( | ||||
|                         (ppu * cumsize) | ||||
|                         - | ||||
|                         (last_pos_pnl * cumsize_sign) | ||||
|                     ) / cumsize | ||||
|                 ) | ||||
| 
 | ||||
|             # inject DF row with all values | ||||
|             df[i, 'pos_ppu'] = ppu | ||||
|             df[i, 'per_txn_pnl'] = txn_pnl | ||||
|             df[i, 'applied_cost'] = -txn_cost | ||||
|             df[i, 'cum_pos_pnl'] = last_pos_pnl | ||||
|             df[i, 'pos_bep'] = pos_bep | ||||
|             df[i, 'cum_ledger_pnl'] = last_ledger_pnl | ||||
|             df[i, 'ledger_bep'] = ledger_bep | ||||
| 
 | ||||
|             # keep backrefs to satisfy the recurrence relation | ||||
|             last_ppu: float = ppu | ||||
|             last_cumsize: float = cumsize | ||||
| 
 | ||||
|     # TODO?: pass back the current `Position` object loaded from | ||||
|     # the account as well? Would provide incentive to do all | ||||
|     # this ledger loading inside a new async open_account(). | ||||
|     # bs_mktid: str = df[0]['bs_mktid'] | ||||
|     # pos: Position = acnt.pps[bs_mktid] | ||||
| 
 | ||||
|     return dfs | ||||
|  | @ -1,311 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| CLI front end for trades ledger and position tracking management. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from pprint import pformat | ||||
| 
 | ||||
| 
 | ||||
| from rich.console import Console | ||||
| from rich.markdown import Markdown | ||||
| import polars as pl | ||||
| import tractor | ||||
| import trio | ||||
| import typer | ||||
| 
 | ||||
| from ..log import get_logger | ||||
| from ..service import ( | ||||
|     open_piker_runtime, | ||||
| ) | ||||
| from ..clearing._messages import BrokerdPosition | ||||
| from ..calc import humanize | ||||
| from ..brokers._daemon import broker_init | ||||
| from ._ledger import ( | ||||
|     load_ledger, | ||||
|     TransactionLedger, | ||||
|     # open_trade_ledger, | ||||
| ) | ||||
| from .calc import ( | ||||
|     open_ledger_dfs, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| ledger = typer.Typer() | ||||
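| # (hypothetical shell usage, assuming this `typer` app is mounted | ||||
| # as the `ledger` sub-command of the top-level `piker` CLI): | ||||
| # $ piker ledger sync binance.paper | ||||
| # $ piker ledger disect binance.paper <fqme> | ||||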
| 
 | ||||
| 
 | ||||
| def unpack_fqan( | ||||
|     fully_qualified_account_name: str, | ||||
|     console: Console | None = None, | ||||
| ) -> tuple[str, str] | bool: | ||||
|     try: | ||||
|         brokername, account = fully_qualified_account_name.split('.') | ||||
|         return brokername, account | ||||
|     except ValueError: | ||||
|         if console is not None: | ||||
|             md = Markdown( | ||||
|                 f'=> `{fully_qualified_account_name}` <=\n\n' | ||||
|                 'is not a valid ' | ||||
|                 '__fully qualified account name?__\n\n' | ||||
|                 'Your account name needs to be of the form ' | ||||
|                 '`<brokername>.<account_name>`\n' | ||||
|             ) | ||||
|             console.print(md) | ||||
|         return False | ||||
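| # e.g. a valid fqan simply splits on the dot: | ||||
| # unpack_fqan('binance.paper') -> ('binance', 'paper') | ||||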
| 
 | ||||
| 
 | ||||
| @ledger.command() | ||||
| def sync( | ||||
|     fully_qualified_account_name: str, | ||||
|     pdb: bool = False, | ||||
| 
 | ||||
|     loglevel: str = typer.Option( | ||||
|         'error', | ||||
|         "-l", | ||||
|     ), | ||||
| ): | ||||
|     log = get_logger(loglevel) | ||||
|     console = Console() | ||||
| 
 | ||||
|     pair: tuple[str, str] | ||||
|     if not (pair := unpack_fqan( | ||||
|         fully_qualified_account_name, | ||||
|         console, | ||||
|     )): | ||||
|         return | ||||
| 
 | ||||
|     brokername, account = pair | ||||
| 
 | ||||
|     brokermod, start_kwargs, daemon_ep = broker_init( | ||||
|         brokername, | ||||
|         loglevel=loglevel, | ||||
|     ) | ||||
|     brokername: str = brokermod.name | ||||
| 
 | ||||
|     async def main(): | ||||
| 
 | ||||
|         async with ( | ||||
|             open_piker_runtime( | ||||
|                 name='ledger_cli', | ||||
|                 loglevel=loglevel, | ||||
|                 debug_mode=pdb, | ||||
| 
 | ||||
|             ) as (actor, sockaddr), | ||||
| 
 | ||||
|             tractor.open_nursery() as an, | ||||
|         ): | ||||
|             try: | ||||
|                 log.info( | ||||
|                     f'Piker runtime up as {actor.uid}@{sockaddr}' | ||||
|                 ) | ||||
| 
 | ||||
|                 portal = await an.start_actor( | ||||
|                     loglevel=loglevel, | ||||
|                     debug_mode=pdb, | ||||
|                     **start_kwargs, | ||||
|                 ) | ||||
| 
 | ||||
|                 from ..clearing import ( | ||||
|                     open_brokerd_dialog, | ||||
|                 ) | ||||
|                 brokerd_stream: tractor.MsgStream | ||||
| 
 | ||||
|                 async with ( | ||||
|                     # engage the brokerd daemon context | ||||
|                     portal.open_context( | ||||
|                         daemon_ep, | ||||
|                         brokername=brokername, | ||||
|                         loglevel=loglevel, | ||||
|                     ), | ||||
| 
 | ||||
|                     # manually open the brokerd trade dialog EP | ||||
|                     # (what the EMS normally does internally) B) | ||||
|                     open_brokerd_dialog( | ||||
|                         brokermod, | ||||
|                         portal, | ||||
|                         exec_mode=( | ||||
|                             'paper' | ||||
|                             if account == 'paper' | ||||
|                             else 'live' | ||||
|                         ), | ||||
|                         loglevel=loglevel, | ||||
|                     ) as ( | ||||
|                         brokerd_stream, | ||||
|                         pp_msg_table, | ||||
|                         accounts, | ||||
|                     ), | ||||
|                 ): | ||||
|                     try: | ||||
|                         assert len(accounts) == 1 | ||||
|                         if not pp_msg_table: | ||||
|                             ld, fpath = load_ledger(brokername, account) | ||||
|                             assert not ld, f'WTF did we fail to parse ledger:\n{ld}' | ||||
| 
 | ||||
|                             console.print( | ||||
|                                 '[yellow]' | ||||
|                                 'No pps found for ' | ||||
|                                 f'`{brokername}.{account}` ' | ||||
|                                 'account!\n\n' | ||||
|                                 '[/][underline]' | ||||
|                                 'None of the following ledger files exist:\n\n[/]' | ||||
|                                 f'{fpath.as_uri()}\n' | ||||
|                             ) | ||||
|                             return | ||||
| 
 | ||||
|                         pps_by_symbol: dict[str, BrokerdPosition] = pp_msg_table[ | ||||
|                             brokername, | ||||
|                             account, | ||||
|                         ] | ||||
| 
 | ||||
|                         summary: str = ( | ||||
|                             '[dim underline]Piker Position Summary[/] ' | ||||
|                             f'[dim blue underline]{brokername}[/]' | ||||
|                             '[dim].[/]' | ||||
|                             f'[blue underline]{account}[/]' | ||||
|                             f'[dim underline] -> total pps: [/]' | ||||
|                             f'[green]{len(pps_by_symbol)}[/]\n' | ||||
|                         ) | ||||
|                         # for ppdict in positions: | ||||
|                         for fqme, ppmsg in pps_by_symbol.items(): | ||||
|                             # ppmsg = BrokerdPosition(**ppdict) | ||||
|                             size = ppmsg.size | ||||
|                             if size: | ||||
|                                 ppu: float = round( | ||||
|                                     ppmsg.avg_price, | ||||
|                                     ndigits=2, | ||||
|                                 ) | ||||
|                                 cost_basis: str = humanize(size * ppu) | ||||
|                                 h_size: str = humanize(size) | ||||
| 
 | ||||
|                                 if size < 0: | ||||
|                                     pcolor = 'red' | ||||
|                                 else: | ||||
|                                     pcolor = 'green' | ||||
| 
 | ||||
|                                 # semantic-highlight of fqme | ||||
|                                 fqme = ppmsg.symbol | ||||
|                                 tokens = fqme.split('.') | ||||
|                                 styled_fqme = f'[blue underline]{tokens[0]}[/]' | ||||
|                                 for tok in tokens[1:]: | ||||
|                                     styled_fqme += '[dim].[/]' | ||||
|                                     styled_fqme += f'[dim blue underline]{tok}[/]' | ||||
| 
 | ||||
|                                 # TODO: instead display in a ``rich.Table``? | ||||
|                                 summary += ( | ||||
|                                     styled_fqme + | ||||
|                                     '[dim]: [/]' | ||||
|                                     f'[{pcolor}]{h_size}[/]' | ||||
|                                     '[dim blue]u @[/]' | ||||
|                                     f'[{pcolor}]{ppu}[/]' | ||||
|                                     '[dim blue] = [/]' | ||||
|                                     f'[{pcolor}]$ {cost_basis}\n[/]' | ||||
|                                 ) | ||||
| 
 | ||||
|                         console.print(summary) | ||||
| 
 | ||||
|                     finally: | ||||
|                         # exit via ctx cancellation. | ||||
|                         brokerd_ctx: tractor.Context = brokerd_stream._ctx | ||||
|                         await brokerd_ctx.cancel(timeout=1) | ||||
| 
 | ||||
|                     # TODO: once ported to newer tractor branch we should | ||||
|                     # be able to do a loop like this: | ||||
|                     # while brokerd_ctx.cancel_called_remote is None: | ||||
|                     #     await trio.sleep(0.01) | ||||
|                     #     await brokerd_ctx.cancel() | ||||
| 
 | ||||
|             finally: | ||||
|                 await portal.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @ledger.command() | ||||
| def disect( | ||||
|     # "fully_qualified_account_name" | ||||
|     fqan: str, | ||||
|     fqme: str,  # for ib | ||||
| 
 | ||||
|     # TODO: in tractor we should really have | ||||
|     # a debug_mode ctx for wrapping any kind of code no? | ||||
|     pdb: bool = False, | ||||
|     bs_mktid: str = typer.Option( | ||||
|         None, | ||||
|         "-bid", | ||||
|     ), | ||||
|     loglevel: str = typer.Option( | ||||
|         'error', | ||||
|         "-l", | ||||
|     ), | ||||
| ): | ||||
|     from piker.log import get_console_log | ||||
|     from piker.toolz import open_crash_handler | ||||
|     get_console_log(loglevel) | ||||
| 
 | ||||
|     pair: tuple[str, str] | ||||
|     if not (pair := unpack_fqan(fqan)): | ||||
|         raise ValueError(f'{fqan} malformed!?') | ||||
| 
 | ||||
|     brokername, account = pair | ||||
| 
 | ||||
|     # ledger dfs groupby-partitioned by fqme | ||||
|     dfs: dict[str, pl.DataFrame] | ||||
|     # actual ledger instance | ||||
|     ldgr: TransactionLedger | ||||
| 
 | ||||
|     pl.Config.set_tbl_cols(-1) | ||||
|     pl.Config.set_tbl_rows(-1) | ||||
|     with ( | ||||
|         open_crash_handler(), | ||||
|         open_ledger_dfs( | ||||
|             brokername, | ||||
|             account, | ||||
|         ) as (dfs, ldgr), | ||||
|     ): | ||||
| 
 | ||||
|         # look up specific frame for fqme-selected asset | ||||
|         if (df := dfs.get(fqme)) is None: | ||||
|             mktids2fqmes: dict[str, list[str]] = {} | ||||
|             for bs_mktid in dfs: | ||||
|                 df: pl.DataFrame = dfs[bs_mktid] | ||||
|                 fqmes: pl.Series[str] = df['fqme'] | ||||
|                 uniques: list[str] = fqmes.unique() | ||||
|                 mktids2fqmes[bs_mktid] = list(uniques) | ||||
|                 if fqme in uniques: | ||||
|                     break | ||||
|             print( | ||||
|                 f'No specific ledger for fqme={fqme} could be found in\n' | ||||
|                 f'{pformat(mktids2fqmes)}?\n' | ||||
|                 f'Maybe the `{brokername}` backend uses something ' | ||||
|                 'else for its `bs_mktid` than the `fqme`?\n' | ||||
|                 'Scanned for matches in unique fqmes per frame..\n' | ||||
|             ) | ||||
| 
 | ||||
|         # :pray: | ||||
|         assert not df.is_empty() | ||||
| 
 | ||||
|         # muck around in pdbp REPL | ||||
|         breakpoint() | ||||
| 
 | ||||
|         # TODO: we REALLY need a better console REPL for this | ||||
|         # kinda thing.. | ||||
|         # - `xonsh` is an obvious option (and it looks amazing) but | ||||
|         # we need to figure out how to embed it better than just: | ||||
|         # from xonsh.main import main | ||||
|         # main(argv=[]) | ||||
|         # which will not actually inject the `df` to globals? | ||||
|  | @ -17,95 +17,33 @@ | |||
| """ | ||||
| Broker clients, daemons and general back end machinery. | ||||
| """ | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from importlib import import_module | ||||
| from types import ModuleType | ||||
| 
 | ||||
| from tractor.trionics import maybe_open_context | ||||
| # TODO: move to urllib3/requests once supported | ||||
| import asks | ||||
| asks.init('trio') | ||||
| 
 | ||||
| from ._util import ( | ||||
|     log, | ||||
|     BrokerError, | ||||
|     SymbolNotFound, | ||||
|     NoData, | ||||
|     DataUnavailable, | ||||
|     DataThrottle, | ||||
|     resproc, | ||||
|     get_logger, | ||||
| ) | ||||
| 
 | ||||
| __all__: list[str] = [ | ||||
|     'BrokerError', | ||||
|     'SymbolNotFound', | ||||
|     'NoData', | ||||
|     'DataUnavailable', | ||||
|     'DataThrottle', | ||||
|     'resproc', | ||||
|     'get_logger', | ||||
| ] | ||||
| 
 | ||||
| __brokers__: list[str] = [ | ||||
| __brokers__ = [ | ||||
|     'binance', | ||||
|     'questrade', | ||||
|     'robinhood', | ||||
|     'ib', | ||||
|     'kraken', | ||||
|     'kucoin', | ||||
| 
 | ||||
|     # broken but used to work | ||||
|     # 'questrade', | ||||
|     # 'robinhood', | ||||
| 
 | ||||
|     # TODO: we should get on these stat! | ||||
|     # alpaca | ||||
|     # wstrade | ||||
|     # iex | ||||
| 
 | ||||
|     # deribit | ||||
|     # bitso | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| def get_brokermod(brokername: str) -> ModuleType: | ||||
|     ''' | ||||
|     Return the imported broker module by name. | ||||
| 
 | ||||
|     ''' | ||||
|     module: ModuleType = import_module('.' + brokername, 'piker.brokers') | ||||
|     """Return the imported broker module by name. | ||||
|     """ | ||||
|     module = import_module('.' + brokername, 'piker.brokers') | ||||
|     # we only allow monkeying because it's for internal keying | ||||
|     module.name = module.__name__.split('.')[-1] | ||||
|     module.name =  module.__name__.split('.')[-1] | ||||
|     return module | ||||
| 
 | ||||
| 
 | ||||
| def iter_brokermods(): | ||||
|     ''' | ||||
|     Iterate all built-in broker modules. | ||||
| 
 | ||||
|     ''' | ||||
|     """Iterate all built-in broker modules. | ||||
|     """ | ||||
|     for name in __brokers__: | ||||
|         yield get_brokermod(name) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_cached_client( | ||||
|     brokername: str, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> 'Client':  # noqa | ||||
|     ''' | ||||
|     Get a cached broker client from the current actor's local vars. | ||||
| 
 | ||||
|     If one has not been setup do it and cache it. | ||||
| 
 | ||||
|     ''' | ||||
|     brokermod = get_brokermod(brokername) | ||||
|     async with maybe_open_context( | ||||
|         acm_func=brokermod.get_client, | ||||
|         kwargs=kwargs, | ||||
| 
 | ||||
|     ) as (cache_hit, client): | ||||
| 
 | ||||
|         if cache_hit: | ||||
|             log.runtime(f'Reusing existing {client}') | ||||
| 
 | ||||
|         yield client | ||||
|  |  | |||
|  | @ -1,276 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Broker-daemon-actor "endpoint-hooks": the service task entry points for | ||||
| ``brokerd``. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
|     AsyncContextManager, | ||||
| ) | ||||
| import exceptiongroup as eg | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| from . import _util | ||||
| from . import get_brokermod | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ..data import _FeedsBus | ||||
| 
 | ||||
| # `brokerd` enabled modules | ||||
| # TODO: move this def to the `.data` subpkg.. | ||||
| # NOTE: keeping this list as small as possible is part of our caps-sec | ||||
| # model and should be treated with utmost care! | ||||
| _data_mods: list[str] = [ | ||||
|     'piker.brokers.core', | ||||
|     'piker.brokers.data', | ||||
|     'piker.brokers._daemon', | ||||
|     'piker.data', | ||||
|     'piker.data.feed', | ||||
|     'piker.data._sampling' | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| # TODO: we should rename the daemon to datad prolly once we split up | ||||
| # broker vs. data tasks into separate actors? | ||||
| @tractor.context | ||||
| async def _setup_persistent_brokerd( | ||||
|     ctx: tractor.Context, | ||||
|     brokername: str, | ||||
|     loglevel: str | None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Allocate an actor-wide service nursery in ``brokerd`` | ||||
|     such that feeds can be run in the background persistently by | ||||
|     the broker backend as needed. | ||||
| 
 | ||||
|     ''' | ||||
|     # NOTE: we only need to setup logging once (and only) here | ||||
|     # since all hosted daemon tasks will reference this same | ||||
|     # log instance's (actor local) state and thus don't require | ||||
|     # any further (level) configuration on their own B) | ||||
|     log = _util.get_console_log( | ||||
|         loglevel or tractor.current_actor().loglevel, | ||||
|         name=f'{_util.subsys}.{brokername}', | ||||
|     ) | ||||
| 
 | ||||
|     # set global for this actor to this new process-wide instance B) | ||||
|     _util.log = log | ||||
| 
 | ||||
|     # further, set the log level on any broker-specific | ||||
|     # logger instance. | ||||
| 
 | ||||
|     from piker.data import feed | ||||
|     assert not feed._bus | ||||
| 
 | ||||
|     # allocate a nursery to the bus for spawning background | ||||
|     # tasks to service client IPC requests, normally | ||||
|     # `tractor.Context` connections to explicitly required | ||||
|     # `brokerd` endpoints such as: | ||||
|     # - `stream_quotes()`, | ||||
|     # - `manage_history()`, | ||||
|     # - `allocate_persistent_feed()`, | ||||
|     # - `open_symbol_search()` | ||||
|     # NOTE: see ep invocation details inside `.data.feed`. | ||||
|     try: | ||||
|         async with trio.open_nursery() as service_nursery: | ||||
|             bus: _FeedsBus = feed.get_feed_bus( | ||||
|                 brokername, | ||||
|                 service_nursery, | ||||
|             ) | ||||
|             assert bus is feed._bus | ||||
| 
 | ||||
|             # unblock caller | ||||
|             await ctx.started() | ||||
| 
 | ||||
|             # we pin this task to keep the feeds manager active until the | ||||
|             # parent actor decides to tear it down | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|     except eg.ExceptionGroup: | ||||
|         # TODO: likely some underlying `brokerd` IPC connection | ||||
|         # broke so here we handle a respawn and re-connect attempt! | ||||
|         # This likely should pair with development of the OCO task | ||||
|         # nursery in dev over @ `tractor` B) | ||||
|         # https://github.com/goodboy/tractor/pull/363 | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| def broker_init( | ||||
|     brokername: str, | ||||
|     loglevel: str | None = None, | ||||
| 
 | ||||
|     **start_actor_kwargs, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     ModuleType, | ||||
|     dict, | ||||
|     AsyncContextManager, | ||||
| ]: | ||||
|     ''' | ||||
|     Given an input broker name, load all named arguments | ||||
|     which can be passed for daemon endpoint + context spawn | ||||
|     as required in every `brokerd` (actor) service. | ||||
| 
 | ||||
|     This includes: | ||||
|     - load the appropriate <brokername>.py pkg module, | ||||
|     - read any declared `__enable_modules__: list[str]` which will be | ||||
|       passed to `tractor.ActorNursery.start_actor(enabled_modules=<this>)` | ||||
|       at actor start time, | ||||
|     - deliver a reference to the daemon lifetime fixture, which | ||||
|       for now is always the `_setup_persistent_brokerd()` context defined | ||||
|       above. | ||||
| 
 | ||||
|     ''' | ||||
|     from ..brokers import get_brokermod | ||||
|     brokermod = get_brokermod(brokername) | ||||
|     modpath: str = brokermod.__name__ | ||||
| 
 | ||||
|     start_actor_kwargs['name'] = f'brokerd.{brokername}' | ||||
|     start_actor_kwargs.update( | ||||
|         getattr( | ||||
|             brokermod, | ||||
|             '_spawn_kwargs', | ||||
|             {}, | ||||
|         ) | ||||
|     ) | ||||
| 
 | ||||
|     # XXX TODO: make this not so hacky/monkeypatched.. | ||||
|     # -> we need a sane way to configure the logging level for all | ||||
|     # code running in brokerd. | ||||
|     # if utilmod := getattr(brokermod, '_util', False): | ||||
|     #     utilmod.log.setLevel(loglevel.upper()) | ||||
| 
 | ||||
|     # lookup actor-enabled modules declared by the backend offering the | ||||
|     # `brokerd` endpoint(s). | ||||
|     enabled: list[str] | ||||
|     enabled = start_actor_kwargs['enable_modules'] = [ | ||||
|         __name__,  # so that eps from THIS mod can be invoked | ||||
|         modpath, | ||||
|     ] | ||||
|     for submodname in getattr( | ||||
|         brokermod, | ||||
|         '__enable_modules__', | ||||
|         [], | ||||
|     ): | ||||
|         subpath: str = f'{modpath}.{submodname}' | ||||
|         enabled.append(subpath) | ||||
| 
 | ||||
|     return ( | ||||
|         brokermod, | ||||
|         start_actor_kwargs,  # to `ActorNursery.start_actor()` | ||||
| 
 | ||||
|         # XXX see impl above; contains all (actor global) | ||||
|         # setup/teardown expected in all `brokerd` actor instances. | ||||
|         _setup_persistent_brokerd, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| async def spawn_brokerd( | ||||
| 
 | ||||
|     brokername: str, | ||||
|     loglevel: str | None = None, | ||||
| 
 | ||||
|     **tractor_kwargs, | ||||
| 
 | ||||
| ) -> bool: | ||||
| 
 | ||||
|     from piker.service._util import log  # use service mngr log | ||||
|     log.info(f'Spawning {brokername} broker daemon') | ||||
| 
 | ||||
|     ( | ||||
|         brokermod, | ||||
|         tractor_kwargs, | ||||
|         daemon_fixture_ep, | ||||
|     ) = broker_init( | ||||
|         brokername, | ||||
|         loglevel, | ||||
|         **tractor_kwargs, | ||||
|     ) | ||||
| 
 | ||||
|     brokermod = get_brokermod(brokername) | ||||
|     extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {}) | ||||
|     tractor_kwargs.update(extra_tractor_kwargs) | ||||
| 
 | ||||
|     # ask `pikerd` to spawn a new sub-actor and manage it under its | ||||
|     # actor nursery | ||||
|     from piker.service import Services | ||||
| 
 | ||||
|     dname: str = tractor_kwargs.pop('name')  # f'brokerd.{brokername}' | ||||
|     portal = await Services.actor_n.start_actor( | ||||
|         dname, | ||||
|         enable_modules=_data_mods + tractor_kwargs.pop('enable_modules'), | ||||
|         debug_mode=Services.debug_mode, | ||||
|         **tractor_kwargs | ||||
|     ) | ||||
| 
 | ||||
|     # NOTE: the service mngr expects an already spawned actor + its | ||||
|     # portal ref in order to do non-blocking setup of brokerd | ||||
|     # service nursery. | ||||
|     await Services.start_service_task( | ||||
|         dname, | ||||
|         portal, | ||||
| 
 | ||||
|         # signature of target root-task endpoint | ||||
|         daemon_fixture_ep, | ||||
|         brokername=brokername, | ||||
|         loglevel=loglevel, | ||||
|     ) | ||||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_spawn_brokerd( | ||||
| 
 | ||||
|     brokername: str, | ||||
|     loglevel: str | None = None, | ||||
| 
 | ||||
|     **pikerd_kwargs, | ||||
| 
 | ||||
| ) -> tractor.Portal: | ||||
|     ''' | ||||
|     Helper to spawn a brokerd service *from* a client who wishes to | ||||
|     use the sub-actor-daemon but is fine with re-using any existing | ||||
|     and contactable `brokerd`. | ||||
| 
 | ||||
|     More or less, acts as a cached-actor-getter factory. | ||||
| 
 | ||||
|     ''' | ||||
|     from piker.service import maybe_spawn_daemon | ||||
| 
 | ||||
|     async with maybe_spawn_daemon( | ||||
| 
 | ||||
|         f'brokerd.{brokername}', | ||||
|         service_task_target=spawn_brokerd, | ||||
|         spawn_args={ | ||||
|             'brokername': brokername, | ||||
|         }, | ||||
|         loglevel=loglevel, | ||||
| 
 | ||||
|         **pikerd_kwargs, | ||||
| 
 | ||||
|     ) as portal: | ||||
|         yield portal | ||||
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers) | ||||
| # Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -15,32 +15,13 @@ | |||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Handy cross-broker utils. | ||||
| 
 | ||||
| Handy utils. | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from functools import partial | ||||
| 
 | ||||
| import json | ||||
| import httpx | ||||
| import asks | ||||
| import logging | ||||
| 
 | ||||
| from ..log import ( | ||||
|     get_logger, | ||||
|     get_console_log, | ||||
|     colorize_json, | ||||
| ) | ||||
| subsys: str = 'piker.brokers' | ||||
| 
 | ||||
| # NOTE: level should be reset by any actor that is spawned | ||||
| # as well as given a (more) explicit name/key such | ||||
| # as `piker.brokers.binance` matching the subpkg. | ||||
| log = get_logger(subsys) | ||||
| 
 | ||||
| get_console_log = partial( | ||||
|     get_console_log, | ||||
|     name=subsys, | ||||
| ) | ||||
| from ..log import colorize_json | ||||
| 
 | ||||
| 
 | ||||
| class BrokerError(Exception): | ||||
|  | @ -51,7 +32,6 @@ class SymbolNotFound(BrokerError): | |||
|     "Symbol not found by broker search" | ||||
| 
 | ||||
| 
 | ||||
| # TODO: these should probably be moved to `.tsp/.data`? | ||||
| class NoData(BrokerError): | ||||
|     ''' | ||||
|     Symbol data not permitted or no data | ||||
|  | @ -61,15 +41,14 @@ class NoData(BrokerError): | |||
|     def __init__( | ||||
|         self, | ||||
|         *args, | ||||
|         info: dict|None = None, | ||||
|         frame_size: int = 1000, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         super().__init__(*args) | ||||
|         self.info: dict|None = info | ||||
| 
 | ||||
|         # when raised, machinery can check if the backend | ||||
|         # set a "frame size" for doing datetime calcs. | ||||
|         # self.frame_size: int = 1000 | ||||
|         self.frame_size: int = 1000 | ||||
| 
 | ||||
| 
 | ||||
| class DataUnavailable(BrokerError): | ||||
|  | @ -90,19 +69,18 @@ class DataThrottle(BrokerError): | |||
|     # TODO: add in throttle metrics/feedback | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| def resproc( | ||||
|     resp: httpx.Response, | ||||
|     resp: asks.response_objects.Response, | ||||
|     log: logging.Logger, | ||||
|     return_json: bool = True, | ||||
|     log_resp: bool = False, | ||||
| 
 | ||||
| ) -> httpx.Response: | ||||
|     ''' | ||||
|     Process response and return its json content. | ||||
| ) -> asks.response_objects.Response: | ||||
|     """Process response and return its json content. | ||||
| 
 | ||||
|     Raise the appropriate error on non-200 OK responses. | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     if not resp.status_code == 200: | ||||
|         raise BrokerError(resp.body) | ||||
|     try: | ||||
|  |  | |||
|  | @ -0,0 +1,570 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Binance backend | ||||
| 
 | ||||
| """ | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from datetime import datetime | ||||
| from typing import ( | ||||
|     Any, Union, Optional, | ||||
|     AsyncGenerator, Callable, | ||||
| ) | ||||
| import time | ||||
| 
 | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| import pendulum | ||||
| import asks | ||||
| from fuzzywuzzy import process as fuzzy | ||||
| import numpy as np | ||||
| import tractor | ||||
| from pydantic.dataclasses import dataclass | ||||
| import wsproto | ||||
| 
 | ||||
| from .._cacheables import open_cached_client | ||||
| from ._util import resproc, SymbolNotFound | ||||
| from ..log import get_logger, get_console_log | ||||
| from ..data import ShmArray | ||||
| from ..data.types import Struct | ||||
| from ..data._web_bs import open_autorecon_ws, NoBsWs | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| _url = 'https://api.binance.com' | ||||
| 
 | ||||
| 
 | ||||
| # Broker specific ohlc schema (rest) | ||||
| _ohlc_dtype = [ | ||||
|     ('index', int), | ||||
|     ('time', int), | ||||
|     ('open', float), | ||||
|     ('high', float), | ||||
|     ('low', float), | ||||
|     ('close', float), | ||||
|     ('volume', float), | ||||
|     ('bar_wap', float),  # will be zeroed by sampler if not filled | ||||
| 
 | ||||
|     # XXX: some additional fields are defined in the docs: | ||||
|     # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data | ||||
| 
 | ||||
|     # ('close_time', int), | ||||
|     # ('quote_vol', float), | ||||
|     # ('num_trades', int), | ||||
|     # ('buy_base_vol', float), | ||||
|     # ('buy_quote_vol', float), | ||||
|     # ('ignore', float), | ||||
| ] | ||||
| 
 | ||||
| # UI components allow this to be declared such that additional | ||||
| # (historical) fields can be exposed. | ||||
| ohlc_dtype = np.dtype(_ohlc_dtype) | ||||
| 
 | ||||
| _show_wap_in_history = False | ||||
| 
 | ||||
| 
 | ||||
| # https://binance-docs.github.io/apidocs/spot/en/#exchange-information | ||||
| class Pair(Struct, frozen=True): | ||||
|     symbol: str | ||||
|     status: str | ||||
| 
 | ||||
|     baseAsset: str | ||||
|     baseAssetPrecision: int | ||||
|     cancelReplaceAllowed: bool | ||||
|     allowTrailingStop: bool | ||||
|     quoteAsset: str | ||||
|     quotePrecision: int | ||||
|     quoteAssetPrecision: int | ||||
| 
 | ||||
|     baseCommissionPrecision: int | ||||
|     quoteCommissionPrecision: int | ||||
| 
 | ||||
|     orderTypes: list[str] | ||||
| 
 | ||||
|     icebergAllowed: bool | ||||
|     ocoAllowed: bool | ||||
|     quoteOrderQtyMarketAllowed: bool | ||||
|     isSpotTradingAllowed: bool | ||||
|     isMarginTradingAllowed: bool | ||||
| 
 | ||||
|     filters: list[dict[str, Union[str, int, float]]] | ||||
|     permissions: list[str] | ||||
| 
 | ||||
| 
 | ||||
| @dataclass | ||||
| class OHLC: | ||||
|     """Description of the flattened OHLC quote format. | ||||
| 
 | ||||
|     For schema details see: | ||||
|     https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams | ||||
| 
 | ||||
|     """ | ||||
|     time: int | ||||
| 
 | ||||
|     open: float | ||||
|     high: float | ||||
|     low: float | ||||
|     close: float | ||||
|     volume: float | ||||
| 
 | ||||
|     close_time: int | ||||
| 
 | ||||
|     quote_vol: float | ||||
|     num_trades: int | ||||
|     buy_base_vol: float | ||||
|     buy_quote_vol: float | ||||
|     ignore: int | ||||
| 
 | ||||
|     # null the place holder for `bar_wap` until we | ||||
|     # figure out what to extract for this. | ||||
|     bar_wap: float = 0.0 | ||||
| 
 | ||||
| 
 | ||||
| # convert datetime obj timestamp to unixtime in milliseconds | ||||
| def binance_timestamp(when): | ||||
|     return int((when.timestamp() * 1000) + (when.microsecond / 1000)) | ||||
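| # e.g. a quick sanity check of the conversion: | ||||
| # binance_timestamp(pendulum.datetime(2022, 1, 1)) -> 1640995200000 | ||||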
| 
 | ||||
| 
 | ||||
| class Client: | ||||
| 
 | ||||
|     def __init__(self) -> None: | ||||
|         self._sesh = asks.Session(connections=4) | ||||
|         self._sesh.base_location = _url | ||||
|         self._pairs: dict[str, Any] = {} | ||||
| 
 | ||||
|     async def _api( | ||||
|         self, | ||||
|         method: str, | ||||
|         params: dict, | ||||
|     ) -> dict[str, Any]: | ||||
|         resp = await self._sesh.get( | ||||
|             path=f'/api/v3/{method}', | ||||
|             params=params, | ||||
|             timeout=float('inf') | ||||
|         ) | ||||
|         return resproc(resp, log) | ||||
| 
 | ||||
|     async def symbol_info( | ||||
| 
 | ||||
|         self, | ||||
|         sym: Optional[str] = None, | ||||
| 
 | ||||
|     ) -> dict[str, Any]: | ||||
|         '''Get symbol info for the exchange. | ||||
| 
 | ||||
|         ''' | ||||
|         # TODO: we can load from our self._pairs cache | ||||
|         # on repeat calls... | ||||
| 
 | ||||
|         # will retrieve all symbols by default | ||||
|         params = {} | ||||
| 
 | ||||
|         if sym is not None: | ||||
|             sym = sym.upper() | ||||
|             params = {'symbol': sym} | ||||
| 
 | ||||
|         resp = await self._api( | ||||
|             'exchangeInfo', | ||||
|             params=params, | ||||
|         ) | ||||
| 
 | ||||
|         entries = resp['symbols'] | ||||
|         if not entries: | ||||
|             raise SymbolNotFound(f'{sym} not found') | ||||
| 
 | ||||
|         syms = {item['symbol']: item for item in entries} | ||||
| 
 | ||||
|         if sym is not None: | ||||
|             return syms[sym] | ||||
|         else: | ||||
|             return syms | ||||
| 
 | ||||
|     async def cache_symbols( | ||||
|         self, | ||||
|     ) -> dict: | ||||
|         if not self._pairs: | ||||
|             self._pairs = await self.symbol_info() | ||||
| 
 | ||||
|         return self._pairs | ||||
| 
 | ||||
|     async def search_symbols( | ||||
|         self, | ||||
|         pattern: str, | ||||
|         limit: Optional[int] = None, | ||||
|     ) -> dict[str, Any]: | ||||
|         if self._pairs is not None: | ||||
|             data = self._pairs | ||||
|         else: | ||||
|             data = await self.symbol_info() | ||||
| 
 | ||||
|         matches = fuzzy.extractBests( | ||||
|             pattern, | ||||
|             data, | ||||
|             score_cutoff=50, | ||||
|         ) | ||||
|         # repack in dict form | ||||
|         return {item[0]['symbol']: item[0] | ||||
|                 for item in matches} | ||||
| 
 | ||||
|     async def bars( | ||||
|         self, | ||||
|         symbol: str, | ||||
|         start_dt: Optional[datetime] = None, | ||||
|         end_dt: Optional[datetime] = None, | ||||
|         limit: int = 1000,  # <- max allowed per query | ||||
|         as_np: bool = True, | ||||
| 
 | ||||
|     ) -> dict: | ||||
| 
 | ||||
|         if end_dt is None: | ||||
|             end_dt = pendulum.now('UTC') | ||||
| 
 | ||||
|         if start_dt is None: | ||||
|             start_dt = end_dt.start_of( | ||||
|                 'minute').subtract(minutes=limit) | ||||
| 
 | ||||
|         start_time = binance_timestamp(start_dt) | ||||
|         end_time = binance_timestamp(end_dt) | ||||
| 
 | ||||
|         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data | ||||
|         bars = await self._api( | ||||
|             'klines', | ||||
|             params={ | ||||
|                 'symbol': symbol.upper(), | ||||
|                 'interval': '1m', | ||||
|                 'startTime': start_time, | ||||
|                 'endTime': end_time, | ||||
|                 'limit': limit | ||||
|             } | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: pack this bars scheme into a ``pydantic`` validator type: | ||||
|         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data | ||||
| 
 | ||||
|         # TODO: we should port this to ``pydantic`` to avoid doing | ||||
|         # manual validation ourselves.. | ||||
|         new_bars = [] | ||||
|         for i, bar in enumerate(bars): | ||||
| 
 | ||||
|             bar = OHLC(*bar) | ||||
| 
 | ||||
|             row = [] | ||||
|             for j, (name, ftype) in enumerate(_ohlc_dtype[1:]): | ||||
| 
 | ||||
|                 # TODO: maybe we should go nanoseconds on all | ||||
|                 # history time stamps? | ||||
|                 if name == 'time': | ||||
|                     # convert to epoch seconds: float | ||||
|                     row.append(bar.time / 1000.0) | ||||
| 
 | ||||
|                 else: | ||||
|                     row.append(getattr(bar, name)) | ||||
| 
 | ||||
|             new_bars.append((i,) + tuple(row)) | ||||
| 
 | ||||
|         array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars | ||||
|         return array | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_client() -> Client: | ||||
|     client = Client() | ||||
|     await client.cache_symbols() | ||||
|     yield client | ||||
| 
 | ||||
| 
 | ||||
| # validation type | ||||
| class AggTrade(Struct): | ||||
|     e: str  # Event type | ||||
|     E: int  # Event time | ||||
|     s: str  # Symbol | ||||
|     a: int  # Aggregate trade ID | ||||
|     p: float  # Price | ||||
|     q: float  # Quantity | ||||
|     f: int  # First trade ID | ||||
|     l: int  # Last trade ID | ||||
|     T: int  # Trade time | ||||
|     m: bool  # Is the buyer the market maker? | ||||
|     M: bool  # Ignore | ||||
| 
 | ||||
| 
 | ||||
| async def stream_messages(ws: NoBsWs) -> AsyncGenerator[tuple[str, dict], None]: | ||||
| 
 | ||||
|     timeouts = 0 | ||||
|     while True: | ||||
| 
 | ||||
|         with trio.move_on_after(3) as cs: | ||||
|             msg = await ws.recv_msg() | ||||
| 
 | ||||
|         if cs.cancelled_caught: | ||||
| 
 | ||||
|             timeouts += 1 | ||||
|             if timeouts > 2: | ||||
|                 log.error("binance feed seems down and slow af? rebooting...") | ||||
|                 await ws._connect() | ||||
| 
 | ||||
|             continue | ||||
| 
 | ||||
|         # for l1 streams binance doesn't add an event type field so | ||||
|         # identify those messages by matching keys | ||||
|         # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams | ||||
| 
 | ||||
|         if msg.get('u'): | ||||
|             sym = msg['s'] | ||||
|             bid = float(msg['b']) | ||||
|             bsize = float(msg['B']) | ||||
|             ask = float(msg['a']) | ||||
|             asize = float(msg['A']) | ||||
| 
 | ||||
|             yield 'l1', { | ||||
|                 'symbol': sym, | ||||
|                 'ticks': [ | ||||
|                     {'type': 'bid', 'price': bid, 'size': bsize}, | ||||
|                     {'type': 'bsize', 'price': bid, 'size': bsize}, | ||||
|                     {'type': 'ask', 'price': ask, 'size': asize}, | ||||
|                     {'type': 'asize', 'price': ask, 'size': asize} | ||||
|                 ] | ||||
|             } | ||||
| 
 | ||||
|         elif msg.get('e') == 'aggTrade': | ||||
| 
 | ||||
|             # NOTE: this is purely for a definition, ``msgspec.Struct`` | ||||
|             # does not runtime-validate until you decode/encode. | ||||
|             # see: https://jcristharif.com/msgspec/structs.html#type-validation | ||||
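|             # i.e. this `AggTrade(**msg)` call accepts the raw (str | ||||
|             # valued) ws fields as-is; only a round-trip through a | ||||
|             # msgspec encode/decode would enforce the annotated types. | ||||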
|             msg = AggTrade(**msg) | ||||
| 
 | ||||
|             # TODO: type out and require this quote format | ||||
|             # from all backends! | ||||
|             yield 'trade', { | ||||
|                 'symbol': msg.s, | ||||
|                 'last': msg.p, | ||||
|                 'brokerd_ts': time.time(), | ||||
|                 'ticks': [{ | ||||
|                     'type': 'trade', | ||||
|                     'price': float(msg.p), | ||||
|                     'size': float(msg.q), | ||||
|                     'broker_ts': msg.T, | ||||
|                 }], | ||||
|             } | ||||
| 
 | ||||
| 
 | ||||
| def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]: | ||||
|     """Create a request subscription packet dict. | ||||
| 
 | ||||
|     https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams | ||||
|     """ | ||||
|     return { | ||||
|         'method': 'SUBSCRIBE', | ||||
|         'params': [ | ||||
|             f'{pair.lower()}@{sub_name}' | ||||
|             for pair in pairs | ||||
|         ], | ||||
|         'id': uid | ||||
|     } | ||||
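| # e.g. a sketch of the resulting packet: | ||||
| # make_sub(['BTCUSDT'], 'bookTicker', 1) -> | ||||
| # {'method': 'SUBSCRIBE', 'params': ['btcusdt@bookTicker'], 'id': 1} | ||||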
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_history_client( | ||||
|     symbol: str, | ||||
| 
 | ||||
| ) -> tuple[Callable, dict]: | ||||
| 
 | ||||
|     # TODO implement history getter for the new storage layer. | ||||
|     async with open_cached_client('binance') as client: | ||||
| 
 | ||||
|         async def get_ohlc( | ||||
|             end_dt: Optional[datetime] = None, | ||||
|             start_dt: Optional[datetime] = None, | ||||
| 
 | ||||
|         ) -> tuple[ | ||||
|             np.ndarray, | ||||
|             datetime,  # start | ||||
|             datetime,  # end | ||||
|         ]: | ||||
| 
 | ||||
|             array = await client.bars( | ||||
|                 symbol, | ||||
|                 start_dt=start_dt, | ||||
|                 end_dt=end_dt, | ||||
|             ) | ||||
|             start_dt = pendulum.from_timestamp(array[0]['time']) | ||||
|             end_dt = pendulum.from_timestamp(array[-1]['time']) | ||||
|             return array, start_dt, end_dt | ||||
| 
 | ||||
|         yield get_ohlc, {'erlangs': 3, 'rate': 3} | ||||
| 
 | ||||
| 
 | ||||
| async def backfill_bars( | ||||
|     sym: str, | ||||
|     shm: ShmArray,  # type: ignore # noqa | ||||
|     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED, | ||||
| ) -> None: | ||||
|     """Fill historical bars into shared mem / storage afap. | ||||
|     """ | ||||
|     with trio.CancelScope() as cs: | ||||
|         async with open_cached_client('binance') as client: | ||||
|             bars = await client.bars(symbol=sym) | ||||
|             shm.push(bars) | ||||
|             task_status.started(cs) | ||||
| 
 | ||||
| 
 | ||||
| async def stream_quotes( | ||||
| 
 | ||||
|     send_chan: trio.abc.SendChannel, | ||||
|     symbols: list[str], | ||||
|     feed_is_live: trio.Event, | ||||
|     loglevel: Optional[str] = None, | ||||
| 
 | ||||
|     # startup sync | ||||
|     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> None: | ||||
|     # XXX: required to propagate ``tractor`` loglevel to piker logging | ||||
|     get_console_log(loglevel or tractor.current_actor().loglevel) | ||||
| 
 | ||||
|     sym_infos = {} | ||||
|     uid = 0 | ||||
| 
 | ||||
|     async with ( | ||||
|         open_cached_client('binance') as client, | ||||
|         send_chan as send_chan, | ||||
|     ): | ||||
| 
 | ||||
|         # keep client cached for real-time section | ||||
|         cache = await client.cache_symbols() | ||||
| 
 | ||||
|         for sym in symbols: | ||||
|             d = cache[sym.upper()] | ||||
|             syminfo = Pair(**d)  # validation | ||||
| 
 | ||||
|             si = sym_infos[sym] = syminfo.to_dict() | ||||
| 
 | ||||
|             # XXX: after manually inspecting the response format we | ||||
|             # just directly pick out the info we need | ||||
|             si['price_tick_size'] = float(syminfo.filters[0]['tickSize']) | ||||
|             si['lot_tick_size'] = float(syminfo.filters[2]['stepSize']) | ||||
|             si['asset_type'] = 'crypto' | ||||
| 
 | ||||
|         symbol = symbols[0] | ||||
| 
 | ||||
|         init_msgs = { | ||||
|             # pass back token, and bool, signalling if we're the writer | ||||
|             # and that history has been written | ||||
|             symbol: { | ||||
|                 'symbol_info': sym_infos[symbol], | ||||
|                 'shm_write_opts': {'sum_tick_vml': False}, | ||||
|                 'fqsn': symbol, | ||||
|             }, | ||||
|         } | ||||
| 
 | ||||
|         @acm | ||||
|         async def subscribe(ws: wsproto.WSConnection): | ||||
|             # setup subs | ||||
| 
 | ||||
|             # trade data (aka L1) | ||||
|             # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker | ||||
|             l1_sub = make_sub(symbols, 'bookTicker', uid) | ||||
|             await ws.send_msg(l1_sub) | ||||
| 
 | ||||
|             # aggregate (each order clear by taker **not** by maker) | ||||
|             # trades data: | ||||
|             # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams | ||||
|             agg_trades_sub = make_sub(symbols, 'aggTrade', uid) | ||||
|             await ws.send_msg(agg_trades_sub) | ||||
| 
 | ||||
|             # ack from ws server | ||||
|             res = await ws.recv_msg() | ||||
|             assert res['id'] == uid | ||||
| 
 | ||||
|             yield | ||||
| 
 | ||||
|             subs = [] | ||||
|             for sym in symbols: | ||||
|                 subs.append(f"{sym}@aggTrade") | ||||
|                 subs.append(f"{sym}@bookTicker") | ||||
| 
 | ||||
|             # unsub from all pairs on teardown | ||||
|             await ws.send_msg({ | ||||
|                 "method": "UNSUBSCRIBE", | ||||
|                 "params": subs, | ||||
|                 "id": uid, | ||||
|             }) | ||||
| 
 | ||||
|             # XXX: do we need to ack the unsub? | ||||
|             # await ws.recv_msg() | ||||
| 
 | ||||
|         async with open_autorecon_ws( | ||||
|             'wss://stream.binance.com/ws', | ||||
|             fixture=subscribe, | ||||
|         ) as ws: | ||||
| 
 | ||||
|             # pull a first quote and deliver | ||||
|             msg_gen = stream_messages(ws) | ||||
| 
 | ||||
|             typ, quote = await msg_gen.__anext__() | ||||
| 
 | ||||
|             while typ != 'trade': | ||||
|                 # TODO: use ``anext()`` when it lands in 3.10! | ||||
|                 typ, quote = await msg_gen.__anext__() | ||||
| 
 | ||||
|             task_status.started((init_msgs, quote)) | ||||
| 
 | ||||
|             # signal to caller feed is ready for consumption | ||||
|             feed_is_live.set() | ||||
| 
 | ||||
|             # import time | ||||
|             # last = time.time() | ||||
| 
 | ||||
|             # start streaming | ||||
|             async for typ, msg in msg_gen: | ||||
| 
 | ||||
|                 # period = time.time() - last | ||||
|                 # hz = 1/period if period else float('inf') | ||||
|                 # if hz > 60: | ||||
|                 #     log.info(f'Binance quotez : {hz}') | ||||
| 
 | ||||
|                 topic = msg['symbol'].lower() | ||||
|                 await send_chan.send({topic: msg}) | ||||
|                 # last = time.time() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_symbol_search( | ||||
|     ctx: tractor.Context, | ||||
| ) -> Client: | ||||
|     async with open_cached_client('binance') as client: | ||||
| 
 | ||||
|         # load all symbols locally for fast search | ||||
|         cache = await client.cache_symbols() | ||||
|         await ctx.started() | ||||
| 
 | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             async for pattern in stream: | ||||
|                 # results = await client.symbol_info(sym=pattern.upper()) | ||||
| 
 | ||||
|                 matches = fuzzy.extractBests( | ||||
|                     pattern, | ||||
|                     cache, | ||||
|                     score_cutoff=50, | ||||
|                 ) | ||||
|                 # repack in dict form | ||||
|                 await stream.send( | ||||
|                     {item[0]['symbol']: item[0] | ||||
|                      for item in matches} | ||||
|                 ) | ||||
|  | @ -1,60 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) | ||||
| #   Guillermo Rodriguez (aka ze jefe) | ||||
| #   Tyler Goodlet | ||||
| #   (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| binancial secs on the floor, in the office, behind the dumpster. | ||||
| 
 | ||||
| """ | ||||
| from .api import ( | ||||
|     get_client, | ||||
| ) | ||||
| from .feed import ( | ||||
|     get_mkt_info, | ||||
|     open_history_client, | ||||
|     open_symbol_search, | ||||
|     stream_quotes, | ||||
| ) | ||||
| from .broker import ( | ||||
|     open_trade_dialog, | ||||
|     get_cost, | ||||
| ) | ||||
| from .venues import ( | ||||
|     SpotPair, | ||||
|     FutesPair, | ||||
| ) | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'get_client', | ||||
|     'get_mkt_info', | ||||
|     'get_cost', | ||||
|     'SpotPair', | ||||
|     'FutesPair', | ||||
|     'open_trade_dialog', | ||||
|     'open_history_client', | ||||
|     'open_symbol_search', | ||||
|     'stream_quotes', | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| # `brokerd` modules | ||||
| __enable_modules__: list[str] = [ | ||||
|     'api', | ||||
|     'feed', | ||||
|     'broker', | ||||
| ] | ||||
										
											
												File diff suppressed because it is too large
							|  | @ -1,710 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) | ||||
| #   Guillermo Rodriguez (aka ze jefe) | ||||
| #   Tyler Goodlet | ||||
| #   (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Live order control B) | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AsyncIterator, | ||||
| ) | ||||
| import time | ||||
| from time import time_ns | ||||
| 
 | ||||
| from bidict import bidict | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| from piker.accounting import ( | ||||
|     Asset, | ||||
| ) | ||||
| from piker.brokers._util import ( | ||||
|     get_logger, | ||||
| ) | ||||
| from piker.data._web_bs import ( | ||||
|     open_autorecon_ws, | ||||
|     NoBsWs, | ||||
| ) | ||||
| from piker.brokers import ( | ||||
|     open_cached_client, | ||||
|     BrokerError, | ||||
| ) | ||||
| from piker.clearing import ( | ||||
|     OrderDialogs, | ||||
| ) | ||||
| from piker.clearing._messages import ( | ||||
|     BrokerdOrder, | ||||
|     BrokerdOrderAck, | ||||
|     BrokerdStatus, | ||||
|     BrokerdPosition, | ||||
|     BrokerdFill, | ||||
|     BrokerdCancel, | ||||
|     BrokerdError, | ||||
|     Status, | ||||
|     Order, | ||||
| ) | ||||
| from .venues import ( | ||||
|     Pair, | ||||
|     _futes_ws, | ||||
|     _testnet_futes_ws, | ||||
| ) | ||||
| from .api import Client | ||||
| 
 | ||||
| log = get_logger('piker.brokers.binance') | ||||
| 
 | ||||
| 
 | ||||
| # Fee schedule template, mostly for paper engine fees modelling. | ||||
| # https://www.binance.com/en/support/faq/what-are-market-makers-and-takers-360007720071 | ||||
| def get_cost( | ||||
|     price: float, | ||||
|     size: float, | ||||
|     is_taker: bool = False, | ||||
| 
 | ||||
| ) -> float: | ||||
| 
 | ||||
|     # https://www.binance.com/en/fee/trading | ||||
|     cb: float = price * size | ||||
|     match is_taker: | ||||
|         case True: | ||||
|             return cb * 0.001000 | ||||
| 
 | ||||
|         case False if cb < 1e6: | ||||
|             return cb * 0.001000 | ||||
| 
 | ||||
|         case False if 1e6 <= cb < 5e6: | ||||
|             return cb * 0.000900 | ||||
| 
 | ||||
|         # NOTE: there's more but are you really going | ||||
|         # to have a cb bigger than this per trade? | ||||
|         case False if cb >= 5e6: | ||||
|             return cb * 0.000800 | ||||
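
A quick sanity check of the tiered maker/taker arithmetic above (a minimal sketch: the bps values follow the tiers hard-coded in `get_cost()`, assuming the corrected `1e6 <= cb < 5e6` middle-tier guard):

    # 2 units cleared at 30k quote per unit -> 60k cumulative notional (cb)
    assert get_cost(30_000, 2.0, is_taker=True) == 60_000 * 0.001   # taker: 10 bps
    assert get_cost(30_000, 2.0) == 60_000 * 0.001                  # maker, cb < 1e6
    assert get_cost(30_000, 50.0) == 1_500_000 * 0.0009             # maker, 1e6 <= cb < 5e6
    assert get_cost(30_000, 200.0) == 6_000_000 * 0.0008            # maker, cb >= 5e6
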
| 
 | ||||
| 
 | ||||
| async def handle_order_requests( | ||||
|     ems_order_stream: tractor.MsgStream, | ||||
|     client: Client, | ||||
|     dids: bidict[str, int], | ||||
|     dialogs: OrderDialogs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Receive order requests from `emsd`, translate them into | ||||
|     broker API calls and transmit. | ||||
| 
 | ||||
|     ''' | ||||
|     msg: dict | BrokerdOrder | BrokerdCancel | ||||
|     async for msg in ems_order_stream: | ||||
|         log.info(f'Rx order request:\n{pformat(msg)}') | ||||
|         match msg: | ||||
|             case { | ||||
|                 'action': 'cancel', | ||||
|             }: | ||||
|                 cancel = BrokerdCancel(**msg) | ||||
|                 existing: BrokerdOrder | None = dialogs.get(cancel.oid) | ||||
|                 if not existing: | ||||
|                     log.error( | ||||
|                         f'NO Existing order-dialog for {cancel.oid}!?' | ||||
|                     ) | ||||
|                     await ems_order_stream.send(BrokerdError( | ||||
|                         oid=cancel.oid, | ||||
| 
 | ||||
|                         # TODO: do we need the symbol? | ||||
|                         # https://github.com/pikers/piker/issues/514 | ||||
|                         symbol='unknown', | ||||
| 
 | ||||
|                         reason=( | ||||
|                             'Invalid `binance` order request dialog oid' | ||||
|                         ) | ||||
|                     )) | ||||
|                     continue | ||||
| 
 | ||||
|                 else: | ||||
|                     symbol: str = existing['symbol'] | ||||
|                     try: | ||||
|                         await client.submit_cancel( | ||||
|                             symbol, | ||||
|                             cancel.oid, | ||||
|                         ) | ||||
|                     except BrokerError as be: | ||||
|                         await ems_order_stream.send( | ||||
|                             BrokerdError( | ||||
|                                 oid=msg['oid'], | ||||
|                                 symbol=symbol, | ||||
|                                 reason=( | ||||
|                                     '`binance` CANCEL failed:\n' | ||||
|                                     f'{be}' | ||||
|                                 )) | ||||
|                         ) | ||||
|                         continue | ||||
| 
 | ||||
|             case { | ||||
|                 'account': ('binance.usdtm' | 'binance.spot') as account, | ||||
|                 'action': action, | ||||
|             } if action in {'buy', 'sell'}: | ||||
| 
 | ||||
|                 # validate | ||||
|                 order = BrokerdOrder(**msg) | ||||
|                 oid: str = order.oid  # emsd order id | ||||
|                 modify: bool = False | ||||
| 
 | ||||
|                 # NOTE: check and report edits | ||||
|                 if existing := dialogs.get(order.oid): | ||||
|                     log.info( | ||||
|                         f'Existing order for {oid} updated:\n' | ||||
|                         f'{pformat(existing.maps[-1])} -> {pformat(msg)}' | ||||
|                     ) | ||||
|                     modify = True | ||||
| 
 | ||||
|                     # only add new msg AFTER the existing check | ||||
|                     dialogs.add_msg(oid, msg) | ||||
| 
 | ||||
|                 else: | ||||
|                     # XXX NOTE: update before the ack! | ||||
|                     # track latest request state such that map | ||||
|                     # lookups start at the most recent msg and then | ||||
|                     # scan reverse-chronologically. | ||||
|                     dialogs.add_msg(oid, msg) | ||||
| 
 | ||||
|                     # XXX: ACK the request **immediately** before sending | ||||
|                     # the api side request to ensure the ems maps the oid -> | ||||
|                     # reqid correctly! | ||||
|                     resp = BrokerdOrderAck( | ||||
|                         oid=oid,  # ems order request id | ||||
|                         reqid=oid,  # our custom int mapping | ||||
|                         account='binance',  # piker account | ||||
|                     ) | ||||
|                     await ems_order_stream.send(resp) | ||||
| 
 | ||||
|                 # call our client api to submit the order | ||||
|                 # NOTE: modifies only require diff key for user oid: | ||||
|                 # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade | ||||
|                 try: | ||||
|                     reqid = await client.submit_limit( | ||||
|                         symbol=order.symbol, | ||||
|                         side=order.action, | ||||
|                         quantity=order.size, | ||||
|                         price=order.price, | ||||
|                         oid=oid, | ||||
|                         modify=modify, | ||||
|                     ) | ||||
| 
 | ||||
|                     # SMH they do gen their own (int) order ids.. | ||||
|                     # assert reqid == order.oid | ||||
|                     dids[order.oid] = reqid | ||||
| 
 | ||||
|                 except BrokerError as be: | ||||
|                     await ems_order_stream.send( | ||||
|                         BrokerdError( | ||||
|                             oid=msg['oid'], | ||||
|                             symbol=msg['symbol'], | ||||
|                             reason=( | ||||
|                                 '`binance` request failed:\n' | ||||
|                                 f'{be}' | ||||
|                             )) | ||||
|                     ) | ||||
|                     continue | ||||
| 
 | ||||
|             case _: | ||||
|                 account = msg.get('account') | ||||
|                 if account not in {'binance.spot', 'binance.usdtm'}: | ||||
|                     log.error( | ||||
|                         'Order request does not have a valid binance account name?\n' | ||||
|                         'Only one of\n' | ||||
|                         '- `binance.spot` or,\n' | ||||
|                         '- `binance.usdtm`\n' | ||||
|                         'is currently valid!' | ||||
|                     ) | ||||
|                 await ems_order_stream.send( | ||||
|                     BrokerdError( | ||||
|                         oid=msg['oid'], | ||||
|                         symbol=msg['symbol'], | ||||
|                         reason=( | ||||
|                             f'Invalid `binance` broker request msg:\n{msg}' | ||||
|                         )) | ||||
|                 ) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_trade_dialog( | ||||
|     ctx: tractor.Context, | ||||
| 
 | ||||
| ) -> AsyncIterator[dict[str, Any]]: | ||||
| 
 | ||||
|     # TODO: how do we set this from the EMS such that | ||||
|     # positions are loaded from the correct venue on the user | ||||
|     # stream at startup? (that is in an attempt to support both | ||||
|     # spot and futes markets?) | ||||
|     # - I guess we just want to instead start 2 separate user | ||||
|     #   stream tasks right? unless we want another actor pool? | ||||
|     #   XXX: see issue: <urlhere> | ||||
|     venue_name: str = 'futes' | ||||
|     venue_mode: str = 'usdtm_futes' | ||||
|     account_name: str = 'usdtm' | ||||
|     use_testnet: bool = False | ||||
| 
 | ||||
|     # TODO: if/when we add .accounting support we need to | ||||
|     # do a open_symcache() call.. though maybe we can hide | ||||
|     # this in a new async version of open_account()? | ||||
|     async with open_cached_client('binance') as client: | ||||
|         subconf: dict|None = client.conf.get(venue_name) | ||||
| 
 | ||||
|         # XXX: if no futes.api_key or spot.api_key has been set we | ||||
|         # always fall back to the paper engine! | ||||
|         if ( | ||||
|             not subconf | ||||
|             or | ||||
|             not subconf.get('api_key') | ||||
|         ): | ||||
|             await ctx.started('paper') | ||||
|             return | ||||
| 
 | ||||
|         use_testnet: bool = subconf.get('use_testnet', False) | ||||
| 
 | ||||
|     async with ( | ||||
|         open_cached_client('binance') as client, | ||||
|     ): | ||||
|         client.mkt_mode: str = venue_mode | ||||
| 
 | ||||
|         # TODO: map these wss urls depending on spot or futes | ||||
|         # setting passed when this task is spawned? | ||||
|         wss_url: str = _futes_ws if not use_testnet else _testnet_futes_ws | ||||
| 
 | ||||
|         wss: NoBsWs | ||||
|         async with ( | ||||
|             client.manage_listen_key() as listen_key, | ||||
|             open_autorecon_ws(f'{wss_url}/?listenKey={listen_key}') as wss, | ||||
|         ): | ||||
|             nsid: int = time_ns() | ||||
|             await wss.send_msg({ | ||||
|                 # "method": "SUBSCRIBE", | ||||
|                 "method": "REQUEST", | ||||
|                 "params": | ||||
|                 [ | ||||
|                     f"{listen_key}@account", | ||||
|                     f"{listen_key}@balance", | ||||
|                     f"{listen_key}@position", | ||||
| 
 | ||||
|                     # TODO: does this even work!? seems to cause | ||||
|                     # a hang on the first msg..? lelelel. | ||||
|                     # f"{listen_key}@order", | ||||
|                 ], | ||||
|                 "id": nsid | ||||
|             }) | ||||
| 
 | ||||
|             with trio.fail_after(6): | ||||
|                 msg = await wss.recv_msg() | ||||
|                 assert msg['id'] == nsid | ||||
| 
 | ||||
|             # TODO: load other market wide data / statistics: | ||||
|             # - OI: https://binance-docs.github.io/apidocs/futures/en/#open-interest | ||||
|             # - OI stats: https://binance-docs.github.io/apidocs/futures/en/#open-interest-statistics | ||||
|             accounts: bidict[str, str] = bidict({'binance.usdtm': None}) | ||||
|             balances: dict[Asset, tuple[float, int]] = {} | ||||
|             positions: list[BrokerdPosition] = [] | ||||
| 
 | ||||
|             for resp_dict in msg['result']: | ||||
|                 resp: dict = resp_dict['res'] | ||||
|                 req: str = resp_dict['req'] | ||||
| 
 | ||||
|                 # @account response should be something like: | ||||
|                 # {'accountAlias': 'sRFzFzAuuXsR', | ||||
|                 #  'canDeposit': True, | ||||
|                 #  'canTrade': True, | ||||
|                 #  'canWithdraw': True, | ||||
|                 #  'feeTier': 0} | ||||
|                 if 'account' in req: | ||||
|                     # NOTE: fill in the hash-like key/alias binance | ||||
|                     # provides for the account. | ||||
|                     alias: str = resp['accountAlias'] | ||||
|                     accounts['binance.usdtm'] = alias | ||||
| 
 | ||||
|                 # @balance response: | ||||
|                 # {'accountAlias': 'sRFzFzAuuXsR', | ||||
|                 #      'balances': [{'asset': 'BTC', | ||||
|                 #                    'availableBalance': '0.00000000', | ||||
|                 #                    'balance': '0.00000000', | ||||
|                 #                    'crossUnPnl': '0.00000000', | ||||
|                 #                    'crossWalletBalance': '0.00000000', | ||||
|                 #                    'maxWithdrawAmount': '0.00000000', | ||||
|                 #                    'updateTime': 0}] | ||||
|                 #                     ... | ||||
|                 # } | ||||
|                 elif 'balance' in req: | ||||
|                     for entry in resp['balances']: | ||||
|                         name: str = entry['asset'] | ||||
|                         balance: float = float(entry['balance']) | ||||
|                         last_update_t: int = entry['updateTime'] | ||||
| 
 | ||||
|                         spot_asset: Asset = client._venue2assets['spot'][name] | ||||
| 
 | ||||
|                         if balance > 0: | ||||
|                             balances[spot_asset] = (balance, last_update_t) | ||||
|                             # await tractor.pause() | ||||
| 
 | ||||
|                 # @position response: | ||||
|                 # {'positions': [{'entryPrice': '0.0', | ||||
|                 #                    'isAutoAddMargin': False, | ||||
|                 #                    'isolatedMargin': '0', | ||||
|                 #                    'leverage': 20, | ||||
|                 #                    'liquidationPrice': '0', | ||||
|                 #                    'marginType': 'CROSSED', | ||||
|                 #                    'markPrice': '0.60289650', | ||||
|                 #                    'maxNotionalValue': '25000', | ||||
|                 #                    'notional': '0', | ||||
|                 #                    'positionAmt': '0', | ||||
|                 #                    'positionSide': 'BOTH', | ||||
|                 #                    'symbol': 'ETHUSDT_230630', | ||||
|                 #                    'unRealizedProfit': '0.00000000', | ||||
|                 #                    'updateTime': 1672741444894} | ||||
|                 #                    ... | ||||
|                 # } | ||||
|                 elif 'position' in req: | ||||
|                     for entry in resp['positions']: | ||||
|                         bs_mktid: str = entry['symbol'] | ||||
|                         entry_size: float = float(entry['positionAmt']) | ||||
| 
 | ||||
|                         pair: Pair | None = client._venue2pairs[ | ||||
|                             venue_mode | ||||
|                         ].get(bs_mktid) | ||||
|                         if ( | ||||
|                             pair | ||||
|                             and entry_size > 0 | ||||
|                         ): | ||||
|                             entry_price: float = float(entry['entryPrice']) | ||||
| 
 | ||||
|                             ppmsg = BrokerdPosition( | ||||
|                                 broker='binance', | ||||
|                                 account=f'binance.{account_name}', | ||||
| 
 | ||||
|                                 # TODO: maybe we should be passing back | ||||
|                                 # a `MktPair` here? | ||||
|                                 symbol=pair.bs_fqme.lower() + '.binance', | ||||
| 
 | ||||
|                                 size=entry_size, | ||||
|                                 avg_price=entry_price, | ||||
|                             ) | ||||
|                             positions.append(ppmsg) | ||||
| 
 | ||||
|                         if pair is None: | ||||
|                             log.warning( | ||||
|                                 f'`{bs_mktid}` Position entry but no market pair?\n' | ||||
|                                 f'{pformat(entry)}\n' | ||||
|                             ) | ||||
| 
 | ||||
|             await ctx.started(( | ||||
|                 positions, | ||||
|                 list(accounts) | ||||
|             )) | ||||
| 
 | ||||
|             # TODO: package more state tracking into the dialogs API? | ||||
|             # - hmm maybe we could include `OrderDialogs.dids: | ||||
|             #   bidict` as part of the interface and then ask for | ||||
|             #   a reqid field to be passed at init? | ||||
|             #   |-> `OrderDialog(reqid_field='orderId')` kinda thing? | ||||
|             # - also maybe bundle in some kind of dialog to account | ||||
|             #   table? | ||||
|             dialogs = OrderDialogs() | ||||
|             dids: bidict[str, int] = bidict() | ||||
| 
 | ||||
|             # TODO: further init setup things to get full EMS and | ||||
|             # .accounting support B) | ||||
|             # - live order loading via user stream subscription and | ||||
|             #   update to the order dialog table. | ||||
|             #   - MAKE SURE we add live orders loaded during init | ||||
|             #   into the dialogs table to ensure they can be | ||||
|             #   cancelled, meaning we can do a symbol lookup. | ||||
|             # - position loading using `piker.accounting` subsys | ||||
|             #   and comparison with binance's own position calcs. | ||||
|             # - load pps and accounts using accounting apis, write | ||||
|             #   the ledger and account files | ||||
|             #   - table: Account | ||||
|             #   - ledger: TransactionLedger | ||||
| 
 | ||||
|             async with ( | ||||
|                 trio.open_nursery() as tn, | ||||
|                 ctx.open_stream() as ems_stream, | ||||
|             ): | ||||
|                 # deliver all pre-exist open orders to EMS thus syncing | ||||
|                 # state with existing live limits reported by them. | ||||
|                 order: Order | ||||
|                 for order in await client.get_open_orders(): | ||||
|                     status_msg = Status( | ||||
|                         time_ns=time.time_ns(), | ||||
|                         resp='open', | ||||
|                         oid=order.oid, | ||||
|                         reqid=order.oid, | ||||
| 
 | ||||
|                         # embedded order info | ||||
|                         req=order, | ||||
|                         src='binance', | ||||
|                     ) | ||||
|                     dialogs.add_msg(order.oid, order.to_dict()) | ||||
|                     await ems_stream.send(status_msg) | ||||
| 
 | ||||
|                 tn.start_soon( | ||||
|                     handle_order_requests, | ||||
|                     ems_stream, | ||||
|                     client, | ||||
|                     dids, | ||||
|                     dialogs, | ||||
|                 ) | ||||
|                 tn.start_soon( | ||||
|                     handle_order_updates, | ||||
|                     venue_mode, | ||||
|                     account_name, | ||||
|                     client, | ||||
|                     ems_stream, | ||||
|                     wss, | ||||
|                     dialogs, | ||||
| 
 | ||||
|                 ) | ||||
| 
 | ||||
|                 await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| async def handle_order_updates( | ||||
|     venue: str, | ||||
|     account_name: str, | ||||
|     client: Client, | ||||
|     ems_stream: tractor.MsgStream, | ||||
|     wss: NoBsWs, | ||||
|     dialogs: OrderDialogs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Main msg handling loop for all things order management. | ||||
| 
 | ||||
|     This code is broken out to make the context explicit and state | ||||
|     variables defined in the signature clear to the reader. | ||||
| 
 | ||||
|     ''' | ||||
|     async for msg in wss: | ||||
|         log.info(f'Rx USERSTREAM msg:\n{pformat(msg)}') | ||||
|         match msg: | ||||
| 
 | ||||
|             # ORDER update | ||||
|             # spot: https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update | ||||
|             # futes: https://binance-docs.github.io/apidocs/futures/en/#event-order-update | ||||
|             # futes: https://binance-docs.github.io/apidocs/futures/en/#event-balance-and-position-update | ||||
|             # {'o': { | ||||
|             #    'L': '0', | ||||
|             #    'N': 'USDT', | ||||
|             #    'R': False, | ||||
|             #    'S': 'BUY', | ||||
|             #    'T': 1687028772484, | ||||
|             #    'X': 'NEW', | ||||
|             #    'a': '0', | ||||
|             #    'ap': '0', | ||||
|             #    'b': '7012.06520', | ||||
|             #    'c': '518d4122-8d3e-49b0-9a1e-1fabe6f62e4c', | ||||
|             #    'cp': False, | ||||
|             #    'f': 'GTC', | ||||
|             #    'i': 3376956924, | ||||
|             #    'l': '0', | ||||
|             #    'm': False, | ||||
|             #    'n': '0', | ||||
|             #    'o': 'LIMIT', | ||||
|             #    'ot': 'LIMIT', | ||||
|             #    'p': '21136.80', | ||||
|             #    'pP': False, | ||||
|             #    'ps': 'BOTH', | ||||
|             #    'q': '0.047', | ||||
|             #    'rp': '0', | ||||
|             #    's': 'BTCUSDT', | ||||
|             #    'si': 0, | ||||
|             #    'sp': '0', | ||||
|             #    'ss': 0, | ||||
|             #    't': 0, | ||||
|             #    'wt': 'CONTRACT_PRICE', | ||||
|             #    'x': 'NEW', | ||||
|             #    'z': '0'} | ||||
|             # } | ||||
|             case { | ||||
|                 # 'e': 'executionReport', | ||||
|                 'e': 'ORDER_TRADE_UPDATE', | ||||
|                 'T': int(epoch_ms), | ||||
|                 'o': { | ||||
|                     's': bs_mktid, | ||||
| 
 | ||||
|                     # XXX NOTE XXX see special ids for market | ||||
|                     # events or margin calls: | ||||
|                     # // special client order id: | ||||
|                     # // starts with "autoclose-": liquidation order | ||||
|                     # // "adl_autoclose": ADL auto close order | ||||
|                     # // "settlement_autoclose-": settlement order | ||||
|                     #     for delisting or delivery | ||||
|                     'c': oid, | ||||
|                     # 'i': reqid,  # binance internal int id | ||||
| 
 | ||||
|                     # prices | ||||
|                     'a': submit_price, | ||||
|                     'ap': avg_price, | ||||
|                     'L': fill_price, | ||||
| 
 | ||||
|                     # sizing | ||||
|                     'q': req_size, | ||||
|                     'l': clear_size_filled,  # this event | ||||
|                     'z': accum_size_filled,  # accum | ||||
| 
 | ||||
|                     # commissions | ||||
|                     'n': cost, | ||||
|                     'N': cost_asset, | ||||
| 
 | ||||
|                     # state | ||||
|                     'S': side, | ||||
|                     'X': status, | ||||
|                 }, | ||||
|             } as order_msg: | ||||
|                 log.info( | ||||
|                     f'{status} for {side} ORDER oid: {oid}\n' | ||||
|                     f'bs_mktid: {bs_mktid}\n\n' | ||||
| 
 | ||||
|                     f'order size: {req_size}\n' | ||||
|                     f'cleared size: {clear_size_filled}\n' | ||||
|                     f'accum filled size: {accum_size_filled}\n\n' | ||||
| 
 | ||||
|                     f'submit price: {submit_price}\n' | ||||
|                     f'fill_price: {fill_price}\n' | ||||
|                     f'avg clearing price: {avg_price}\n\n' | ||||
| 
 | ||||
|                     f'cost: {cost}@{cost_asset}\n' | ||||
|                 ) | ||||
| 
 | ||||
|                 # status remap from binance to piker's | ||||
|                 # status set: | ||||
|                 # - NEW | ||||
|                 # - PARTIALLY_FILLED | ||||
|                 # - FILLED | ||||
|                 # - CANCELED | ||||
|                 # - EXPIRED | ||||
|                 # https://binance-docs.github.io/apidocs/futures/en/#event-order-update | ||||
| 
 | ||||
|                 req_size: float = float(req_size) | ||||
|                 accum_size_filled: float = float(accum_size_filled) | ||||
|                 fill_price: float = float(fill_price) | ||||
| 
 | ||||
|                 match status: | ||||
|                     case 'PARTIALLY_FILLED' | 'FILLED': | ||||
|                         status = 'fill' | ||||
| 
 | ||||
|                         fill_msg = BrokerdFill( | ||||
|                             time_ns=time_ns(), | ||||
|                             # reqid=reqid, | ||||
|                             reqid=oid, | ||||
| 
 | ||||
|                             # just use size value for now? | ||||
|                             # action=action, | ||||
|                             size=clear_size_filled, | ||||
|                             price=fill_price, | ||||
| 
 | ||||
|                             # TODO: maybe capture more msg data | ||||
|                             # i.e fees? | ||||
|                             broker_details={'name': 'binance'} | order_msg, | ||||
|                             broker_time=time.time(), | ||||
|                         ) | ||||
|                         await ems_stream.send(fill_msg) | ||||
| 
 | ||||
|                         if accum_size_filled == req_size: | ||||
|                             status = 'closed' | ||||
|                             dialogs.pop(oid) | ||||
| 
 | ||||
|                     case 'NEW': | ||||
|                         status = 'open' | ||||
| 
 | ||||
|                     case 'EXPIRED': | ||||
|                         status = 'canceled' | ||||
|                         dialogs.pop(oid) | ||||
| 
 | ||||
|                     case _: | ||||
|                         status = status.lower() | ||||
| 
 | ||||
|                 resp = BrokerdStatus( | ||||
|                     time_ns=time_ns(), | ||||
|                     # reqid=reqid, | ||||
|                     reqid=oid, | ||||
| 
 | ||||
|                     # TODO: i feel like we don't need to make the | ||||
|                     # ems and upstream clients aware of this? | ||||
|                     # account='binance.usdtm', | ||||
| 
 | ||||
|                     status=status, | ||||
| 
 | ||||
|                     filled=accum_size_filled, | ||||
|                     remaining=req_size - accum_size_filled, | ||||
|                     broker_details={ | ||||
|                         'name': 'binance', | ||||
|                         'broker_time': epoch_ms / 1000. | ||||
|                     } | ||||
|                 ) | ||||
|                 await ems_stream.send(resp) | ||||
| 
 | ||||
|             # ACCOUNT and POSITION update B) | ||||
|             # { | ||||
|             #  'E': 1687036749218, | ||||
|             #  'e': 'ACCOUNT_UPDATE' | ||||
|             #  'T': 1687036749215, | ||||
|             #  'a': {'B': [{'a': 'USDT', | ||||
|             #               'bc': '0', | ||||
|             #               'cw': '1267.48920735', | ||||
|             #               'wb': '1410.90245576'}], | ||||
|             #        'P': [{'cr': '-3292.10973007', | ||||
|             #               'ep': '26349.90000', | ||||
|             #               'iw': '143.41324841', | ||||
|             #               'ma': 'USDT', | ||||
|             #               'mt': 'isolated', | ||||
|             #               'pa': '0.038', | ||||
|             #               'ps': 'BOTH', | ||||
|             #               's': 'BTCUSDT', | ||||
|             #               'up': '5.17555453'}], | ||||
|             #        'm': 'ORDER'}, | ||||
|             # } | ||||
|             case { | ||||
|                 'T': int(epoch_ms), | ||||
|                 'e': 'ACCOUNT_UPDATE', | ||||
|                 'a': { | ||||
|                     'P': [{ | ||||
|                         's': bs_mktid, | ||||
|                         'pa': pos_amount, | ||||
|                         'ep': entry_price, | ||||
|                     }], | ||||
|                 }, | ||||
|             }: | ||||
|                 # real-time relay position updates back to EMS | ||||
|                 pair: Pair | None = client._venue2pairs[venue].get(bs_mktid) | ||||
|                 ppmsg = BrokerdPosition( | ||||
|                     broker='binance', | ||||
|                     account=f'binance.{account_name}', | ||||
| 
 | ||||
|                     # TODO: maybe we should be passing back | ||||
|                     # a `MktPair` here? | ||||
|                     symbol=pair.bs_fqme.lower() + '.binance', | ||||
| 
 | ||||
|                     size=float(pos_amount), | ||||
|                     avg_price=float(entry_price), | ||||
|                 ) | ||||
|                 await ems_stream.send(ppmsg) | ||||
| 
 | ||||
|             case _: | ||||
|                 log.warning( | ||||
|                     'Unhandled event:\n' | ||||
|                     f'{pformat(msg)}' | ||||
|                 ) | ||||
|  | @ -1,557 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Real-time and historical data feed endpoints. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     aclosing, | ||||
| ) | ||||
| from datetime import datetime | ||||
| from functools import ( | ||||
|     partial, | ||||
| ) | ||||
| import itertools | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AsyncGenerator, | ||||
|     Callable, | ||||
|     Generator, | ||||
| ) | ||||
| import time | ||||
| 
 | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| from pendulum import ( | ||||
|     from_timestamp, | ||||
| ) | ||||
| import numpy as np | ||||
| import tractor | ||||
| 
 | ||||
| from piker.brokers import ( | ||||
|     open_cached_client, | ||||
|     NoData, | ||||
| ) | ||||
| from piker._cacheables import ( | ||||
|     async_lifo_cache, | ||||
| ) | ||||
| from piker.accounting import ( | ||||
|     Asset, | ||||
|     DerivTypes, | ||||
|     MktPair, | ||||
|     unpack_fqme, | ||||
| ) | ||||
| from piker.types import Struct | ||||
| from piker.data.validate import FeedInit | ||||
| from piker.data._web_bs import ( | ||||
|     open_autorecon_ws, | ||||
|     NoBsWs, | ||||
| ) | ||||
| from piker.brokers._util import ( | ||||
|     DataUnavailable, | ||||
|     get_logger, | ||||
| ) | ||||
| 
 | ||||
| from .api import ( | ||||
|     Client, | ||||
| ) | ||||
| from .venues import ( | ||||
|     Pair, | ||||
|     FutesPair, | ||||
|     get_api_eps, | ||||
| ) | ||||
| 
 | ||||
| log = get_logger('piker.brokers.binance') | ||||
| 
 | ||||
| 
 | ||||
| class L1(Struct): | ||||
|     # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams | ||||
| 
 | ||||
|     update_id: int | ||||
|     sym: str | ||||
| 
 | ||||
|     bid: float | ||||
|     bsize: float | ||||
|     ask: float | ||||
|     asize: float | ||||
| 
 | ||||
| 
 | ||||
| # validation type | ||||
| class AggTrade(Struct, frozen=True): | ||||
|     e: str  # Event type | ||||
|     E: int  # Event time | ||||
|     s: str  # Symbol | ||||
|     a: int  # Aggregate trade ID | ||||
|     p: float  # Price | ||||
|     q: float  # Quantity | ||||
|     f: int  # First trade ID | ||||
|     l: int  # noqa Last trade ID | ||||
|     T: int  # Trade time | ||||
|     m: bool  # Is the buyer the market maker? | ||||
|     M: bool | None = None  # Ignore | ||||
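
Note that `msgspec.Struct` subtypes like `AggTrade` do not validate field types at construction, only on decode/encode (see the note in `stream_messages()` below), so `AggTrade(**msg)` is effectively a typed repack. A tiny illustrative construction with made-up values:

    raw = {
        'e': 'aggTrade', 'E': 1687028772, 's': 'BTCUSDT',
        'a': 1, 'p': '30000.00', 'q': '0.5',
        'f': 1, 'l': 1, 'T': 1687028772, 'm': False,
    }
    trade = AggTrade(**raw)  # no runtime type check on `p` here!
    assert float(trade.p) == 30_000.0
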
| 
 | ||||
| 
 | ||||
| async def stream_messages( | ||||
|     ws: NoBsWs, | ||||
| 
 | ||||
| ) -> AsyncGenerator[NoBsWs, dict]: | ||||
| 
 | ||||
|     # TODO: match syntax here! | ||||
|     msg: dict[str, Any] | ||||
|     async for msg in ws: | ||||
|         match msg: | ||||
|             # for l1 streams binance doesn't add an event type field so | ||||
|             # identify those messages by matching keys | ||||
|             # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams | ||||
|             case { | ||||
|                 # NOTE: this is never an old value it seems, so | ||||
|                 # they are always sending real L1 spread updates. | ||||
|                 'u': upid,  # update id | ||||
|                 's': sym, | ||||
|                 'b': bid, | ||||
|                 'B': bsize, | ||||
|                 'a': ask, | ||||
|                 'A': asize, | ||||
|             }: | ||||
|                 # TODO: it would be super nice to have a `L1` piker type | ||||
|                 # which "renders" incremental tick updates from a packed | ||||
|                 # msg-struct: | ||||
|                 # - backend msgs after packed into the type such that we | ||||
|                 #   can reduce IPC usage but without each backend having | ||||
|                 #   to do that incremental update logic manually B) | ||||
|                 # - would it maybe be more efficient to use this instead? | ||||
|                 #   https://binance-docs.github.io/apidocs/spot/en/#diff-depth-stream | ||||
|                 l1 = L1( | ||||
|                     update_id=upid, | ||||
|                     sym=sym, | ||||
|                     bid=bid, | ||||
|                     bsize=bsize, | ||||
|                     ask=ask, | ||||
|                     asize=asize, | ||||
|                 ) | ||||
|                 # for speed probably better to only specifically | ||||
|                 # cast fields we need in numerical form? | ||||
|                 # l1.typecast() | ||||
| 
 | ||||
|                 # repack into piker's tick-quote format | ||||
|                 yield 'l1', { | ||||
|                     'symbol': l1.sym, | ||||
|                     'ticks': [ | ||||
|                         { | ||||
|                             'type': 'bid', | ||||
|                             'price': float(l1.bid), | ||||
|                             'size': float(l1.bsize), | ||||
|                         }, | ||||
|                         { | ||||
|                             'type': 'bsize', | ||||
|                             'price': float(l1.bid), | ||||
|                             'size': float(l1.bsize), | ||||
|                         }, | ||||
|                         { | ||||
|                             'type': 'ask', | ||||
|                             'price': float(l1.ask), | ||||
|                             'size': float(l1.asize), | ||||
|                         }, | ||||
|                         { | ||||
|                             'type': 'asize', | ||||
|                             'price': float(l1.ask), | ||||
|                             'size': float(l1.asize), | ||||
|                         } | ||||
|                     ] | ||||
|                 } | ||||
| 
 | ||||
|             # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams | ||||
|             case { | ||||
|                 'e': 'aggTrade', | ||||
|             }: | ||||
|                 # NOTE: this is purely for a definition, | ||||
|                 # ``msgspec.Struct`` does not runtime-validate until you | ||||
|                 # decode/encode, see: | ||||
|                 # https://jcristharif.com/msgspec/structs.html#type-validation | ||||
|                 msg = AggTrade(**msg)  # TODO: should we .copy() ? | ||||
|                 piker_quote: dict = { | ||||
|                     'symbol': msg.s, | ||||
|                     'last': float(msg.p), | ||||
|                     'brokerd_ts': time.time(), | ||||
|                     'ticks': [{ | ||||
|                         'type': 'trade', | ||||
|                         'price': float(msg.p), | ||||
|                         'size': float(msg.q), | ||||
|                         'broker_ts': msg.T, | ||||
|                     }], | ||||
|                 } | ||||
|                 yield 'trade', piker_quote | ||||
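
Downstream consumers iterate the `(typ, quote)` pairs this generator yields; a minimal sketch of the drive loop (mirroring `stream_quotes()` further below; `wss_url` is whichever venue endpoint is in play, and a real feed would also pass a `fixture=subscribe` hook to actually subscribe pairs):

    async with (
        open_autorecon_ws(url=wss_url) as ws,
        aclosing(stream_messages(ws)) as msg_gen,
    ):
        async for typ, quote in msg_gen:
            if typ == 'trade':
                print(quote['symbol'], quote['last'])
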
| 
 | ||||
| 
 | ||||
| def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]: | ||||
|     ''' | ||||
|     Create a request subscription packet dict. | ||||
| 
 | ||||
|     - spot: | ||||
|       https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams | ||||
| 
 | ||||
|     - futes: | ||||
|       https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams | ||||
| 
 | ||||
|     ''' | ||||
|     return { | ||||
|         'method': 'SUBSCRIBE', | ||||
|         'params': [ | ||||
|             f'{pair.lower()}@{sub_name}' | ||||
|             for pair in pairs | ||||
|         ], | ||||
|         'id': uid | ||||
|     } | ||||
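
For instance, subscribing two pairs to the book-ticker stream yields a packet shaped like:

    make_sub(['BTCUSDT', 'ETHUSDT'], 'bookTicker', 1)
    # -> {
    #     'method': 'SUBSCRIBE',
    #     'params': ['btcusdt@bookTicker', 'ethusdt@bookTicker'],
    #     'id': 1,
    # }
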
| 
 | ||||
| 
 | ||||
| # TODO, why aren't frame resp `log.info()`s showing in upstream | ||||
| # code?! | ||||
| @acm | ||||
| async def open_history_client( | ||||
|     mkt: MktPair, | ||||
| 
 | ||||
| ) -> tuple[Callable, int]: | ||||
| 
 | ||||
|     # TODO implement history getter for the new storage layer. | ||||
|     async with open_cached_client('binance') as client: | ||||
| 
 | ||||
|         async def get_ohlc( | ||||
|             timeframe: float, | ||||
|             end_dt: datetime | None = None, | ||||
|             start_dt: datetime | None = None, | ||||
| 
 | ||||
|         ) -> tuple[ | ||||
|             np.ndarray, | ||||
|             datetime,  # start | ||||
|             datetime,  # end | ||||
|         ]: | ||||
|             if timeframe != 60: | ||||
|                 raise DataUnavailable('Only 1m bars are supported') | ||||
| 
 | ||||
|             # TODO: better wrapping for venue / mode? | ||||
|             # - eventually logic for usd vs. coin settled futes | ||||
|             #   based on `MktPair.src` type/value? | ||||
|             # - maybe something like `async with | ||||
|             # Client.use_venue('usdtm_futes')` | ||||
|             if mkt.type_key in DerivTypes: | ||||
|                 client.mkt_mode = 'usdtm_futes' | ||||
|             else: | ||||
|                 client.mkt_mode = 'spot' | ||||
| 
 | ||||
|             array: np.ndarray = await client.bars( | ||||
|                 mkt=mkt, | ||||
|                 start_dt=start_dt, | ||||
|                 end_dt=end_dt, | ||||
|             ) | ||||
|             if array.size == 0: | ||||
|                 raise NoData( | ||||
|                     f'No frame for {start_dt} -> {end_dt}\n' | ||||
|                 ) | ||||
| 
 | ||||
|             times = array['time'] | ||||
|             if not times.any(): | ||||
|                 raise ValueError( | ||||
|                     'Bad frame with null-times?\n\n' | ||||
|                     f'{times}' | ||||
|                 ) | ||||
| 
 | ||||
|             if end_dt is None: | ||||
|                 inow: int = round(time.time()) | ||||
|                 if (inow - times[-1]) > 60: | ||||
|                     await tractor.pause() | ||||
| 
 | ||||
|             start_dt = from_timestamp(times[0]) | ||||
|             end_dt = from_timestamp(times[-1]) | ||||
|             return array, start_dt, end_dt | ||||
| 
 | ||||
|         yield get_ohlc, {'erlangs': 3, 'rate': 3} | ||||
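
The `(get_ohlc, limits)` pair yielded above is consumed by piker's history backfill machinery; a rough sketch of the call pattern (the `erlangs`/`rate` values presumably throttle concurrent frame requests, per the dict yielded here):

    async with open_history_client(mkt) as (get_ohlc, limits):
        array, first_dt, last_dt = await get_ohlc(
            timeframe=60,   # 1m bars, the only supported rate
            end_dt=None,    # None -> fetch the most recent frame
        )
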
| 
 | ||||
| 
 | ||||
| @async_lifo_cache() | ||||
| async def get_mkt_info( | ||||
|     fqme: str, | ||||
| 
 | ||||
| ) -> tuple[MktPair, Pair] | None: | ||||
| 
 | ||||
|     # uppercase since binance bs_mktids are always upper | ||||
|     if 'binance' not in fqme.lower(): | ||||
|         fqme += '.binance' | ||||
| 
 | ||||
|     mkt_mode: str = '' | ||||
|     broker, mkt_ep, venue, expiry = unpack_fqme(fqme) | ||||
| 
 | ||||
|     # NOTE: we always upper case all tokens to be consistent with | ||||
|     # binance's symbology style for pairs, like `BTCUSDT`, but in | ||||
|     # theory we could also just keep things lower case; as long as | ||||
|     # we're consistent and the symcache matches whatever this func | ||||
|     # returns, always! | ||||
|     expiry: str = expiry.upper() | ||||
|     venue: str = venue.upper() | ||||
|     venue_lower: str = venue.lower() | ||||
| 
 | ||||
|     # XXX TODO: we should change the usdtm_futes name to just | ||||
|     # usdm_futes (dropping the tether part) since it turns out that | ||||
|     # there are indeed USD-tokens OTHER THAN tether being used as | ||||
|     # the margin assets.. it's going to require a wholesale | ||||
|     # (variable/key) rename as well as file name adjustments to any | ||||
|     # existing tsdb set.. | ||||
|     if 'usd' in venue_lower: | ||||
|         mkt_mode: str = 'usdtm_futes' | ||||
| 
 | ||||
|     # NO IDEA what these contracts (some kinda DEX-ish futes?) are | ||||
|     # but we're masking them for now.. | ||||
|     elif ( | ||||
|         'defi' in venue_lower | ||||
| 
 | ||||
|         # TODO: handle coinm futes which have a margin asset that | ||||
|         # is some crypto token! | ||||
|         # https://binance-docs.github.io/apidocs/delivery/en/#exchange-information | ||||
|         or 'btc' in venue_lower | ||||
|     ): | ||||
|         return None | ||||
| 
 | ||||
|     else: | ||||
|         # NOTE: see the `FutesPair.bs_fqme: str` implementation | ||||
|         # to understand the reverse market info lookup below. | ||||
|         mkt_mode = venue_lower or 'spot' | ||||
| 
 | ||||
|     if ( | ||||
|         venue | ||||
|         and 'spot' not in venue_lower | ||||
| 
 | ||||
|         # XXX: catch all in case user doesn't know which | ||||
|         # venue they want (usdtm vs. coinm) and we can choose | ||||
|         # a default (via config?) once we support coin-m APIs. | ||||
|         or 'perp' in venue_lower | ||||
|     ): | ||||
|         if not mkt_mode: | ||||
|             mkt_mode: str = f'{venue_lower}_futes' | ||||
| 
 | ||||
|     async with open_cached_client( | ||||
|         'binance', | ||||
|     ) as client: | ||||
| 
 | ||||
|         assets: dict[str, Asset] = await client.get_assets() | ||||
|         pair_str: str = mkt_ep.upper() | ||||
| 
 | ||||
|         # switch venue-mode depending on input pattern parsing | ||||
|         # since we want to use a particular endpoint (set) for | ||||
|         # pair info lookup! | ||||
|         client.mkt_mode = mkt_mode | ||||
| 
 | ||||
|         pair: Pair = await client.exch_info( | ||||
|             pair_str, | ||||
|             venue=mkt_mode,  # explicit | ||||
|             expiry=expiry, | ||||
|         ) | ||||
| 
 | ||||
|         if 'futes' in mkt_mode: | ||||
|             assert isinstance(pair, FutesPair) | ||||
| 
 | ||||
|         dst: Asset | None = assets.get(pair.bs_dst_asset) | ||||
|         if ( | ||||
|             not dst | ||||
|             # TODO: a known asset DNE list? | ||||
|             # and pair.baseAsset == 'DEFI' | ||||
|         ): | ||||
|             log.warning( | ||||
|                 f'UNKNOWN {venue} asset {pair.baseAsset} from:\n' | ||||
|                 f'{pformat(pair.to_dict())}' | ||||
|             ) | ||||
| 
 | ||||
|             # XXX UNKNOWN missing "asset", though no idea why? | ||||
|             # maybe it's only avail in the margin venue(s): /dapi/ ? | ||||
|             return None | ||||
| 
 | ||||
|         mkt = MktPair( | ||||
|             dst=dst, | ||||
|             src=assets[pair.bs_src_asset], | ||||
|             price_tick=pair.price_tick, | ||||
|             size_tick=pair.size_tick, | ||||
|             bs_mktid=pair.symbol, | ||||
|             expiry=expiry, | ||||
|             venue=venue, | ||||
|             broker='binance', | ||||
| 
 | ||||
|             # NOTE: sectype is always taken from dst, see | ||||
|             # `MktPair.type_key` and `Client._cache_pairs()` | ||||
|             # _atype=sectype, | ||||
|         ) | ||||
|         return mkt, pair | ||||
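
A hedged usage sketch of the lookup above (the fqme token layout follows the `unpack_fqme()` parsing at the top of this function; the exact pair/venue string here is illustrative):

    mkt, pair = await get_mkt_info('btcusdt.spot.binance')
    assert mkt.bs_mktid == 'BTCUSDT'
    assert isinstance(pair, Pair)
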
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def subscribe( | ||||
|     ws: NoBsWs, | ||||
|     symbols: list[str], | ||||
| 
 | ||||
|     # defined once at import time to keep a global state B) | ||||
|     iter_subids: Generator[int, None, None] = itertools.count(), | ||||
| 
 | ||||
| ): | ||||
|     # setup subs | ||||
| 
 | ||||
|     subid: int = next(iter_subids) | ||||
| 
 | ||||
|     # trade data (aka L1) | ||||
|     # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker | ||||
|     l1_sub = make_sub(symbols, 'bookTicker', subid) | ||||
|     await ws.send_msg(l1_sub) | ||||
| 
 | ||||
|     # aggregate (each order clear by taker **not** by maker) | ||||
|     # trades data: | ||||
|     # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams | ||||
|     agg_trades_sub = make_sub(symbols, 'aggTrade', subid) | ||||
|     await ws.send_msg(agg_trades_sub) | ||||
| 
 | ||||
|     # might get ack from ws server, or maybe some | ||||
|     # other msg still in transit.. | ||||
|     res = await ws.recv_msg() | ||||
|     resp_id: str | None = res.get('id') | ||||
|     if resp_id is not None: | ||||
|         assert resp_id == subid | ||||
| 
 | ||||
|     yield | ||||
| 
 | ||||
|     subs = [] | ||||
|     for sym in symbols: | ||||
|         subs.append(f"{sym}@aggTrade") | ||||
|         subs.append(f"{sym}@bookTicker") | ||||
| 
 | ||||
|     # unsub from all pairs on teardown | ||||
|     if ws.connected(): | ||||
|         await ws.send_msg({ | ||||
|             "method": "UNSUBSCRIBE", | ||||
|             "params": subs, | ||||
|             "id": subid, | ||||
|         }) | ||||
| 
 | ||||
|         # XXX: do we need to ack the unsub? | ||||
|         # await ws.recv_msg() | ||||
| 
 | ||||
| 
 | ||||
| async def stream_quotes( | ||||
| 
 | ||||
|     send_chan: trio.abc.SendChannel, | ||||
|     symbols: list[str], | ||||
|     feed_is_live: trio.Event, | ||||
|     loglevel: str = None, | ||||
| 
 | ||||
|     # startup sync | ||||
|     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     async with ( | ||||
|         send_chan as send_chan, | ||||
|         open_cached_client('binance') as client, | ||||
|     ): | ||||
|         init_msgs: list[FeedInit] = [] | ||||
|         for sym in symbols: | ||||
|             mkt: MktPair | ||||
|             pair: Pair | ||||
|             mkt, pair = await get_mkt_info(sym) | ||||
| 
 | ||||
|             # build out init msgs according to latest spec | ||||
|             init_msgs.append( | ||||
|                 FeedInit(mkt_info=mkt) | ||||
|             ) | ||||
| 
 | ||||
|         wss_url: str = get_api_eps(client.mkt_mode)[1]  # 2nd elem is wss url | ||||
| 
 | ||||
|         # TODO: for sanity, but remove eventually Xp | ||||
|         if 'future' in mkt.type_key: | ||||
|             assert 'fstream' in wss_url | ||||
| 
 | ||||
|         async with ( | ||||
|             open_autorecon_ws( | ||||
|                 url=wss_url, | ||||
|                 fixture=partial( | ||||
|                     subscribe, | ||||
|                     symbols=[mkt.bs_mktid], | ||||
|                 ), | ||||
|             ) as ws, | ||||
| 
 | ||||
|             # avoid stream-gen closure from breaking trio.. | ||||
|             aclosing(stream_messages(ws)) as msg_gen, | ||||
|         ): | ||||
|             # log.info('WAITING ON FIRST LIVE QUOTE..') | ||||
|             typ, quote = await anext(msg_gen) | ||||
| 
 | ||||
|             # pull a first quote and deliver | ||||
|             while typ != 'trade': | ||||
|                 typ, quote = await anext(msg_gen) | ||||
| 
 | ||||
|             task_status.started((init_msgs, quote)) | ||||
| 
 | ||||
|             # signal to caller feed is ready for consumption | ||||
|             feed_is_live.set() | ||||
| 
 | ||||
|             # import time | ||||
|             # last = time.time() | ||||
| 
 | ||||
|             # XXX NOTE: can't include the `.binance` suffix | ||||
|             # or the sampling loop will not broadcast correctly | ||||
|             # since `bus._subscribers.setdefault(bs_fqme, set())` | ||||
|             # is used inside `.data.open_feed_bus()` !!! | ||||
|             topic: str = mkt.bs_fqme | ||||
| 
 | ||||
|             # start streaming | ||||
|             async for typ, quote in msg_gen: | ||||
|                 # period = time.time() - last | ||||
|                 # hz = 1/period if period else float('inf') | ||||
|                 # if hz > 60: | ||||
|                 #     log.info(f'Binance quotez : {hz}') | ||||
|                 await send_chan.send({topic: quote}) | ||||
|                 # last = time.time() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_symbol_search( | ||||
|     ctx: tractor.Context, | ||||
| ) -> None: | ||||
| 
 | ||||
|     # NOTE: symbology tables are loaded as part of client | ||||
|     # startup in ``.api.get_client()`` and in this case | ||||
|     # are stored as `Client._pairs`. | ||||
|     async with open_cached_client('binance') as client: | ||||
| 
 | ||||
|         # TODO: maybe we should deliver the cache | ||||
|         # so that client's can always do a local-lookup-first | ||||
|         # style try and then update async as (new) match results | ||||
|         # are delivered from here? | ||||
|         await ctx.started() | ||||
| 
 | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             pattern: str | ||||
|             async for pattern in stream: | ||||
|                 # NOTE: pattern fuzzy-matching is done within | ||||
|                 # the method impl. | ||||
|                 pairs: dict[str, Pair] = await client.search_symbols( | ||||
|                     pattern, | ||||
|                 ) | ||||
| 
 | ||||
|                 # repack in fqme-keyed table | ||||
|                 byfqme: dict[str, Pair] = {} | ||||
|                 for pair in pairs.values(): | ||||
|                     byfqme[pair.bs_fqme] = pair | ||||
| 
 | ||||
|                 await stream.send(byfqme) | ||||
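| 
 | ||||
| # NOTE: a matching caller-side sketch; assuming an already-open | ||||
| # ``tractor`` portal into this actor: | ||||
| # | ||||
| # async with ( | ||||
| #     portal.open_context(open_symbol_search) as (ctx, _), | ||||
| #     ctx.open_stream() as stream, | ||||
| # ): | ||||
| #     await stream.send('btcusdt') | ||||
| #     matches: dict[str, Pair] = await stream.receive() | ||||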
|  | @ -1,303 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Per market data-type definitions and schemas types. | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from typing import ( | ||||
|     Literal, | ||||
| ) | ||||
| from decimal import Decimal | ||||
| 
 | ||||
| from msgspec import field | ||||
| 
 | ||||
| from piker.types import Struct | ||||
| 
 | ||||
| 
 | ||||
| # API endpoint paths by venue / sub-API | ||||
| _domain: str = 'binance.com' | ||||
| _spot_url = f'https://api.{_domain}' | ||||
| _futes_url = f'https://fapi.{_domain}' | ||||
| 
 | ||||
| # WEBsocketz | ||||
| # NOTE XXX: see api docs which show diff addr? | ||||
| # https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information | ||||
| _spot_ws: str = 'wss://stream.binance.com/ws' | ||||
| # or this one? .. | ||||
| # 'wss://ws-api.binance.com:443/ws-api/v3', | ||||
| 
 | ||||
| # https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams | ||||
| _futes_ws: str = f'wss://fstream.{_domain}/ws' | ||||
| _auth_futes_ws: str = f'wss://fstream-auth.{_domain}/ws' | ||||
| 
 | ||||
| # test nets | ||||
| # NOTE: spot test network only allows certain ep sets: | ||||
| # https://testnet.binance.vision/ | ||||
| # https://www.binance.com/en/support/faq/how-to-test-my-functions-on-binance-testnet-ab78f9a1b8824cf0a106b4229c76496d | ||||
| _testnet_spot_url: str = 'https://testnet.binance.vision/api' | ||||
| _testnet_spot_ws: str = 'wss://testnet.binance.vision/ws' | ||||
| # or this one? .. | ||||
| # 'wss://testnet.binance.vision/ws-api/v3' | ||||
| 
 | ||||
| _testnet_futes_url: str = 'https://testnet.binancefuture.com' | ||||
| _testnet_futes_ws: str = 'wss://stream.binancefuture.com/ws' | ||||
| 
 | ||||
| 
 | ||||
| MarketType = Literal[ | ||||
|     'spot', | ||||
|     # 'margin', | ||||
|     'usdtm_futes', | ||||
|     # 'coinm_futes', | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| def get_api_eps(venue: MarketType) -> tuple[str, str]: | ||||
|     ''' | ||||
|     Return API ep root paths per venue. | ||||
| 
 | ||||
|     ''' | ||||
|     return { | ||||
|         'spot': ( | ||||
|             _spot_url, | ||||
|             _spot_ws, | ||||
|         ), | ||||
|         'usdtm_futes': ( | ||||
|             _futes_url, | ||||
|             _futes_ws, | ||||
|         ), | ||||
|     }[venue] | ||||
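| 
 | ||||
| # e.g. ``get_api_eps('usdtm_futes')`` returns | ||||
| # ``(_futes_url, _futes_ws)``, the (http, wss) api roots for the | ||||
| # usd-margined futes venue. | ||||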
| 
 | ||||
| 
 | ||||
| class Pair(Struct, frozen=True, kw_only=True): | ||||
| 
 | ||||
|     symbol: str | ||||
|     status: str | ||||
|     orderTypes: list[str] | ||||
| 
 | ||||
|     # src | ||||
|     quoteAsset: str | ||||
|     quotePrecision: int | ||||
| 
 | ||||
|     # dst | ||||
|     baseAsset: str | ||||
|     baseAssetPrecision: int | ||||
| 
 | ||||
|     # NOTE: keyed by the filter's ``filterType`` with each value | ||||
|     # being that filter's param table (as read from the | ||||
|     # exchange-info ep). | ||||
|     filters: dict[ | ||||
|         str, | ||||
|         dict[str, str | int | float], | ||||
|     ] = field(default_factory=dict) | ||||
| 
 | ||||
|     @property | ||||
|     def price_tick(self) -> Decimal: | ||||
|         # XXX: lul, after manually inspecting the response format we | ||||
|         # just directly pick out the info we need | ||||
|         step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0') | ||||
|         return Decimal(step_size) | ||||
| 
 | ||||
|     @property | ||||
|     def size_tick(self) -> Decimal: | ||||
|         step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0') | ||||
|         return Decimal(step_size) | ||||
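| 
 | ||||
|     # e.g. a raw ``stepSize`` of '0.00100000' rstrips to '0.001' | ||||
|     # giving a ``Decimal('0.001')`` size increment; same deal for | ||||
|     # ``tickSize`` -> ``.price_tick`` above. | ||||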
| 
 | ||||
|     @property | ||||
|     def bs_fqme(self) -> str: | ||||
|         return self.symbol | ||||
| 
 | ||||
|     @property | ||||
|     def bs_mktid(self) -> str: | ||||
|         return f'{self.symbol}.{self.venue}' | ||||
| 
 | ||||
| 
 | ||||
| class SpotPair(Pair, frozen=True): | ||||
| 
 | ||||
|     cancelReplaceAllowed: bool | ||||
|     allowTrailingStop: bool | ||||
|     quoteAssetPrecision: int | ||||
| 
 | ||||
|     baseCommissionPrecision: int | ||||
|     quoteCommissionPrecision: int | ||||
| 
 | ||||
|     icebergAllowed: bool | ||||
|     ocoAllowed: bool | ||||
|     quoteOrderQtyMarketAllowed: bool | ||||
|     isSpotTradingAllowed: bool | ||||
|     isMarginTradingAllowed: bool | ||||
|     otoAllowed: bool | ||||
| 
 | ||||
|     defaultSelfTradePreventionMode: str | ||||
|     allowedSelfTradePreventionModes: list[str] | ||||
|     permissions: list[str] | ||||
|     permissionSets: list[list[str]] | ||||
| 
 | ||||
|     # NOTE: see `.data._symcache.SymbologyCache.load()` for why | ||||
|     ns_path: str = 'piker.brokers.binance:SpotPair' | ||||
| 
 | ||||
|     @property | ||||
|     def venue(self) -> str: | ||||
|         return 'SPOT' | ||||
| 
 | ||||
|     @property | ||||
|     def bs_fqme(self) -> str: | ||||
|         return f'{self.symbol}.SPOT' | ||||
| 
 | ||||
|     @property | ||||
|     def bs_src_asset(self) -> str: | ||||
|         return f'{self.quoteAsset}' | ||||
| 
 | ||||
|     @property | ||||
|     def bs_dst_asset(self) -> str: | ||||
|         return f'{self.baseAsset}' | ||||
| 
 | ||||
| 
 | ||||
| class FutesPair(Pair): | ||||
|     symbol: str  # 'BTCUSDT', | ||||
|     pair: str  # 'BTCUSDT', | ||||
|     baseAssetPrecision: int # 8, | ||||
|     contractType: str  # 'PERPETUAL', | ||||
|     deliveryDate: int   # 4133404800000, | ||||
|     liquidationFee: float  # '0.012500', | ||||
|     maintMarginPercent: float  # '2.5000', | ||||
|     marginAsset: str  # 'USDT', | ||||
|     marketTakeBound: float  # '0.05', | ||||
|     maxMoveOrderLimit: int  # 10000, | ||||
|     onboardDate: int  # 1569398400000, | ||||
|     pricePrecision: int  # 2, | ||||
|     quantityPrecision: int  # 3, | ||||
|     quoteAsset: str  # 'USDT', | ||||
|     quotePrecision: int  # 8, | ||||
|     requiredMarginPercent: float  # '5.0000', | ||||
|     timeInForce: list[str]  # ['GTC', 'IOC', 'FOK', 'GTX'], | ||||
|     triggerProtect: float  # '0.0500', | ||||
|     underlyingSubType: list[str]  # ['PoW'], | ||||
|     underlyingType: str  # 'COIN' | ||||
| 
 | ||||
|     # NOTE: see `.data._symcache.SymbologyCache.load()` for why | ||||
|     ns_path: str = 'piker.brokers.binance:FutesPair' | ||||
| 
 | ||||
|     # NOTE: for compat with spot pairs and `MktPair.src: Asset` | ||||
|     # processing.. | ||||
|     @property | ||||
|     def quoteAssetPrecision(self) -> int: | ||||
|         return self.quotePrecision | ||||
| 
 | ||||
|     @property | ||||
|     def expiry(self) -> str: | ||||
|         symbol: str = self.symbol | ||||
|         contype: str = self.contractType | ||||
|         match contype: | ||||
|             case ( | ||||
|                 'CURRENT_QUARTER' | ||||
|                 | 'CURRENT_QUARTER DELIVERING' | ||||
|                 | 'NEXT_QUARTER'  # goddamn binance.. | ||||
|             ): | ||||
|                 pair, _, expiry = symbol.partition('_') | ||||
|                 assert pair == self.pair  # sanity | ||||
|                 return f'{expiry}' | ||||
| 
 | ||||
|             case 'PERPETUAL': | ||||
|                 return 'PERP' | ||||
| 
 | ||||
|             case '': | ||||
|                 subtype: list[str] = self.underlyingSubType | ||||
|                 if not subtype: | ||||
|                     if self.status == 'PENDING_TRADING': | ||||
|                         return 'PENDING' | ||||
| 
 | ||||
|                 match subtype: | ||||
|                     case ['DEFI']: | ||||
|                         return 'PERP' | ||||
| 
 | ||||
|         # wow, just wow you binance guys suck.. | ||||
|         if self.status == 'PENDING_TRADING': | ||||
|             return 'PENDING' | ||||
| 
 | ||||
|         # XXX: yeah no clue then.. | ||||
|         raise ValueError( | ||||
|             f'Bad .expiry token match: {contype} for {symbol}' | ||||
|         ) | ||||
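| 
 | ||||
|     # e.g. symbol 'BTCUSDT_230929' with a 'CURRENT_QUARTER' | ||||
|     # ``contractType`` partitions to expiry '230929', whereas plain | ||||
|     # 'BTCUSDT' with 'PERPETUAL' maps to the 'PERP' sentinel. | ||||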
| 
 | ||||
|     @property | ||||
|     def venue(self) -> str: | ||||
|         symbol: str = self.symbol | ||||
|         ctype: str = self.contractType | ||||
|         margin: str = self.marginAsset | ||||
| 
 | ||||
|         match ctype: | ||||
|             case 'PERPETUAL': | ||||
|                 return f'{margin}M' | ||||
| 
 | ||||
|             case ( | ||||
|                 'CURRENT_QUARTER' | ||||
|                 | 'CURRENT_QUARTER DELIVERING' | ||||
|                 | 'NEXT_QUARTER'  # goddamn binance.. | ||||
|             ): | ||||
|                 _, _, expiry = symbol.partition('_') | ||||
|                 return f'{margin}M' | ||||
| 
 | ||||
|             case '': | ||||
|                 subtype: list[str] = self.underlyingSubType | ||||
|                 if not subtype: | ||||
|                     if self.status == 'PENDING_TRADING': | ||||
|                         return f'{margin}M' | ||||
| 
 | ||||
|                 match subtype: | ||||
|                     case ( | ||||
|                         ['DEFI'] | ||||
|                         | ['USDC'] | ||||
|                     ): | ||||
|                         return f'{subtype[0]}' | ||||
| 
 | ||||
|         # XXX: yeah no clue then.. | ||||
|         raise ValueError( | ||||
|             f'Bad .venue token match: {ctype}' | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def bs_fqme(self) -> str: | ||||
|         symbol: str = self.symbol | ||||
|         ctype: str = self.contractType | ||||
|         venue: str = self.venue | ||||
|         pair: str = self.pair | ||||
| 
 | ||||
|         match ctype: | ||||
|             case ( | ||||
|                 'CURRENT_QUARTER' | ||||
|                 | 'NEXT_QUARTER'  # goddamn binance.. | ||||
|             ): | ||||
|                 pair, _, expiry = symbol.partition('_') | ||||
|                 assert pair == self.pair | ||||
| 
 | ||||
|         return f'{pair}.{venue}.{self.expiry}' | ||||
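| 
 | ||||
|     # e.g. a usdt-margined perp renders as 'BTCUSDT.USDTM.PERP' | ||||
|     # while the quarterly 'BTCUSDT_230929' gives | ||||
|     # 'BTCUSDT.USDTM.230929'. | ||||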
| 
 | ||||
|     @property | ||||
|     def bs_src_asset(self) -> str: | ||||
|         return f'{self.quoteAsset}' | ||||
| 
 | ||||
|     @property | ||||
|     def bs_dst_asset(self) -> str: | ||||
|         return f'{self.baseAsset}.{self.venue}' | ||||
| 
 | ||||
| 
 | ||||
| PAIRTYPES: dict[MarketType, type[Pair]] = { | ||||
|     'spot': SpotPair, | ||||
|     'usdtm_futes': FutesPair, | ||||
| 
 | ||||
|     # TODO: support coin-margined venue: | ||||
|     # https://binance-docs.github.io/apidocs/delivery/en/#change-log | ||||
|     # 'coinm_futes': CoinFutesPair, | ||||
| } | ||||
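| 
 | ||||
| 
 | ||||
| # NOTE: a minimal consumer sketch; assuming the caller has a raw pair | ||||
| # entry (a `dict` decoded from the venue's exchange-info ep) and | ||||
| # wants the venue-specific typed struct: | ||||
| def parse_pair( | ||||
|     venue: MarketType, | ||||
|     entry: dict, | ||||
| ) -> Pair: | ||||
|     from msgspec import convert | ||||
|     pair_type: type[Pair] = PAIRTYPES[venue] | ||||
|     return convert(entry, type=pair_type) | ||||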
|  | @ -21,7 +21,6 @@ import os | |||
| from functools import partial | ||||
| from operator import attrgetter | ||||
| from operator import itemgetter | ||||
| from types import ModuleType | ||||
| 
 | ||||
| import click | ||||
| import trio | ||||
|  | @ -29,173 +28,20 @@ import tractor | |||
| 
 | ||||
| from ..cli import cli | ||||
| from .. import watchlists as wl | ||||
| from ..log import ( | ||||
|     colorize_json, | ||||
| ) | ||||
| from ._util import ( | ||||
|     log, | ||||
|     get_console_log, | ||||
| ) | ||||
| from ..service import ( | ||||
|     maybe_spawn_brokerd, | ||||
|     maybe_open_pikerd, | ||||
| ) | ||||
| from ..brokers import ( | ||||
|     core, | ||||
|     get_brokermod, | ||||
|     data, | ||||
| ) | ||||
| DEFAULT_BROKER = 'binance' | ||||
| from ..log import get_console_log, colorize_json, get_logger | ||||
| from .._daemon import maybe_spawn_brokerd, maybe_open_pikerd | ||||
| from ..brokers import core, get_brokermod, data | ||||
| 
 | ||||
| log = get_logger('cli') | ||||
| DEFAULT_BROKER = 'questrade' | ||||
| 
 | ||||
| _config_dir = click.get_app_dir('piker') | ||||
| _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json') | ||||
| 
 | ||||
| 
 | ||||
| OK = '\033[92m' | ||||
| WARNING = '\033[93m' | ||||
| FAIL = '\033[91m' | ||||
| ENDC = '\033[0m' | ||||
| 
 | ||||
| 
 | ||||
| def print_ok(s: str, **kwargs): | ||||
|     print(OK + s + ENDC, **kwargs) | ||||
| 
 | ||||
| 
 | ||||
| def print_error(s: str, **kwargs): | ||||
|     print(FAIL + s + ENDC, **kwargs) | ||||
| 
 | ||||
| 
 | ||||
| def get_method(client, meth_name: str): | ||||
|     print(f'checking client for method \'{meth_name}\'...', end='', flush=True) | ||||
|     method = getattr(client, meth_name, None) | ||||
|     assert method | ||||
|     print_ok('found!') | ||||
|     return method | ||||
| 
 | ||||
| 
 | ||||
| async def run_method(client, meth_name: str, **kwargs): | ||||
|     method = get_method(client, meth_name) | ||||
|     print('running...', end='', flush=True) | ||||
|     result = await method(**kwargs) | ||||
|     print_ok(f'done! result: {type(result)}') | ||||
|     return result | ||||
| 
 | ||||
| 
 | ||||
| async def run_test(broker_name: str): | ||||
|     brokermod = get_brokermod(broker_name) | ||||
|     total = 0 | ||||
|     passed = 0 | ||||
|     failed = 0 | ||||
| 
 | ||||
|     print('getting client...', end='', flush=True) | ||||
|     if not hasattr(brokermod, 'get_client'): | ||||
|         print_error('fail! no \'get_client\' context manager found.') | ||||
|         return | ||||
| 
 | ||||
|     async with brokermod.get_client(is_brokercheck=True) as client: | ||||
|         print_ok('done! inside client context.') | ||||
| 
 | ||||
|         # check for methods present on brokermod | ||||
|         method_list = [ | ||||
|             'backfill_bars', | ||||
|             'get_client', | ||||
|             'trades_dialogue', | ||||
|             'open_history_client', | ||||
|             'open_symbol_search', | ||||
|             'stream_quotes', | ||||
| 
 | ||||
|         ] | ||||
| 
 | ||||
|         for method in method_list: | ||||
|             print( | ||||
|                 f'checking brokermod for method \'{method}\'...', | ||||
|                 end='', flush=True) | ||||
|             if not hasattr(brokermod, method): | ||||
|                 print_error(f'fail! method \'{method}\' not found.') | ||||
|                 failed += 1 | ||||
|             else: | ||||
|                 print_ok('done!') | ||||
|                 passed += 1 | ||||
| 
 | ||||
|             total += 1 | ||||
| 
 | ||||
|         # check for methods present on brokermod.Client and their | ||||
|         # results | ||||
| 
 | ||||
|         # for private methods only check is present | ||||
|         method_list = [ | ||||
|             'get_balances', | ||||
|             'get_assets', | ||||
|             'get_trades', | ||||
|             'get_xfers', | ||||
|             'submit_limit', | ||||
|             'submit_cancel', | ||||
|             'search_symbols', | ||||
|         ] | ||||
| 
 | ||||
|         for method_name in method_list: | ||||
|             try: | ||||
|                 get_method(client, method_name) | ||||
|                 passed += 1 | ||||
| 
 | ||||
|             except AssertionError: | ||||
|                 print_error(f'fail! method \'{method_name}\' not found.') | ||||
|                 failed += 1 | ||||
| 
 | ||||
|             total += 1 | ||||
| 
 | ||||
|         # check for methods present on brokermod.Client and their | ||||
|         # results | ||||
| 
 | ||||
|         syms = await run_method(client, 'symbol_info') | ||||
|         total += 1 | ||||
| 
 | ||||
|         if len(syms) == 0: | ||||
|             raise RuntimeError('Empty symbol list?') | ||||
| 
 | ||||
|         passed += 1 | ||||
| 
 | ||||
|         first_sym = tuple(syms.keys())[0] | ||||
| 
 | ||||
|         method_list = [ | ||||
|             ('cache_symbols', {}), | ||||
|             ('search_symbols', {'pattern': first_sym[:-1]}), | ||||
|             ('bars', {'symbol': first_sym}) | ||||
|         ] | ||||
| 
 | ||||
|         for method_name, method_kwargs in method_list: | ||||
|             try: | ||||
|                 await run_method(client, method_name, **method_kwargs) | ||||
|                 passed += 1 | ||||
| 
 | ||||
|             except AssertionError: | ||||
|                 print_error(f'fail! method \'{method_name}\' not found.') | ||||
|                 failed += 1 | ||||
| 
 | ||||
|             total += 1 | ||||
| 
 | ||||
|         print(f'total: {total}, passed: {passed}, failed: {failed}') | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
| @click.argument('broker', nargs=1, required=True) | ||||
| @click.pass_obj | ||||
| def brokercheck(config, broker): | ||||
|     ''' | ||||
|     Test broker apis for completeness. | ||||
| 
 | ||||
|     ''' | ||||
|     async def bcheck_main(): | ||||
|         async with maybe_spawn_brokerd(broker) as portal: | ||||
|             await portal.run(run_test, broker) | ||||
|             await portal.cancel_actor() | ||||
| 
 | ||||
|     trio.run(bcheck_main) | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
| @click.option('--keys', '-k', multiple=True, | ||||
|             help='Return results only for these keys') | ||||
|               help='Return results only for these keys') | ||||
| @click.argument('meth', nargs=1) | ||||
| @click.argument('kwargs', nargs=-1) | ||||
| @click.pass_obj | ||||
|  | @ -242,7 +88,7 @@ def quote(config, tickers): | |||
| 
 | ||||
|     ''' | ||||
|     # global opts | ||||
|     brokermod = list(config['brokermods'].values())[0] | ||||
|     brokermod = config['brokermods'][0] | ||||
| 
 | ||||
|     quotes = trio.run(partial(core.stocks_quote, brokermod, tickers)) | ||||
|     if not quotes: | ||||
|  | @ -269,7 +115,7 @@ def bars(config, symbol, count): | |||
| 
 | ||||
|     ''' | ||||
|     # global opts | ||||
|     brokermod = list(config['brokermods'].values())[0] | ||||
|     brokermod = config['brokermods'][0] | ||||
| 
 | ||||
|     # broker backend should return at the least a | ||||
|     # list of candle dictionaries | ||||
|  | @ -304,7 +150,7 @@ def record(config, rate, name, dhost, filename): | |||
| 
 | ||||
|     ''' | ||||
|     # global opts | ||||
|     brokermod = list(config['brokermods'].values())[0] | ||||
|     brokermod = config['brokermods'][0] | ||||
|     loglevel = config['loglevel'] | ||||
|     log = config['log'] | ||||
| 
 | ||||
|  | @ -369,7 +215,7 @@ def optsquote(config, symbol, date): | |||
| 
 | ||||
|     ''' | ||||
|     # global opts | ||||
|     brokermod = list(config['brokermods'].values())[0] | ||||
|     brokermod = config['brokermods'][0] | ||||
| 
 | ||||
|     quotes = trio.run( | ||||
|         partial( | ||||
|  | @ -386,151 +232,58 @@ def optsquote(config, symbol, date): | |||
| @cli.command() | ||||
| @click.argument('tickers', nargs=-1, required=True) | ||||
| @click.pass_obj | ||||
| def mkt_info( | ||||
|     config: dict, | ||||
|     tickers: list[str], | ||||
| ): | ||||
| def symbol_info(config, tickers): | ||||
|     ''' | ||||
|     Print symbol quotes to the console | ||||
| 
 | ||||
|     ''' | ||||
|     from msgspec.json import encode, decode | ||||
|     from ..accounting import MktPair | ||||
|     from ..service import ( | ||||
|         open_piker_runtime, | ||||
|     ) | ||||
| 
 | ||||
|     # global opts | ||||
|     brokermods: dict[str, ModuleType] = config['brokermods'] | ||||
|     brokermod = config['brokermods'][0] | ||||
| 
 | ||||
|     mkts: list[MktPair] = [] | ||||
|     async def main(): | ||||
| 
 | ||||
|         async with open_piker_runtime( | ||||
|             name='mkt_info_query', | ||||
|             # loglevel=loglevel, | ||||
|             debug_mode=True, | ||||
| 
 | ||||
|         ) as (_, _): | ||||
|             for fqme in tickers: | ||||
|                 bs_fqme, _, broker = fqme.rpartition('.') | ||||
|                 brokermod: ModuleType = brokermods[broker] | ||||
|                 mkt, bs_pair = await core.mkt_info( | ||||
|                     brokermod, | ||||
|                     bs_fqme, | ||||
|                 ) | ||||
|                 mkts.append((mkt, bs_pair)) | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
|     if not mkts: | ||||
|         log.error( | ||||
|             f'No market info could be found for {tickers}' | ||||
|         ) | ||||
|     quotes = trio.run(partial(core.symbol_info, brokermod, tickers)) | ||||
|     if not quotes: | ||||
|         log.error(f"No quotes could be found for {tickers}?") | ||||
|         return | ||||
| 
 | ||||
|     if len(mkts) < len(tickers): | ||||
|         syms = tuple(mkt.fqme for mkt, _ in mkts) | ||||
|     if len(quotes) < len(tickers): | ||||
|         syms = tuple(map(itemgetter('symbol'), quotes)) | ||||
|         for ticker in tickers: | ||||
|             if ticker not in syms: | ||||
|                 log.warn(f"Could not find symbol {ticker}?") | ||||
|                 brokermod.log.warn(f"Could not find symbol {ticker}?") | ||||
| 
 | ||||
| 
 | ||||
|     # TODO: use ``rich.Table`` instead here! | ||||
|     for mkt, bs_pair in mkts: | ||||
|         click.echo( | ||||
|             '\n' | ||||
|             '----------------------------------------------------\n' | ||||
|             f'{type(bs_pair)}\n' | ||||
|             '----------------------------------------------------\n' | ||||
|             f'{colorize_json(bs_pair.to_dict())}\n' | ||||
|             '----------------------------------------------------\n' | ||||
|             f'as piker `MktPair` with fqme: {mkt.fqme}\n' | ||||
|             '----------------------------------------------------\n' | ||||
|             # NOTE: roundtrip to json codec for console print | ||||
|             f'{colorize_json(decode(encode(mkt)))}' | ||||
|         ) | ||||
|     click.echo(colorize_json(quotes)) | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
| @click.argument('pattern', required=True) | ||||
| # TODO: move this to top level click/typer context for all subs | ||||
| @click.option( | ||||
|     '--pdb', | ||||
|     is_flag=True, | ||||
|     help='Enable tractor debug mode', | ||||
| ) | ||||
| @click.pass_obj | ||||
| def search( | ||||
|     config: dict, | ||||
|     pattern: str, | ||||
|     pdb: bool, | ||||
| ): | ||||
| def search(config, pattern): | ||||
|     ''' | ||||
|     Search for symbols from broker backend(s). | ||||
| 
 | ||||
|     ''' | ||||
|     # global opts | ||||
|     brokermods = list(config['brokermods'].values()) | ||||
|     brokermods = config['brokermods'] | ||||
| 
 | ||||
|     # define tractor entrypoint | ||||
|     async def main(func): | ||||
| 
 | ||||
|         async with maybe_open_pikerd( | ||||
|             loglevel=config['loglevel'], | ||||
|             debug_mode=pdb, | ||||
|         ): | ||||
|             return await func() | ||||
| 
 | ||||
|     from piker.toolz import open_crash_handler | ||||
|     with open_crash_handler(): | ||||
|         quotes = trio.run( | ||||
|             main, | ||||
|             partial( | ||||
|                 core.symbol_search, | ||||
|                 brokermods, | ||||
|                 pattern, | ||||
|             ), | ||||
|         ) | ||||
|     quotes = trio.run( | ||||
|         main, | ||||
|         partial( | ||||
|             core.symbol_search, | ||||
|             brokermods, | ||||
|             pattern, | ||||
|         ), | ||||
|     ) | ||||
| 
 | ||||
|         if not quotes: | ||||
|             log.error(f"No matches could be found for {pattern}?") | ||||
|             return | ||||
|     if not quotes: | ||||
|         log.error(f"No matches could be found for {pattern}?") | ||||
|         return | ||||
| 
 | ||||
|         click.echo(colorize_json(quotes)) | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
| @click.argument('section', required=False) | ||||
| @click.argument('value', required=False) | ||||
| @click.option('--delete', '-d', flag_value=True, help='Delete section') | ||||
| @click.pass_obj | ||||
| def brokercfg(config, section, value, delete): | ||||
|     ''' | ||||
|     If invoked with no arguments, open an editor to edit broker | ||||
|     configs file or get / update an individual section. | ||||
| 
 | ||||
|     ''' | ||||
|     from .. import config | ||||
| 
 | ||||
|     if section: | ||||
|         conf, path = config.load() | ||||
| 
 | ||||
|         if not delete: | ||||
|             if value: | ||||
|                 config.set_value(conf, section, value) | ||||
| 
 | ||||
|             click.echo( | ||||
|                 colorize_json( | ||||
|                     config.get_value(conf, section)) | ||||
|             ) | ||||
|         else: | ||||
|             config.del_value(conf, section) | ||||
| 
 | ||||
|         config.write(config=conf) | ||||
| 
 | ||||
|     else: | ||||
|         conf, path = config.load(raw=True) | ||||
|         config.write( | ||||
|             raw=click.edit(text=conf) | ||||
|         ) | ||||
|     click.echo(colorize_json(quotes)) | ||||
|  |  | |||
|  | @ -26,11 +26,13 @@ from typing import List, Dict, Any, Optional | |||
| 
 | ||||
| import trio | ||||
| 
 | ||||
| from ._util import log | ||||
| from ..log import get_logger | ||||
| from . import get_brokermod | ||||
| from ..service import maybe_spawn_brokerd | ||||
| from . import open_cached_client | ||||
| from ..accounting import MktPair | ||||
| from .._daemon import maybe_spawn_brokerd | ||||
| from .._cacheables import open_cached_client | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| async def api(brokername: str, methname: str, **kwargs) -> dict: | ||||
|  | @ -95,15 +97,15 @@ async def option_chain( | |||
|             return await client.option_chains(contracts) | ||||
| 
 | ||||
| 
 | ||||
| # async def contracts( | ||||
| #     brokermod: ModuleType, | ||||
| #     symbol: str, | ||||
| # ) -> Dict[str, Dict[str, Dict[str, Any]]]: | ||||
| #     """Return option contracts (all expiries) for ``symbol``. | ||||
| #     """ | ||||
| #     async with brokermod.get_client() as client: | ||||
| #         # return await client.get_all_contracts([symbol]) | ||||
| #         return await client.get_all_contracts([symbol]) | ||||
| async def contracts( | ||||
|     brokermod: ModuleType, | ||||
|     symbol: str, | ||||
| ) -> Dict[str, Dict[str, Dict[str, Any]]]: | ||||
|     """Return option contracts (all expiries) for ``symbol``. | ||||
|     """ | ||||
|     async with brokermod.get_client() as client: | ||||
|         # return await client.get_all_contracts([symbol]) | ||||
|         return await client.get_all_contracts([symbol]) | ||||
| 
 | ||||
| 
 | ||||
| async def bars( | ||||
|  | @ -117,6 +119,17 @@ async def bars( | |||
|         return await client.bars(symbol, **kwargs) | ||||
| 
 | ||||
| 
 | ||||
| async def symbol_info( | ||||
|     brokermod: ModuleType, | ||||
|     symbol: str, | ||||
|     **kwargs, | ||||
| ) -> Dict[str, Dict[str, Dict[str, Any]]]: | ||||
|     """Return symbol info from broker. | ||||
|     """ | ||||
|     async with brokermod.get_client() as client: | ||||
|         return await client.symbol_info(symbol, **kwargs) | ||||
| 
 | ||||
| 
 | ||||
| async def search_w_brokerd(name: str, pattern: str) -> dict: | ||||
| 
 | ||||
|     async with open_cached_client(name) as client: | ||||
|  | @ -145,11 +158,7 @@ async def symbol_search( | |||
| 
 | ||||
|         async with maybe_spawn_brokerd( | ||||
|             mod.name, | ||||
|             infect_asyncio=getattr( | ||||
|                 mod, | ||||
|                 '_infect_asyncio', | ||||
|                 False, | ||||
|             ), | ||||
|             infect_asyncio=getattr(mod, '_infect_asyncio', False), | ||||
|         ) as portal: | ||||
| 
 | ||||
|             results.append(( | ||||
|  | @ -167,20 +176,3 @@ async def symbol_search( | |||
|             n.start_soon(search_backend, mod.name) | ||||
| 
 | ||||
|     return results | ||||
| 
 | ||||
| 
 | ||||
| async def mkt_info( | ||||
|     brokermod: ModuleType, | ||||
|     fqme: str, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> MktPair: | ||||
|     ''' | ||||
|     Return MktPair info from broker including src and dst assets. | ||||
| 
 | ||||
|     ''' | ||||
|     async with open_cached_client(brokermod.name) as client: | ||||
|         assert client | ||||
|         return await brokermod.get_mkt_info( | ||||
|             fqme.replace(brokermod.name, '') | ||||
|         ) | ||||
|  |  | |||
|  | @ -41,13 +41,13 @@ import tractor | |||
| from tractor.experimental import msgpub | ||||
| from async_generator import asynccontextmanager | ||||
| 
 | ||||
| from ._util import ( | ||||
|     log, | ||||
|     get_console_log, | ||||
| ) | ||||
| from ..log import get_logger, get_console_log | ||||
| from . import get_brokermod | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| async def wait_for_network( | ||||
|     net_func: Callable, | ||||
|     sleep: int = 1 | ||||
|  | @ -227,28 +227,26 @@ async def get_cached_feed( | |||
| 
 | ||||
| @tractor.stream | ||||
| async def start_quote_stream( | ||||
|     stream: tractor.Context,  # marks this as a streaming func | ||||
|     ctx: tractor.Context,  # marks this as a streaming func | ||||
|     broker: str, | ||||
|     symbols: List[Any], | ||||
|     feed_type: str = 'stock', | ||||
|     rate: int = 3, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Handle per-broker quote stream subscriptions using a "lazy" pub-sub | ||||
|     """Handle per-broker quote stream subscriptions using a "lazy" pub-sub | ||||
|     pattern. | ||||
| 
 | ||||
|     Spawns new quoter tasks for each broker backend on-demand. | ||||
|     Since most brokers seem to support batch quote requests we | ||||
|     limit to one task per process (for now). | ||||
| 
 | ||||
|     ''' | ||||
|     """ | ||||
|     # XXX: why do we need this again? | ||||
|     get_console_log(tractor.current_actor().loglevel) | ||||
| 
 | ||||
|     # pull global vars from local actor | ||||
|     symbols = list(symbols) | ||||
|     log.info( | ||||
|         f"{stream.chan.uid} subscribed to {broker} for symbols {symbols}") | ||||
|         f"{ctx.chan.uid} subscribed to {broker} for symbols {symbols}") | ||||
|     # another actor task may have already created it | ||||
|     async with get_cached_feed(broker) as feed: | ||||
| 
 | ||||
|  | @ -292,13 +290,13 @@ async def start_quote_stream( | |||
|             assert fquote['displayable'] | ||||
|             payload[sym] = fquote | ||||
| 
 | ||||
|         await stream.send_yield(payload) | ||||
|         await ctx.send_yield(payload) | ||||
| 
 | ||||
|         await stream_poll_requests( | ||||
| 
 | ||||
|             # ``trionics.msgpub`` required kwargs | ||||
|             task_name=feed_type, | ||||
|             ctx=stream, | ||||
|             ctx=ctx, | ||||
|             topics=symbols, | ||||
|             packetizer=feed.mod.packetizer, | ||||
| 
 | ||||
|  | @ -321,11 +319,9 @@ async def call_client( | |||
| 
 | ||||
| 
 | ||||
| class DataFeed: | ||||
|     ''' | ||||
|     Data feed client for streaming symbol data from and making API | ||||
|     client calls to a (remote) ``brokerd`` daemon. | ||||
| 
 | ||||
|     ''' | ||||
|     """Data feed client for streaming symbol data from and making API client calls | ||||
|     to a (remote) ``brokerd`` daemon. | ||||
|     """ | ||||
|     _allowed = ('stock', 'option') | ||||
| 
 | ||||
|     def __init__(self, portal, brokermod): | ||||
|  |  | |||
|  | @ -1,70 +0,0 @@ | |||
| ``deribit`` backend | ||||
| ------------------- | ||||
| crypto derivatives venue with pretty good liquidity; uses a custom | ||||
| json-rpc over websocket api for client methods and `cryptofeed` for | ||||
| data streams. | ||||
| 
 | ||||
| status | ||||
| ****** | ||||
| - supports option charts | ||||
| - no order support yet  | ||||
| 
 | ||||
| 
 | ||||
| config | ||||
| ****** | ||||
| In order to get order mode support, your ``brokers.toml`` | ||||
| needs to have something like the following: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|     [deribit] | ||||
|     key_id = 'XXXXXXXX' | ||||
|     key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx' | ||||
| 
 | ||||
| To obtain an api id and secret you need to create an account, which can be a | ||||
| real market account over at: | ||||
| 
 | ||||
|     - deribit.com  (requires KYC for deposit address) | ||||
| 
 | ||||
| Or a testnet account over at: | ||||
| 
 | ||||
|     - test.deribit.com | ||||
| 
 | ||||
| For testnet, once the account is created, here is how you deposit | ||||
| fake crypto to try it out: | ||||
| 
 | ||||
| 1) Go to Wallet: | ||||
| 
 | ||||
| .. figure:: assets/0_wallet.png | ||||
|     :align: center | ||||
|     :target: assets/0_wallet.png | ||||
|     :alt: wallet page | ||||
| 
 | ||||
| 2) Then click on the ellipsis menu and select deposit | ||||
| 
 | ||||
| .. figure:: assets/1_wallet_select_deposit.png | ||||
|     :align: center | ||||
|     :target: assets/1_wallet_select_deposit.png | ||||
|     :alt: wallet deposit page | ||||
| 
 | ||||
| 3) This will take you to the deposit address page | ||||
| 
 | ||||
| .. figure:: assets/2_gen_deposit_addr.png | ||||
|     :align: center | ||||
|     :target: assets/2_gen_deposit_addr.png | ||||
|     :alt: generate deposit address page | ||||
| 
 | ||||
| 4) After clicking generate you should see the address; copy it, go to the | ||||
| `coin faucet <https://test.deribit.com/dericoin/BTC/deposit>`_, and send fake | ||||
| coins to that address. | ||||
| 
 | ||||
| .. figure:: assets/3_deposit_address.png | ||||
|     :align: center | ||||
|     :target: assets/3_deposit_address.png | ||||
|     :alt: generated address | ||||
| 
 | ||||
| 5) Back in the deposit address page you should see the deposit in your history | ||||
| 
 | ||||
| .. figure:: assets/4_wallet_deposit_history.png | ||||
|     :align: center | ||||
|     :target: assets/4_wallet_deposit_history.png | ||||
|     :alt: wallet deposit history | ||||
|  | @ -1,65 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Deribit backend. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| from piker.log import get_logger | ||||
| 
 | ||||
| from .api import ( | ||||
|     get_client, | ||||
| ) | ||||
| from .feed import ( | ||||
|     open_history_client, | ||||
|     open_symbol_search, | ||||
|     stream_quotes, | ||||
|     # backfill_bars, | ||||
| ) | ||||
| # from .broker import ( | ||||
|     # open_trade_dialog, | ||||
|     # norm_trade_records, | ||||
| # ) | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'get_client', | ||||
| #    'trades_dialogue', | ||||
|     'open_history_client', | ||||
|     'open_symbol_search', | ||||
|     'stream_quotes', | ||||
| #    'norm_trade_records', | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| # tractor RPC enable arg | ||||
| __enable_modules__: list[str] = [ | ||||
|     'api', | ||||
|     'feed', | ||||
| #   'broker', | ||||
| ] | ||||
| 
 | ||||
| # passed to ``tractor.ActorNursery.start_actor()`` | ||||
| _spawn_kwargs = { | ||||
|     'infect_asyncio': True, | ||||
| } | ||||
| 
 | ||||
| # annotation to let backend agnostic code | ||||
| # know if ``brokerd`` should be spawned with | ||||
| # ``tractor``'s aio mode. | ||||
| _infect_asyncio: bool = True | ||||
|  | @ -1,675 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Deribit backend. | ||||
| 
 | ||||
| ''' | ||||
| import asyncio | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from datetime import datetime | ||||
| from functools import partial | ||||
| import time | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Callable, | ||||
| ) | ||||
| 
 | ||||
| import pendulum | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| from rapidfuzz import process as fuzzy | ||||
| import numpy as np | ||||
| from tractor.trionics import ( | ||||
|     broadcast_receiver, | ||||
|     maybe_open_context | ||||
| ) | ||||
| from tractor import to_asyncio | ||||
| # XXX WOOPS XD | ||||
| # yeah you'll need to install it since it was removed in #489 by | ||||
| # accident; well i thought we had removed all usage.. | ||||
| from cryptofeed import FeedHandler | ||||
| from cryptofeed.defines import ( | ||||
|     DERIBIT, | ||||
|     L1_BOOK, TRADES, | ||||
|     OPTION, CALL, PUT | ||||
| ) | ||||
| from cryptofeed.symbols import Symbol | ||||
| 
 | ||||
| from piker.data import ( | ||||
|     def_iohlcv_fields, | ||||
|     match_from_pairs, | ||||
|     Struct, | ||||
| ) | ||||
| from piker.data._web_bs import ( | ||||
|     open_jsonrpc_session | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| from piker import config | ||||
| from piker.log import get_logger | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| _spawn_kwargs = { | ||||
|     'infect_asyncio': True, | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| _url = 'https://www.deribit.com' | ||||
| _ws_url = 'wss://www.deribit.com/ws/api/v2' | ||||
| _testnet_ws_url = 'wss://test.deribit.com/ws/api/v2' | ||||
| 
 | ||||
| 
 | ||||
| class JSONRPCResult(Struct): | ||||
|     # NOTE: required fields must precede defaulted ones per | ||||
|     # ``msgspec.Struct`` field-ordering rules. | ||||
|     id: int | ||||
|     usIn: int | ||||
|     usOut: int | ||||
|     usDiff: int | ||||
|     testnet: bool | ||||
|     jsonrpc: str = '2.0' | ||||
|     result: Optional[list[dict]] = None | ||||
|     error: Optional[dict] = None | ||||
| 
 | ||||
| class JSONRPCChannel(Struct): | ||||
|     method: str | ||||
|     params: dict | ||||
|     jsonrpc: str = '2.0' | ||||
| 
 | ||||
| 
 | ||||
| class KLinesResult(Struct): | ||||
|     close: list[float] | ||||
|     cost: list[float] | ||||
|     high: list[float] | ||||
|     low: list[float] | ||||
|     open: list[float] | ||||
|     status: str | ||||
|     ticks: list[int] | ||||
|     volume: list[float] | ||||
| 
 | ||||
| class Trade(Struct): | ||||
|     trade_seq: int | ||||
|     trade_id: str | ||||
|     timestamp: int | ||||
|     tick_direction: int | ||||
|     price: float | ||||
|     mark_price: float | ||||
|     iv: float | ||||
|     instrument_name: str | ||||
|     index_price: float | ||||
|     direction: str | ||||
|     amount: float | ||||
|     combo_trade_id: Optional[int] = 0 | ||||
|     combo_id: Optional[str] = '' | ||||
| 
 | ||||
| class LastTradesResult(Struct): | ||||
|     trades: list[Trade] | ||||
|     has_more: bool | ||||
| 
 | ||||
| 
 | ||||
| # convert a datetime obj to a unix epoch timestamp in milliseconds | ||||
| def deribit_timestamp(when: datetime) -> int: | ||||
|     # NOTE: ``.timestamp()`` already includes the fractional | ||||
|     # (micro)seconds so don't add them in again. | ||||
|     return int(when.timestamp() * 1000) | ||||
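| 
 | ||||
| # e.g. deribit_timestamp(pendulum.parse('2023-01-01T00:00:00Z')) | ||||
| # -> 1672531200000 | ||||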
| 
 | ||||
| 
 | ||||
| def str_to_cb_sym(name: str) -> Symbol: | ||||
|     base, strike_price, expiry_date, option_type = name.split('-') | ||||
| 
 | ||||
|     quote = base | ||||
| 
 | ||||
|     if option_type == 'put': | ||||
|         option_type = PUT  | ||||
|     elif option_type  == 'call': | ||||
|         option_type = CALL | ||||
|     else: | ||||
|         raise Exception("Couldn\'t parse option type") | ||||
| 
 | ||||
|     return Symbol( | ||||
|         base, quote, | ||||
|         type=OPTION, | ||||
|         strike_price=strike_price, | ||||
|         option_type=option_type, | ||||
|         expiry_date=expiry_date, | ||||
|         expiry_normalize=False) | ||||
| 
 | ||||
| 
 | ||||
| def piker_sym_to_cb_sym(name: str) -> Symbol: | ||||
|     base, expiry_date, strike_price, option_type = tuple( | ||||
|         name.upper().split('-')) | ||||
| 
 | ||||
|     quote = base | ||||
| 
 | ||||
|     if option_type == 'P': | ||||
|         option_type = PUT  | ||||
|     elif option_type  == 'C': | ||||
|         option_type = CALL | ||||
|     else: | ||||
|         raise Exception("Couldn\'t parse option type") | ||||
| 
 | ||||
|     return Symbol( | ||||
|         base, quote, | ||||
|         type=OPTION, | ||||
|         strike_price=strike_price, | ||||
|         option_type=option_type, | ||||
|         expiry_date=expiry_date.upper()) | ||||
| 
 | ||||
| 
 | ||||
| def cb_sym_to_deribit_inst(sym: Symbol): | ||||
|     # cryptofeed normalized | ||||
|     cb_norm = ['F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z'] | ||||
| 
 | ||||
|     # deribit specific  | ||||
|     months = ['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] | ||||
| 
 | ||||
|     exp = sym.expiry_date | ||||
| 
 | ||||
|     # YYMDD | ||||
|     # 01234 | ||||
|     year, month, day = ( | ||||
|         exp[:2], months[cb_norm.index(exp[2:3])], exp[3:]) | ||||
| 
 | ||||
|     otype = 'C' if sym.option_type == CALL else 'P' | ||||
| 
 | ||||
|     return f'{sym.base}-{day}{month}{year}-{sym.strike_price}-{otype}' | ||||
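| 
 | ||||
| # e.g. a cryptofeed ``Symbol`` with ``.expiry_date='23M26'`` (YYMDD | ||||
| # with futes month-code 'M' -> 'JUN'), ``.base='BTC'``, | ||||
| # ``.strike_price='30000'`` and a CALL ``.option_type`` renders as | ||||
| # 'BTC-26JUN23-30000-C'. | ||||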
| 
 | ||||
| 
 | ||||
| def get_config() -> dict[str, Any]: | ||||
| 
 | ||||
|     conf, path = config.load() | ||||
| 
 | ||||
|     section = conf.get('deribit') | ||||
| 
 | ||||
|     # pass the logging params expected by ``cryptofeed``; disabled | ||||
|     # here to mute its internal logging. | ||||
|     conf['log'] = {} | ||||
|     conf['log']['disabled'] = True | ||||
| 
 | ||||
|     if section is None: | ||||
|         log.warning(f'No config section found for deribit in {path}') | ||||
| 
 | ||||
|     return conf  | ||||
| 
 | ||||
| 
 | ||||
| class Client: | ||||
| 
 | ||||
|     def __init__(self, json_rpc: Callable) -> None: | ||||
|         self._pairs: dict[str, Any] = None | ||||
| 
 | ||||
|         config = get_config().get('deribit', {}) | ||||
| 
 | ||||
|         if ('key_id' in config) and ('key_secret' in config): | ||||
|             self._key_id = config['key_id'] | ||||
|             self._key_secret = config['key_secret'] | ||||
| 
 | ||||
|         else: | ||||
|             self._key_id = None | ||||
|             self._key_secret = None | ||||
| 
 | ||||
|         self.json_rpc = json_rpc | ||||
| 
 | ||||
|     @property | ||||
|     def currencies(self): | ||||
|         return ['btc', 'eth', 'sol', 'usd'] | ||||
| 
 | ||||
|     async def get_balances(self, kind: str = 'option') -> dict[str, float]: | ||||
|         """Return the set of positions for this account | ||||
|         by symbol. | ||||
|         """ | ||||
|         balances = {} | ||||
| 
 | ||||
|         for currency in self.currencies: | ||||
|             resp = await self.json_rpc( | ||||
|                 'private/get_positions', params={ | ||||
|                     'currency': currency.upper(), | ||||
|                     'kind': kind}) | ||||
| 
 | ||||
|             balances[currency] = resp.result | ||||
| 
 | ||||
|         return balances | ||||
| 
 | ||||
|     async def get_assets(self) -> dict[str, float]: | ||||
|         """Return the set of asset balances for this account | ||||
|         by symbol. | ||||
|         """ | ||||
|         balances = {} | ||||
| 
 | ||||
|         for currency in self.currencies: | ||||
|             resp = await self.json_rpc( | ||||
|                 'private/get_account_summary', params={ | ||||
|                     'currency': currency.upper()}) | ||||
| 
 | ||||
|             balances[currency] = resp.result['balance'] | ||||
| 
 | ||||
|         return balances | ||||
| 
 | ||||
|     async def submit_limit( | ||||
|         self, | ||||
|         symbol: str, | ||||
|         price: float, | ||||
|         action: str, | ||||
|         size: float | ||||
|     ) -> dict: | ||||
|         """Place an order | ||||
|         """ | ||||
|         params = { | ||||
|             'instrument_name': symbol.upper(), | ||||
|             'amount': size, | ||||
|             'type': 'limit', | ||||
|             'price': price, | ||||
|         } | ||||
|         resp = await self.json_rpc( | ||||
|             f'private/{action}', params) | ||||
| 
 | ||||
|         return resp.result | ||||
| 
 | ||||
|     async def submit_cancel(self, oid: str): | ||||
|         """Send cancel request for order id | ||||
|         """ | ||||
|         resp = await self.json_rpc( | ||||
|             'private/cancel', {'order_id': oid}) | ||||
|         return resp.result | ||||
| 
 | ||||
|     async def symbol_info( | ||||
|         self, | ||||
|         instrument: Optional[str] = None, | ||||
|         currency: str = 'btc',  # BTC, ETH, SOL, USDC | ||||
|         kind: str = 'option', | ||||
|         expired: bool = False | ||||
| 
 | ||||
|     ) -> dict[str, dict]: | ||||
|         ''' | ||||
|         Get symbol infos. | ||||
| 
 | ||||
|         ''' | ||||
|         if self._pairs: | ||||
|             return self._pairs | ||||
| 
 | ||||
|         # will retrieve all symbols by default | ||||
|         params: dict[str, str] = { | ||||
|             'currency': currency.upper(), | ||||
|             'kind': kind, | ||||
|             'expired': str(expired).lower() | ||||
|         } | ||||
| 
 | ||||
|         resp: JSONRPCResult = await self.json_rpc( | ||||
|             'public/get_instruments', | ||||
|             params, | ||||
|         ) | ||||
|         # convert to symbol-keyed table | ||||
|         results: list[dict] | None = resp.result | ||||
|         instruments: dict[str, dict] = { | ||||
|             item['instrument_name'].lower(): item | ||||
|             for item in results | ||||
|         } | ||||
| 
 | ||||
|         if instrument is not None: | ||||
|             return instruments[instrument] | ||||
|         else: | ||||
|             return instruments | ||||
| 
 | ||||
|     async def cache_symbols( | ||||
|         self, | ||||
|     ) -> dict: | ||||
| 
 | ||||
|         if not self._pairs: | ||||
|             self._pairs = await self.symbol_info() | ||||
| 
 | ||||
|         return self._pairs | ||||
| 
 | ||||
|     async def search_symbols( | ||||
|         self, | ||||
|         pattern: str, | ||||
|         limit: int = 30, | ||||
|     ) -> dict[str, Any]: | ||||
|         ''' | ||||
|         Fuzzy search symbology set for pairs matching `pattern`. | ||||
| 
 | ||||
|         ''' | ||||
|         pairs: dict[str, Any] = await self.symbol_info() | ||||
|         matches: dict[str, Any] = match_from_pairs( | ||||
|             pairs=pairs, | ||||
|             query=pattern.upper(), | ||||
|             score_cutoff=35, | ||||
|             limit=limit | ||||
|         ) | ||||
| 
 | ||||
|         # repack in name-keyed table | ||||
|         return { | ||||
|             pair['instrument_name'].lower(): pair | ||||
|             for pair in matches.values() | ||||
|         } | ||||
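| 
 | ||||
|     # e.g. ``await client.search_symbols('btc-28')`` delivers an | ||||
|     # instrument-name-keyed table of fuzzy matches scored by | ||||
|     # ``match_from_pairs()`` with a cutoff of 35. | ||||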
| 
 | ||||
|     async def bars( | ||||
|         self, | ||||
|         symbol: str, | ||||
|         start_dt: Optional[datetime] = None, | ||||
|         end_dt: Optional[datetime] = None, | ||||
|         limit: int = 1000, | ||||
|         as_np: bool = True, | ||||
|     ) -> dict: | ||||
|         instrument = symbol | ||||
| 
 | ||||
|         if end_dt is None: | ||||
|             end_dt = pendulum.now('UTC') | ||||
| 
 | ||||
|         if start_dt is None: | ||||
|             start_dt = end_dt.start_of( | ||||
|                 'minute').subtract(minutes=limit) | ||||
| 
 | ||||
|         start_time = deribit_timestamp(start_dt) | ||||
|         end_time = deribit_timestamp(end_dt) | ||||
| 
 | ||||
|         # https://docs.deribit.com/#public-get_tradingview_chart_data | ||||
|         resp = await self.json_rpc( | ||||
|             'public/get_tradingview_chart_data', | ||||
|             params={ | ||||
|                 'instrument_name': instrument.upper(), | ||||
|                 'start_timestamp': start_time, | ||||
|                 'end_timestamp': end_time, | ||||
|                 'resolution': '1' | ||||
|             }) | ||||
| 
 | ||||
|         result = KLinesResult(**resp.result) | ||||
|         new_bars = [] | ||||
|         for i in range(len(result.close)): | ||||
| 
 | ||||
|             _open = result.open[i] | ||||
|             high = result.high[i] | ||||
|             low = result.low[i] | ||||
|             close = result.close[i] | ||||
|             volume = result.volume[i] | ||||
| 
 | ||||
|             row = [ | ||||
|                 (start_time + (i * (60 * 1000))) / 1000.0,  # 1m bar start time in s | ||||
|                 _open, | ||||
|                 high, | ||||
|                 low, | ||||
|                 close, | ||||
|                 volume, | ||||
|                 0 | ||||
|             ] | ||||
| 
 | ||||
|             new_bars.append((i,) + tuple(row)) | ||||
| 
 | ||||
|         array = np.array(new_bars, dtype=def_iohlcv_fields) if as_np else new_bars | ||||
|         return array | ||||
| 
 | ||||
|     async def last_trades( | ||||
|         self, | ||||
|         instrument: str, | ||||
|         count: int = 10 | ||||
|     ): | ||||
|         resp = await self.json_rpc( | ||||
|             'public/get_last_trades_by_instrument', | ||||
|             params={ | ||||
|                 'instrument_name': instrument, | ||||
|                 'count': count | ||||
|             }) | ||||
| 
 | ||||
|         return LastTradesResult(**resp.result) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_client( | ||||
|     is_brokercheck: bool = False | ||||
| ) -> Client: | ||||
| 
 | ||||
|     async with ( | ||||
|         trio.open_nursery() as n, | ||||
|         open_jsonrpc_session( | ||||
|             _testnet_ws_url, dtype=JSONRPCResult) as json_rpc | ||||
|     ): | ||||
|         client = Client(json_rpc) | ||||
| 
 | ||||
|         _refresh_token: Optional[str] = None | ||||
|         _access_token: Optional[str] = None | ||||
| 
 | ||||
|         async def _auth_loop( | ||||
|             task_status: TaskStatus = trio.TASK_STATUS_IGNORED | ||||
|         ): | ||||
|             """Background task that adquires a first access token and then will | ||||
|             refresh the access token while the nursery isn't cancelled. | ||||
| 
 | ||||
|             https://docs.deribit.com/?python#authentication-2 | ||||
|             """ | ||||
|             renew_time = 10 | ||||
|             access_scope = 'trade:read_write' | ||||
|             _expiry_time = time.time() | ||||
|             got_access = False | ||||
|             nonlocal _refresh_token | ||||
|             nonlocal _access_token | ||||
| 
 | ||||
|             while True: | ||||
|                 if _expiry_time - time.time() < renew_time: | ||||
|                     # if we are close to (or past) token expiry time | ||||
| 
 | ||||
|                     if _refresh_token is not None: | ||||
|                         # if we already have a refresh token we don't | ||||
|                         # need to send the secret | ||||
|                         params = { | ||||
|                             'grant_type': 'refresh_token', | ||||
|                             'refresh_token': _refresh_token, | ||||
|                             'scope': access_scope | ||||
|                         } | ||||
| 
 | ||||
|                     else: | ||||
|                         # no refresh token yet, send the secret to initialize | ||||
|                         params = { | ||||
|                             'grant_type': 'client_credentials', | ||||
|                             'client_id': client._key_id, | ||||
|                             'client_secret': client._key_secret, | ||||
|                             'scope': access_scope | ||||
|                         } | ||||
| 
 | ||||
|                     resp = await json_rpc('public/auth', params) | ||||
|                     result = resp.result | ||||
| 
 | ||||
|                     _expiry_time = time.time() + result['expires_in'] | ||||
|                     _refresh_token = result['refresh_token'] | ||||
| 
 | ||||
|                     if 'access_token' in result: | ||||
|                         _access_token = result['access_token'] | ||||
| 
 | ||||
|                     if not got_access: | ||||
|                         # first time through the loop: signal the task | ||||
|                         # has started and that we are authenticated | ||||
|                         got_access = True | ||||
|                         task_status.started() | ||||
| 
 | ||||
|                 else: | ||||
|                     await trio.sleep(renew_time / 2) | ||||
| 
 | ||||
|         # if we have client creds launch auth loop | ||||
|         if client._key_id is not None: | ||||
|             await n.start(_auth_loop) | ||||
| 
 | ||||
|         await client.cache_symbols() | ||||
|         yield client | ||||
|         n.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_feed_handler(): | ||||
|     fh = FeedHandler(config=get_config()) | ||||
|     yield fh | ||||
|     await to_asyncio.run_task(fh.stop_async) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_feed_handler() -> FeedHandler: | ||||
|     async with maybe_open_context( | ||||
|         acm_func=open_feed_handler, | ||||
|         key='feedhandler', | ||||
|     ) as (cache_hit, fh): | ||||
|         yield fh | ||||
| 
 | ||||
| 
 | ||||
| async def aio_price_feed_relay( | ||||
|     fh: FeedHandler, | ||||
|     instrument: Symbol, | ||||
|     from_trio: asyncio.Queue, | ||||
|     to_trio: trio.abc.SendChannel, | ||||
| ) -> None: | ||||
|     async def _trade(data: dict, receipt_timestamp): | ||||
|         to_trio.send_nowait(('trade', { | ||||
|             'symbol': cb_sym_to_deribit_inst( | ||||
|                 str_to_cb_sym(data.symbol)).lower(), | ||||
|             'last': data, | ||||
|             'broker_ts': time.time(), | ||||
|             'data': data.to_dict(), | ||||
|             'receipt': receipt_timestamp | ||||
|         })) | ||||
| 
 | ||||
|     async def _l1(data: dict, receipt_timestamp): | ||||
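|         # relay book updates as piker style L1 bid/ask tick msgs. | ||||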
|         to_trio.send_nowait(('l1', { | ||||
|             'symbol': cb_sym_to_deribit_inst( | ||||
|                 str_to_cb_sym(data.symbol)).lower(), | ||||
|             'ticks': [ | ||||
|                 {'type': 'bid', | ||||
|                     'price': float(data.bid_price), 'size': float(data.bid_size)}, | ||||
|                 {'type': 'bsize', | ||||
|                     'price': float(data.bid_price), 'size': float(data.bid_size)}, | ||||
|                 {'type': 'ask', | ||||
|                     'price': float(data.ask_price), 'size': float(data.ask_size)}, | ||||
|                 {'type': 'asize', | ||||
|                     'price': float(data.ask_price), 'size': float(data.ask_size)} | ||||
|             ] | ||||
|         })) | ||||
| 
 | ||||
|     fh.add_feed( | ||||
|         DERIBIT, | ||||
|         channels=[TRADES, L1_BOOK], | ||||
|         symbols=[piker_sym_to_cb_sym(instrument)], | ||||
|         callbacks={ | ||||
|             TRADES: _trade, | ||||
|             L1_BOOK: _l1 | ||||
|         }) | ||||
| 
 | ||||
|     if not fh.running: | ||||
|         fh.run( | ||||
|             start_loop=False, | ||||
|             install_signal_handlers=False) | ||||
| 
 | ||||
|     # sync with trio | ||||
|     to_trio.send_nowait(None) | ||||
| 
 | ||||
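|     # then block this (asyncio) side forever; quotes are relayed to | ||||
|     # trio via the callbacks registered above. | ||||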
|     await asyncio.sleep(float('inf')) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_price_feed( | ||||
|     instrument: str | ||||
| ) -> trio.abc.ReceiveStream: | ||||
|     async with maybe_open_feed_handler() as fh: | ||||
|         async with to_asyncio.open_channel_from( | ||||
|             partial( | ||||
|                 aio_price_feed_relay, | ||||
|                 fh, | ||||
|                 instrument | ||||
|             ) | ||||
|         ) as (first, chan): | ||||
|             yield chan | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_price_feed( | ||||
|     instrument: str | ||||
| ) -> trio.abc.ReceiveStream: | ||||
| 
 | ||||
|     # TODO: add a predicate to maybe_open_context | ||||
|     async with maybe_open_context( | ||||
|         acm_func=open_price_feed, | ||||
|         kwargs={ | ||||
|             'instrument': instrument | ||||
|         }, | ||||
|         key=f'{instrument}-price', | ||||
|     ) as (cache_hit, feed): | ||||
|         if cache_hit: | ||||
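|             # the feed is already owned by another consumer so wrap | ||||
|             # it in a broadcast channel to fan-out quotes to this | ||||
|             # additional subscriber. | ||||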
|             yield broadcast_receiver(feed, 10) | ||||
|         else: | ||||
|             yield feed | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| async def aio_order_feed_relay( | ||||
|     fh: FeedHandler, | ||||
|     instrument: Symbol, | ||||
|     from_trio: asyncio.Queue, | ||||
|     to_trio: trio.abc.SendChannel, | ||||
| ) -> None: | ||||
|     async def _fill(data: dict, receipt_timestamp): | ||||
|         breakpoint() | ||||
| 
 | ||||
|     async def _order_info(data: dict, receipt_timestamp): | ||||
|         breakpoint() | ||||
| 
 | ||||
|     fh.add_feed( | ||||
|         DERIBIT, | ||||
|         channels=[FILLS, ORDER_INFO], | ||||
|         symbols=[instrument.upper()], | ||||
|         callbacks={ | ||||
|             FILLS: _fill, | ||||
|             ORDER_INFO: _order_info, | ||||
|         }) | ||||
| 
 | ||||
|     if not fh.running: | ||||
|         fh.run( | ||||
|             start_loop=False, | ||||
|             install_signal_handlers=False) | ||||
| 
 | ||||
|     # sync with trio | ||||
|     to_trio.send_nowait(None) | ||||
| 
 | ||||
|     await asyncio.sleep(float('inf')) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_order_feed( | ||||
|     instrument: str | ||||
| ) -> trio.abc.ReceiveStream: | ||||
|     async with maybe_open_feed_handler() as fh: | ||||
|         async with to_asyncio.open_channel_from( | ||||
|             partial( | ||||
|                 aio_order_feed_relay, | ||||
|                 fh, | ||||
|                 instrument | ||||
|             ) | ||||
|         ) as (first, chan): | ||||
|             yield chan | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_open_order_feed( | ||||
|     instrument: str | ||||
| ) -> trio.abc.ReceiveStream: | ||||
| 
 | ||||
|     # TODO: add a predicate to maybe_open_context | ||||
|     async with maybe_open_context( | ||||
|         acm_func=open_order_feed, | ||||
|         kwargs={ | ||||
|             'instrument': instrument | ||||
|         }, | ||||
|         key=f'{instrument}-order', | ||||
|     ) as (cache_hit, feed): | ||||
|         if cache_hit: | ||||
|             yield broadcast_receiver(feed, 10) | ||||
|         else: | ||||
|             yield feed | ||||
										
|  | @ -1,185 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Deribit backend. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from datetime import datetime | ||||
| from typing import Any, Optional, Callable | ||||
| import time | ||||
| 
 | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| import pendulum | ||||
| from rapidfuzz import process as fuzzy | ||||
| import numpy as np | ||||
| import tractor | ||||
| 
 | ||||
| from piker.brokers import open_cached_client | ||||
| from piker.log import get_logger, get_console_log | ||||
| from piker.data import ShmArray | ||||
| from piker.accounting import MktPair | ||||
| from piker.brokers._util import ( | ||||
|     BrokerError, | ||||
|     DataUnavailable, | ||||
| ) | ||||
| 
 | ||||
| from cryptofeed import FeedHandler | ||||
| from cryptofeed.defines import ( | ||||
|     DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT | ||||
| ) | ||||
| from cryptofeed.symbols import Symbol | ||||
| 
 | ||||
| from .api import ( | ||||
|     Client, Trade, | ||||
|     get_config, | ||||
|     str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst, | ||||
|     maybe_open_price_feed | ||||
| ) | ||||
| 
 | ||||
| _spawn_kwargs = { | ||||
|     'infect_asyncio': True, | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_history_client( | ||||
|     mkt: MktPair, | ||||
| ) -> tuple[Callable, int]: | ||||
| 
 | ||||
|     instrument: str = mkt.bs_fqme | ||||
|     # TODO implement history getter for the new storage layer. | ||||
|     async with open_cached_client('deribit') as client: | ||||
| 
 | ||||
|         async def get_ohlc( | ||||
|             end_dt: Optional[datetime] = None, | ||||
|             start_dt: Optional[datetime] = None, | ||||
| 
 | ||||
|         ) -> tuple[ | ||||
|             np.ndarray, | ||||
|             datetime,  # start | ||||
|             datetime,  # end | ||||
|         ]: | ||||
| 
 | ||||
|             array = await client.bars( | ||||
|                 instrument, | ||||
|                 start_dt=start_dt, | ||||
|                 end_dt=end_dt, | ||||
|             ) | ||||
|             if len(array) == 0: | ||||
|                 raise DataUnavailable | ||||
| 
 | ||||
|             start_dt = pendulum.from_timestamp(array[0]['time']) | ||||
|             end_dt = pendulum.from_timestamp(array[-1]['time']) | ||||
| 
 | ||||
|             return array, start_dt, end_dt | ||||
| 
 | ||||
|         yield get_ohlc, {'erlangs': 3, 'rate': 3} | ||||
| 
 | ||||
| 
 | ||||
| async def stream_quotes( | ||||
| 
 | ||||
|     send_chan: trio.abc.SendChannel, | ||||
|     symbols: list[str], | ||||
|     feed_is_live: trio.Event, | ||||
|     loglevel: Optional[str] = None, | ||||
| 
 | ||||
|     # startup sync | ||||
|     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> None: | ||||
|     # XXX: required to propagate ``tractor`` loglevel to piker logging | ||||
|     get_console_log(loglevel or tractor.current_actor().loglevel) | ||||
| 
 | ||||
|     sym = symbols[0] | ||||
| 
 | ||||
|     async with ( | ||||
|         open_cached_client('deribit') as client, | ||||
|         send_chan as send_chan | ||||
|     ): | ||||
| 
 | ||||
|         init_msgs = { | ||||
|             # pass back token, and bool, signalling if we're the writer | ||||
|             # and that history has been written | ||||
|             sym: { | ||||
|                 'symbol_info': { | ||||
|                     'asset_type': 'option', | ||||
|                     'price_tick_size': 0.0005 | ||||
|                 }, | ||||
|                 'shm_write_opts': {'sum_tick_vml': False}, | ||||
|                 'fqsn': sym, | ||||
|             }, | ||||
|         } | ||||
| 
 | ||||
|         nsym = piker_sym_to_cb_sym(sym) | ||||
| 
 | ||||
|         async with maybe_open_price_feed(sym) as stream: | ||||
| 
 | ||||
|             cache = await client.cache_symbols() | ||||
| 
 | ||||
|             last_trades = (await client.last_trades( | ||||
|                 cb_sym_to_deribit_inst(nsym), count=1)).trades | ||||
| 
 | ||||
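|             # seed the first quote from the most recent trade, or | ||||
|             # wait for the first live trade off the stream when the | ||||
|             # market has no trade history. | ||||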
|             if len(last_trades) == 0: | ||||
|                 last_trade = None | ||||
|                 async for typ, quote in stream: | ||||
|                     if typ == 'trade': | ||||
|                         last_trade = Trade(**(quote['data'])) | ||||
|                         break | ||||
| 
 | ||||
|             else: | ||||
|                 last_trade = Trade(**(last_trades[0])) | ||||
| 
 | ||||
|             first_quote = { | ||||
|                 'symbol': sym, | ||||
|                 'last': last_trade.price, | ||||
|                 'brokerd_ts': last_trade.timestamp, | ||||
|                 'ticks': [{ | ||||
|                     'type': 'trade', | ||||
|                     'price': last_trade.price, | ||||
|                     'size': last_trade.amount, | ||||
|                     'broker_ts': last_trade.timestamp | ||||
|                 }] | ||||
|             } | ||||
|             task_status.started((init_msgs, first_quote)) | ||||
| 
 | ||||
|             feed_is_live.set() | ||||
| 
 | ||||
|             async for typ, quote in stream: | ||||
|                 topic = quote['symbol'] | ||||
|                 await send_chan.send({topic: quote}) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_symbol_search( | ||||
|     ctx: tractor.Context, | ||||
| ) -> Client: | ||||
|     async with open_cached_client('deribit') as client: | ||||
| 
 | ||||
|         # load all symbols locally for fast search | ||||
|         cache = await client.cache_symbols() | ||||
|         await ctx.started() | ||||
| 
 | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             async for pattern in stream: | ||||
|                 # repack in dict form | ||||
|                 await stream.send( | ||||
|                     await client.search_symbols(pattern)) | ||||
|  | @ -1,134 +0,0 @@ | |||
| ``ib`` backend | ||||
| -------------- | ||||
| more or less the "everything broker" for traditional and international | ||||
| markets. they are the "go to" provider for automated retail trading | ||||
| and we interface to their APIs using the `ib_insync` project. | ||||
| 
 | ||||
| status | ||||
| ****** | ||||
| current support is *production grade* and both real-time data and order | ||||
| management should be correct and fast. this backend is used by core devs | ||||
| for live trading. | ||||
| 
 | ||||
| currently there is not yet full support for: | ||||
| - options charting and trading | ||||
| - paxos based crypto rt feeds and trading | ||||
| 
 | ||||
| 
 | ||||
| config | ||||
| ****** | ||||
| In order to get order mode support your ``brokers.toml`` | ||||
| needs to have something like the following: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|    [ib] | ||||
|    hosts = [ | ||||
|     "127.0.0.1", | ||||
|    ] | ||||
|    # TODO: when we eventually spawn gateways in our | ||||
|    # container, we can just dynamically allocate these | ||||
|    # using IBC. | ||||
|    ports = [ | ||||
|        4002, | ||||
|        4003, | ||||
|        4006, | ||||
|        4001, | ||||
|        7497, | ||||
|    ] | ||||
| 
 | ||||
|    # XXX: for a paper account the flex web query service | ||||
|    # is not supported so you have to manually download | ||||
|    # an XML report and put it in a location that can be | ||||
|    # accessed by the ``brokerd.ib`` backend code for parsing. | ||||
|    flex_token = '1111111111111111' | ||||
|    flex_trades_query_id = '6969696'  # live accounts only? | ||||
| 
 | ||||
|    # 3rd party web-api token | ||||
|    # (XXX: not sure if this works yet) | ||||
|    trade_log_token = '111111111111111' | ||||
| 
 | ||||
|    # when clients are being scanned this determines | ||||
|    # which clients are preferred to be used for data feeds | ||||
|    # based on account names which are detected as active | ||||
|    # on each client. | ||||
|    prefer_data_account = [ | ||||
|        # this has to be first in order to make data work with dual paper + live | ||||
|        'main', | ||||
|        'algopaper', | ||||
|    ] | ||||
| 
 | ||||
|    [ib.accounts] | ||||
|    main = 'U69696969' | ||||
|    algopaper = 'DU9696969' | ||||
| 
 | ||||
| 
 | ||||
| If everything works correctly you should see any current positions | ||||
| loaded in the pps pane on chart load and you should also be able to | ||||
| check your trade records in the file:: | ||||
| 
 | ||||
|     <pikerk_conf_dir>/ledgers/trades_ib_algopaper.toml | ||||
| 
 | ||||
| 
 | ||||
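| To (re)load trades from flex reports you can run the flex | ||||
| processing module directly; a minimal sketch assuming the | ||||
| ``piker.brokers.ib._flex_reports`` module layout shown in this | ||||
| repo: | ||||
| 
 | ||||
| .. code:: bash | ||||
| 
 | ||||
|     # parse a manually downloaded XML report (eg. paper accounts) | ||||
|     python -m piker.brokers.ib._flex_reports path/to/report.xml | ||||
| 
 | ||||
|     # or pull from the flex web service using the flex_token and | ||||
|     # flex_trades_query_id set in your brokers.toml | ||||
|     python -m piker.brokers.ib._flex_reports | ||||
| 
 | ||||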
| An example ledger file will have entries written verbatim from the | ||||
| trade events schema: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|     ["0000e1a7.630f5e5a.01.01"] | ||||
|     secType = "FUT" | ||||
|     conId = 515416577 | ||||
|     symbol = "MNQ" | ||||
|     lastTradeDateOrContractMonth = "20221216" | ||||
|     strike = 0.0 | ||||
|     right = "" | ||||
|     multiplier = "2" | ||||
|     exchange = "GLOBEX" | ||||
|     primaryExchange = "" | ||||
|     currency = "USD" | ||||
|     localSymbol = "MNQZ2" | ||||
|     tradingClass = "MNQ" | ||||
|     includeExpired = false | ||||
|     secIdType = "" | ||||
|     secId = "" | ||||
|     comboLegsDescrip = "" | ||||
|     comboLegs = [] | ||||
|     execId = "0000e1a7.630f5e5a.01.01" | ||||
|     time = 1661972086.0 | ||||
|     acctNumber = "DU69696969" | ||||
|     side = "BOT" | ||||
|     shares = 1.0 | ||||
|     price = 12372.75 | ||||
|     permId = 441472655 | ||||
|     clientId = 6116 | ||||
|     orderId = 985 | ||||
|     liquidation = 0 | ||||
|     cumQty = 1.0 | ||||
|     avgPrice = 12372.75 | ||||
|     orderRef = "" | ||||
|     evRule = "" | ||||
|     evMultiplier = 0.0 | ||||
|     modelCode = "" | ||||
|     lastLiquidity = 1 | ||||
|     broker_time = 1661972086.0 | ||||
|     name = "ib" | ||||
|     commission = 0.57 | ||||
|     realizedPNL = 243.41 | ||||
|     yield_ = 0.0 | ||||
|     yieldRedemptionDate = 0 | ||||
|     listingExchange = "GLOBEX" | ||||
|     date = "2022-08-31T18:54:46+00:00" | ||||
| 
 | ||||
| 
 | ||||
| your ``pps.toml`` file will have position entries like: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|     [ib.algopaper."mnq.globex.20221216"] | ||||
|     size = -1.0 | ||||
|     ppu = 12423.630576923071 | ||||
|     bs_mktid = 515416577 | ||||
|     expiry = "2022-12-16T00:00:00+00:00" | ||||
|     clears = [ | ||||
|      { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" }, | ||||
|     ] | ||||
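| 
 | ||||
| where ``ppu`` is the position's average price-per-unit, | ||||
| ``bs_mktid`` is the broker-specific market id (ib's ``conId``) and | ||||
| ``clears`` is the list of transactions which constitute the | ||||
| position. | ||||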
|  | @ -30,52 +30,29 @@ from .api import ( | |||
| ) | ||||
| from .feed import ( | ||||
|     open_history_client, | ||||
|     open_symbol_search, | ||||
|     stream_quotes, | ||||
| ) | ||||
| from .broker import ( | ||||
|     open_trade_dialog, | ||||
| ) | ||||
| from .ledger import ( | ||||
|     norm_trade, | ||||
|     trades_dialogue, | ||||
|     norm_trade_records, | ||||
|     tx_sort, | ||||
| ) | ||||
| from .symbols import ( | ||||
|     get_mkt_info, | ||||
|     open_symbol_search, | ||||
|     _search_conf, | ||||
| ) | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'get_client', | ||||
|     'get_mkt_info', | ||||
|     'norm_trade', | ||||
|     'norm_trade_records', | ||||
|     'open_trade_dialog', | ||||
|     'trades_dialogue', | ||||
|     'open_history_client', | ||||
|     'open_symbol_search', | ||||
|     'stream_quotes', | ||||
|     '_search_conf', | ||||
|     'tx_sort', | ||||
| ] | ||||
| 
 | ||||
| _brokerd_mods: list[str] = [ | ||||
|     'api', | ||||
|     'broker', | ||||
| ] | ||||
| 
 | ||||
| _datad_mods: list[str] = [ | ||||
|     'feed', | ||||
|     'symbols', | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| # tractor RPC enable arg | ||||
| __enable_modules__: list[str] = ( | ||||
|     _brokerd_mods | ||||
|     + | ||||
|     _datad_mods | ||||
| ) | ||||
| __enable_modules__: list[str] = [ | ||||
|     'api', | ||||
|     'feed', | ||||
|     'broker', | ||||
| ] | ||||
| 
 | ||||
| # passed to ``tractor.ActorNursery.start_actor()`` | ||||
| _spawn_kwargs = { | ||||
|  | @ -86,8 +63,3 @@ _spawn_kwargs = { | |||
| # know if ``brokerd`` should be spawned with | ||||
| # ``tractor``'s aio mode. | ||||
| _infect_asyncio: bool = True | ||||
| 
 | ||||
| # XXX NOTE: for now we disable symcache with this backend since | ||||
| # there is no clearly simple nor practical way to download "all | ||||
| # symbology info" for all supported venues.. | ||||
| _no_symcache: bool = True | ||||
|  |  | |||
|  | @ -1,195 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| "FLEX" report processing utils. | ||||
| 
 | ||||
| """ | ||||
| from bidict import bidict | ||||
| import pendulum | ||||
| from pprint import pformat | ||||
| from typing import Any | ||||
| 
 | ||||
| from .api import ( | ||||
|     get_config, | ||||
|     log, | ||||
| ) | ||||
| from piker.accounting import ( | ||||
|     open_trade_ledger, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def parse_flex_dt( | ||||
|     record: str, | ||||
| ) -> pendulum.DateTime: | ||||
|     ''' | ||||
|     Parse stupid flex record datetime stamps for the `dateTime` field.. | ||||
| 
 | ||||
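|     eg. an input of '20220831;185446' parses to a pendulum | ||||
|     datetime of 2022-08-31T18:54:46. | ||||
| 
 | ||||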
|     ''' | ||||
|     date, ts = record.split(';') | ||||
|     dt = pendulum.parse(date) | ||||
|     ts = f'{ts[:2]}:{ts[2:4]}:{ts[4:]}' | ||||
|     tsdt = pendulum.parse(ts) | ||||
|     return dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second) | ||||
| 
 | ||||
| 
 | ||||
| def flex_records_to_ledger_entries( | ||||
|     accounts: bidict, | ||||
|     trade_entries: list[object], | ||||
| 
 | ||||
| ) -> dict: | ||||
|     ''' | ||||
|     Convert flex report entry objects into ``dict`` form, pretty much | ||||
|     straight up without modification except adding a `pydatetime` field | ||||
|     from the parsed timestamp. | ||||
| 
 | ||||
|     ''' | ||||
|     trades_by_account = {} | ||||
|     for t in trade_entries: | ||||
|         entry = t.__dict__ | ||||
| 
 | ||||
|         # XXX: LOL apparently ``toml`` has a bug | ||||
|         # where a section key error will show up in the write | ||||
|         # if you leave a table key as an `int`? So i guess | ||||
|         # cast to strs for all keys.. | ||||
| 
 | ||||
|         # oddly for some so-called "BookTrade" entries | ||||
|         # this field seems to be blank, no cuckin clue. | ||||
|         # trade['ibExecID'] | ||||
|         tid = str(entry.get('ibExecID') or entry['tradeID']) | ||||
|         # date = str(entry['tradeDate']) | ||||
| 
 | ||||
|         # XXX: is it going to cause problems if an account name | ||||
|         # gets lost? The user should be able to find it based | ||||
|         # on the actual exec history right? | ||||
|         acctid = accounts[str(entry['accountId'])] | ||||
| 
 | ||||
|         # probably a flex record with a wonky non-std timestamp.. | ||||
|         dt = entry['pydatetime'] = parse_flex_dt(entry['dateTime']) | ||||
|         entry['datetime'] = str(dt) | ||||
| 
 | ||||
|         if not tid: | ||||
|             # this is likely some kind of internal adjustment | ||||
|             # transaction, likely one of the following: | ||||
|             # - an expiry event that will show a "book trade" indicating | ||||
|             #   some adjustment to cash balances: zeroing or itm settle. | ||||
|             # - a manual cash balance position adjustment likely done by | ||||
|             #   the user from the accounts window in TWS where they can | ||||
|             #   manually set the avg price and size: | ||||
|             #   https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST | ||||
|             log.warning(f'Skipping ID-less ledger entry:\n{pformat(entry)}') | ||||
|             continue | ||||
| 
 | ||||
|         trades_by_account.setdefault( | ||||
|             acctid, {} | ||||
|         )[tid] = entry | ||||
| 
 | ||||
|     for acctid in trades_by_account: | ||||
|         trades_by_account[acctid] = dict(sorted( | ||||
|             trades_by_account[acctid].items(), | ||||
|             key=lambda entry: entry[1]['pydatetime'], | ||||
|         )) | ||||
| 
 | ||||
|     return trades_by_account | ||||
| 
 | ||||
| 
 | ||||
| def load_flex_trades( | ||||
|     path: str | None = None, | ||||
| 
 | ||||
| ) -> dict[str, Any]: | ||||
| 
 | ||||
|     from ib_insync import flexreport, util | ||||
| 
 | ||||
|     conf = get_config() | ||||
| 
 | ||||
|     if not path: | ||||
|         # load ``brokers.toml`` and try to get the flex | ||||
|         # token and query id that must be previously defined | ||||
|         # by the user. | ||||
|         token = conf.get('flex_token') | ||||
|         if not token: | ||||
|             raise ValueError( | ||||
|                 'You must specify a ``flex_token`` field in your ' | ||||
|                 '`brokers.toml` in order to load your trade log, see our ' | ||||
|                 'instructions for how to set this up here:\n' | ||||
|                 'PUT LINK HERE!' | ||||
|             ) | ||||
| 
 | ||||
|         qid = conf['flex_trades_query_id'] | ||||
| 
 | ||||
|         # TODO: hack this into our logging | ||||
|         # system like we do with the API client.. | ||||
|         util.logToConsole() | ||||
| 
 | ||||
|         # TODO: rewrite the query part of this with async..httpx? | ||||
|         report = flexreport.FlexReport( | ||||
|             token=token, | ||||
|             queryId=qid, | ||||
|         ) | ||||
| 
 | ||||
|     else: | ||||
|         # XXX: another project we could potentially look at, | ||||
|         # https://pypi.org/project/ibflex/ | ||||
|         report = flexreport.FlexReport(path=path) | ||||
| 
 | ||||
|     trade_entries = report.extract('Trade') | ||||
|     ln = len(trade_entries) | ||||
|     log.info(f'Loaded {ln} trades from flex query') | ||||
| 
 | ||||
|     trades_by_account = flex_records_to_ledger_entries( | ||||
|         conf['accounts'].inverse,  # reverse map to user account names | ||||
|         trade_entries, | ||||
|     ) | ||||
| 
 | ||||
|     ledger_dict: dict | None = None | ||||
| 
 | ||||
|     for acctid in trades_by_account: | ||||
|         trades_by_id = trades_by_account[acctid] | ||||
| 
 | ||||
|         with open_trade_ledger( | ||||
|             'ib', | ||||
|             acctid, | ||||
|             allow_from_sync_code=True, | ||||
|         ) as ledger_dict: | ||||
|             tid_delta = set(trades_by_id) - set(ledger_dict) | ||||
|             log.info( | ||||
|                 'New trades detected\n' | ||||
|                 f'{pformat(tid_delta)}' | ||||
|             ) | ||||
|             if tid_delta: | ||||
|                 sorted_delta = dict(sorted( | ||||
|                     {tid: trades_by_id[tid] for tid in tid_delta}.items(), | ||||
|                     key=lambda entry: entry[1].pop('pydatetime'), | ||||
|                 )) | ||||
|                 ledger_dict.update(sorted_delta) | ||||
| 
 | ||||
|     return ledger_dict | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     import sys | ||||
|     import os | ||||
| 
 | ||||
|     args = sys.argv | ||||
|     if len(args) > 1: | ||||
|         args = args[1:] | ||||
|         for arg in args: | ||||
|             path = os.path.abspath(arg) | ||||
|             load_flex_trades(path=path) | ||||
|     else: | ||||
|         # expect brokers.toml to have an entry and | ||||
|         # pull from the web service. | ||||
|         load_flex_trades() | ||||
|  | @ -1,269 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| ``ib`` utilities and hacks suitable for use in the backend and/or as | ||||
| runnable script-programs. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from functools import partial | ||||
| from typing import ( | ||||
|     Literal, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| import subprocess | ||||
| 
 | ||||
| import tractor | ||||
| 
 | ||||
| from piker.brokers._util import get_logger | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from .api import Client | ||||
|     from ib_insync import IB | ||||
| 
 | ||||
| log = get_logger('piker.brokers.ib') | ||||
| 
 | ||||
| _reset_tech: Literal[ | ||||
|     'vnc', | ||||
|     'i3ipc_xdotool', | ||||
| 
 | ||||
|     # TODO: in theory we can use a different linux DE API or | ||||
|     # some other type of similar window scanning/mgmt client | ||||
|     # (on other OSs) to do the same. | ||||
| 
 | ||||
| ] = 'vnc' | ||||
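| # NOTE: mutated at runtime: if the vnc approach fails (see | ||||
| # `data_reset_hack()` below) we fall back to the `i3ipc_xdotool` | ||||
| # tech for subsequent resets. | ||||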
| 
 | ||||
| 
 | ||||
| async def data_reset_hack( | ||||
|     # vnc_host: str, | ||||
|     client: Client, | ||||
|     reset_type: Literal['data', 'connection'], | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Run key combos for resetting data feeds and yield back to caller | ||||
|     when complete. | ||||
| 
 | ||||
|     NOTE: this is a linux-only hack around! | ||||
| 
 | ||||
|     There are multiple "techs" you can use depending on your infra setup: | ||||
| 
 | ||||
|     - if running ib-gw in a container with a VNC server running the most | ||||
|       performant method is the `'vnc'` option. | ||||
| 
 | ||||
|     - if running ib-gw/tws locally, and you are using `i3` you can use | ||||
|       the ``i3ipc`` lib and ``xdotool`` to send the appropriate click | ||||
|       and key-combos automatically to your local desktop's java X-apps. | ||||
| 
 | ||||
|     https://interactivebrokers.github.io/tws-api/historical_limitations.html#pacing_violations | ||||
| 
 | ||||
|     TODOs: | ||||
|         - a return type that hopefully determines if the hack was | ||||
|           successful. | ||||
|         - other OS support? | ||||
|         - integration with ``ib-gw`` run in docker + Xorg? | ||||
|         - is it possible to offer a local server that can be accessed by | ||||
|           a client? Would sure be handy for running native java blobs | ||||
|           that need to be wrangled. | ||||
| 
 | ||||
|     ''' | ||||
|     ib_client: IB = client.ib | ||||
| 
 | ||||
|     # look up any user defined vnc socket address mapped from | ||||
|     # a particular API socket port. | ||||
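|     # eg. an `[ib]` section entry like (shape assumed from the | ||||
|     # fallback lookup below): | ||||
|     #   vnc_addrs = { '4002' = ['localhost', 3003] } | ||||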
|     api_port: str = str(ib_client.client.port) | ||||
|     vnc_host: str | ||||
|     vnc_port: int | ||||
|     vnc_sockaddr: dict | None = client.conf.get('vnc_addrs') | ||||
| 
 | ||||
|     no_setup_msg: str = ( | ||||
|         f'No data reset hack test setup for {vnc_sockaddr}!\n' | ||||
|         'See config setup tips @\n' | ||||
|         'https://github.com/pikers/piker/tree/master/piker/brokers/ib' | ||||
|     ) | ||||
| 
 | ||||
|     if not vnc_sockaddr: | ||||
|         log.warning( | ||||
|             no_setup_msg | ||||
|             + | ||||
|             'REQUIRES A `vnc_addrs: array` ENTRY' | ||||
|         ) | ||||
| 
 | ||||
|     # fall back to a default localhost vnc socket addr when no | ||||
|     # table (entry) is defined. | ||||
|     vnc_host, vnc_port = (vnc_sockaddr or {}).get( | ||||
|         api_port, | ||||
|         ('localhost', 3003) | ||||
|     ) | ||||
|     global _reset_tech | ||||
| 
 | ||||
|     match _reset_tech: | ||||
|         case 'vnc': | ||||
|             try: | ||||
|                 await tractor.to_asyncio.run_task( | ||||
|                     partial( | ||||
|                         vnc_click_hack, | ||||
|                         host=vnc_host, | ||||
|                         port=vnc_port, | ||||
|                         reset_type=reset_type, | ||||
|                     ) | ||||
|                 ) | ||||
|             except OSError: | ||||
|                 if vnc_host != 'localhost': | ||||
|                     log.warning(no_setup_msg) | ||||
|                     return False | ||||
| 
 | ||||
|                 try: | ||||
|                     import i3ipc  # noqa  (dynamic dep check) | ||||
|                 except ModuleNotFoundError: | ||||
|                     log.warning(no_setup_msg) | ||||
|                     return False | ||||
| 
 | ||||
|                 try: | ||||
|                     i3ipc_xdotool_manual_click_hack() | ||||
|                     _reset_tech = 'i3ipc_xdotool' | ||||
|                     return True | ||||
|                 except OSError: | ||||
|                     log.exception(no_setup_msg) | ||||
|                     return False | ||||
| 
 | ||||
|         case 'i3ipc_xdotool': | ||||
|             i3ipc_xdotool_manual_click_hack() | ||||
| 
 | ||||
|         case _ as tech: | ||||
|             raise RuntimeError(f'{tech} is not supported for reset tech!?') | ||||
| 
 | ||||
|     # we don't really need the ``xdotool`` approach any more B) | ||||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| async def vnc_click_hack( | ||||
|     host: str, | ||||
|     port: int, | ||||
|     reset_type: str = 'data' | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Reset the data or network connection for the VNC attached | ||||
|     ib gateway using magic combos. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         import asyncvnc | ||||
|     except ModuleNotFoundError: | ||||
|         log.warning( | ||||
|             "In order to leverage `piker`'s built-in data reset hacks, install " | ||||
|             "the `asyncvnc` project: https://github.com/barneygale/asyncvnc" | ||||
|         ) | ||||
|         return | ||||
| 
 | ||||
|     # two different hot keys which trigger diff types of reset | ||||
|     # requests B) | ||||
|     key = { | ||||
|         'data': 'f', | ||||
|         'connection': 'r' | ||||
|     }[reset_type] | ||||
| 
 | ||||
|     async with asyncvnc.connect( | ||||
|         host, | ||||
|         port=port, | ||||
| 
 | ||||
|         # TODO: doesn't work see: | ||||
|         # https://github.com/barneygale/asyncvnc/issues/7 | ||||
|         # password='ibcansmbz', | ||||
| 
 | ||||
|     ) as client: | ||||
| 
 | ||||
|         # move to middle of screen | ||||
|         # 640x1800 | ||||
|         client.mouse.move( | ||||
|             x=500, | ||||
|             y=500, | ||||
|         ) | ||||
|         client.mouse.click() | ||||
|         client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked | ||||
| 
 | ||||
| 
 | ||||
| def i3ipc_xdotool_manual_click_hack() -> None: | ||||
|     ''' | ||||
|     Do the data reset hack but expecting a local X-window using `xdotool`. | ||||
| 
 | ||||
|     ''' | ||||
|     import i3ipc | ||||
|     i3 = i3ipc.Connection() | ||||
| 
 | ||||
|     # TODO: might be worth offering some kinda api for grabbing | ||||
|     # the window id from the pid? | ||||
|     # https://stackoverflow.com/a/2250879 | ||||
|     t = i3.get_tree() | ||||
| 
 | ||||
|     orig_win_id = t.find_focused().window | ||||
| 
 | ||||
|     # for tws | ||||
|     win_names: list[str] = [ | ||||
|         'Interactive Brokers',  # tws running in i3 | ||||
|         'IB Gateway',  # gw running in i3 | ||||
|         # 'IB',  # gw running in i3 (newer version?) | ||||
|     ] | ||||
| 
 | ||||
|     try: | ||||
|         for name in win_names: | ||||
|             results = t.find_titled(name) | ||||
|             print(f'results for {name}: {results}') | ||||
|             if results: | ||||
|                 con = results[0] | ||||
|                 print(f'Resetting data feed for {name}') | ||||
|                 win_id = str(con.window) | ||||
|                 w, h = con.rect.width, con.rect.height | ||||
| 
 | ||||
|                 # TODO: seems to be a few libs for python but not sure | ||||
|                 # if they support all the sub commands we need, order of | ||||
|                 # most recent commit history: | ||||
|                 # https://github.com/rr-/pyxdotool | ||||
|                 # https://github.com/ShaneHutter/pyxdotool | ||||
|                 # https://github.com/cphyc/pyxdotool | ||||
| 
 | ||||
|                 # TODO: only run the reconnect (2nd) kc on a detected | ||||
|                 # disconnect? | ||||
|                 for key_combo, timeout in [ | ||||
|                     # only required if we need a connection reset. | ||||
|                     # ('ctrl+alt+r', 12), | ||||
|                     # data feed reset. | ||||
|                     ('ctrl+alt+f', 6) | ||||
|                 ]: | ||||
|                     subprocess.call([ | ||||
|                         'xdotool', | ||||
|                         'windowactivate', '--sync', win_id, | ||||
| 
 | ||||
|                         # move mouse to bottom left of window (where | ||||
|                         # there should be nothing to click). | ||||
|                         'mousemove_relative', '--sync', str(w-4), str(h-4), | ||||
| 
 | ||||
|                         # NOTE: we may need to stick a `--retry 3` in here.. | ||||
|                         'click', '--window', win_id, | ||||
|                         '--repeat', '3', '1', | ||||
| 
 | ||||
|                         # hackzorzes | ||||
|                         'key', key_combo, | ||||
|                         ], | ||||
|                         timeout=timeout, | ||||
|                     ) | ||||
| 
 | ||||
|         # re-activate and focus original window | ||||
|         subprocess.call([ | ||||
|             'xdotool', | ||||
|             'windowactivate', '--sync', str(orig_win_id), | ||||
|             'click', '--window', str(orig_win_id), '1', | ||||
|         ]) | ||||
|     except subprocess.TimeoutExpired: | ||||
|         log.exception('xdotool timed out?') | ||||
										
											
							|  | @ -1,529 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Trade transaction accounting and normalization. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from bisect import insort | ||||
| from dataclasses import asdict | ||||
| from decimal import Decimal | ||||
| from functools import partial | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from bidict import bidict | ||||
| from pendulum import ( | ||||
|     DateTime, | ||||
|     parse, | ||||
|     from_timestamp, | ||||
| ) | ||||
| from ib_insync import ( | ||||
|     Contract, | ||||
|     Commodity, | ||||
|     Fill, | ||||
|     Execution, | ||||
|     CommissionReport, | ||||
| ) | ||||
| 
 | ||||
| from piker.types import Struct | ||||
| from piker.data import ( | ||||
|     SymbologyCache, | ||||
| ) | ||||
| from piker.accounting import ( | ||||
|     Asset, | ||||
|     dec_digits, | ||||
|     digits_to_dec, | ||||
|     Transaction, | ||||
|     MktPair, | ||||
|     iter_by_dt, | ||||
| ) | ||||
| from ._flex_reports import parse_flex_dt | ||||
| from ._util import log | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from .api import ( | ||||
|         Client, | ||||
|         MethodProxy, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| tx_sort: Callable = partial( | ||||
|     iter_by_dt, | ||||
|     parsers={ | ||||
|         'dateTime': parse_flex_dt, | ||||
|         'datetime': parse, | ||||
| 
 | ||||
|         # XXX: for some fucking 2022 and | ||||
|         # back options records.. f@#$ me.. | ||||
|         'date': parse, | ||||
|     } | ||||
| ) | ||||
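| # NOTE: sorts ledger records by datetime using whichever of the | ||||
| # above (parseable) timestamp fields is present on each record. | ||||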
| 
 | ||||
| 
 | ||||
| def norm_trade( | ||||
|     tid: str, | ||||
|     record: dict[str, Any], | ||||
| 
 | ||||
|     # this is the dict that was returned from | ||||
|     # `Client.get_mkt_pairs()` and when running offline ledger | ||||
|     # processing from `.accounting`, this will be the table loaded | ||||
|     # into `SymbologyCache.pairs`. | ||||
|     pairs: dict[str, Struct], | ||||
|     symcache: SymbologyCache | None = None, | ||||
| 
 | ||||
| ) -> Transaction | None: | ||||
| 
 | ||||
|     conid: str = str(record.get('conId') or record['conid']) | ||||
|     bs_mktid: str = str(conid) | ||||
| 
 | ||||
|     # NOTE: sometimes weird records (like BTTX?) | ||||
|     # have no field for this? | ||||
|     comms: float = -1 * ( | ||||
|         record.get('commission') | ||||
|         or record.get('ibCommission') | ||||
|         or 0 | ||||
|     ) | ||||
|     if not comms: | ||||
|         log.warning( | ||||
|             'No commissions found for record?\n' | ||||
|             f'{pformat(record)}\n' | ||||
|         ) | ||||
| 
 | ||||
|     price: float = ( | ||||
|         record.get('price') | ||||
|         or record.get('tradePrice') | ||||
|     ) | ||||
|     if price is None: | ||||
|         log.warning( | ||||
|             'No `price` field found in record?\n' | ||||
|             'Skipping normalization..\n' | ||||
|             f'{pformat(record)}\n' | ||||
|         ) | ||||
|         return None | ||||
| 
 | ||||
|     # the api doesn't do the -/+ on the quantity for you but flex | ||||
|     # records do.. are you fucking serious ib...!? | ||||
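|     # eg. a 'BOT' (buy) of 2 -> size=+2, a 'SLD' (sell) of 2 -> -2. | ||||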
|     size: float|int = ( | ||||
|         record.get('quantity') | ||||
|         or record['shares'] | ||||
|     ) * { | ||||
|         'BOT': 1, | ||||
|         'SLD': -1, | ||||
|     }[record['side']] | ||||
| 
 | ||||
|     symbol: str = record['symbol'] | ||||
|     exch: str = ( | ||||
|         record.get('listingExchange') | ||||
|         or record.get('primaryExchange') | ||||
|         or record['exchange'] | ||||
|     ) | ||||
| 
 | ||||
|     # NOTE: remove null values since `tomlkit` can't serialize | ||||
|     # them to file. | ||||
|     if dnc := record.pop('deltaNeutralContract', None): | ||||
|         record['deltaNeutralContract'] = dnc | ||||
| 
 | ||||
|     # likely an opts contract record from a flex report.. | ||||
|     # TODO: no idea how to parse ^ the strike part from flex.. | ||||
|     # (00010000 any, or 00007500 tsla, ..) | ||||
|     # we probably must do the contract lookup for this? | ||||
|     if ( | ||||
|         '   ' in symbol | ||||
|         or '--' in exch | ||||
|     ): | ||||
|         underlying, _, tail = symbol.partition('   ') | ||||
|         exch: str = 'opt' | ||||
|         expiry: str = tail[:6] | ||||
|         # otype = tail[6] | ||||
|         # strike = tail[7:] | ||||
| 
 | ||||
|         log.warning( | ||||
|             f'Skipping option contract -> NO SUPPORT YET!\n' | ||||
|             f'{symbol}\n' | ||||
|         ) | ||||
|         return None | ||||
| 
 | ||||
|     # timestamping is way different in API records | ||||
|     dtstr: str = record.get('datetime') | ||||
|     date: str = record.get('date') | ||||
|     flex_dtstr: str = record.get('dateTime') | ||||
| 
 | ||||
|     if dtstr or date: | ||||
|         dt: DateTime = parse(dtstr or date) | ||||
| 
 | ||||
|     elif flex_dtstr: | ||||
|         # probably a flex record with a wonky non-std timestamp.. | ||||
|         dt: DateTime = parse_flex_dt(record['dateTime']) | ||||
| 
 | ||||
|     # special handling of symbol extraction from | ||||
|     # flex records using some ad-hoc schema parsing. | ||||
|     asset_type: str = ( | ||||
|         record.get('assetCategory') | ||||
|         or record.get('secType') | ||||
|         or 'STK' | ||||
|     ) | ||||
| 
 | ||||
|     if (expiry := ( | ||||
|             record.get('lastTradeDateOrContractMonth') | ||||
|             or record.get('expiry') | ||||
|         ) | ||||
|     ): | ||||
|         expiry: str = str(expiry).strip(' ') | ||||
|         # NOTE: we directly use the (simple and usually short) | ||||
|         # date-string expiry token when packing the `MktPair` | ||||
|         # since we want the fqme to contain *that* token. | ||||
|         # It might make sense later to instead parse and then | ||||
|         # render different output str format(s) for this same | ||||
|         # purpose depending on asset-type-market down the road. | ||||
|         # Eg. for derivs we use the short token only for fqme | ||||
|         # but use the isoformat('T') for transactions and | ||||
|         # account file position entries? | ||||
|         # dt_str: str = pendulum.parse(expiry).isoformat('T') | ||||
| 
 | ||||
|     # XXX: pretty much all legacy market assets have a fiat | ||||
|     # currency (denomination) determined by their venue. | ||||
|     currency: str = record['currency'] | ||||
|     src = Asset( | ||||
|         name=currency.lower(), | ||||
|         atype='fiat', | ||||
|         tx_tick=Decimal('0.01'), | ||||
|     ) | ||||
| 
 | ||||
|     match asset_type: | ||||
|         case 'FUT': | ||||
|             # XXX (flex) ledger entries don't necessarily have any | ||||
|             # simple 3-char key.. sometimes the .symbol is some | ||||
|             # weird internal key that we probably don't want in the | ||||
|             # .fqme => we should probably just wrap `Contract` to | ||||
|             # this like we do other crypto$ backends XD | ||||
| 
 | ||||
|             # NOTE: at least older FLEX records should have | ||||
|             # this field.. no idea about API entries.. | ||||
|             local_symbol: str | None = record.get('localSymbol') | ||||
|             underlying_key: str = record.get('underlyingSymbol') | ||||
|             descr: str | None = record.get('description') | ||||
| 
 | ||||
|             if ( | ||||
|                 not ( | ||||
|                     local_symbol | ||||
|                     and symbol in local_symbol | ||||
|                 ) | ||||
|                 and ( | ||||
|                     descr | ||||
|                     and symbol not in descr | ||||
|                 ) | ||||
|             ): | ||||
|                 con_key, exp_str = descr.split(' ') | ||||
|                 symbol: str = underlying_key or con_key | ||||
| 
 | ||||
|             dst = Asset( | ||||
|                 name=symbol.lower(), | ||||
|                 atype='future', | ||||
|                 tx_tick=Decimal('1'), | ||||
|             ) | ||||
| 
 | ||||
|         case 'STK': | ||||
|             dst = Asset( | ||||
|                 name=symbol.lower(), | ||||
|                 atype='stock', | ||||
|                 tx_tick=Decimal('1'), | ||||
|             ) | ||||
| 
 | ||||
|         case 'CASH': | ||||
|             if currency not in symbol: | ||||
|                 # likely a dict-casted `Forex` contract which | ||||
|                 # has .symbol as the dst and .currency as the | ||||
|                 # src. | ||||
|                 name: str = symbol.lower() | ||||
|             else: | ||||
|                 # likely a flex-report record which puts | ||||
|                 # EUR.USD as the symbol field and just USD in | ||||
|                 # the currency field. | ||||
|                 name: str = symbol.lower().replace(f'.{src.name}', '') | ||||
| 
 | ||||
|             dst = Asset( | ||||
|                 name=name, | ||||
|                 atype='fiat', | ||||
|                 tx_tick=Decimal('0.01'), | ||||
|             ) | ||||
| 
 | ||||
|         case 'OPT': | ||||
|             dst = Asset( | ||||
|                 name=symbol.lower(), | ||||
|                 atype='option', | ||||
|                 tx_tick=Decimal('1'), | ||||
| 
 | ||||
|                 # TODO: we should probably always cast to the | ||||
|                 # `Contract` instance then dict-serialize that for | ||||
|                 # the `.info` field! | ||||
|                 # info=asdict(Option()), | ||||
|             ) | ||||
| 
 | ||||
|         case 'CMDTY': | ||||
|             from .symbols import _adhoc_symbol_map | ||||
|             con_kwargs, _ = _adhoc_symbol_map[symbol.upper()] | ||||
|             dst = Asset( | ||||
|                 name=symbol.lower(), | ||||
|                 atype='commodity', | ||||
|                 tx_tick=Decimal('1'), | ||||
|                 info=asdict(Commodity(**con_kwargs)), | ||||
|             ) | ||||
| 
 | ||||
|     # try to build out piker fqme from record. | ||||
|     # src: str = record['currency'] | ||||
|     price_tick: Decimal = digits_to_dec(dec_digits(price)) | ||||
| 
 | ||||
|     # NOTE: can't serlialize `tomlkit.String` so cast to native | ||||
|     atype: str = str(dst.atype) | ||||
| 
 | ||||
|     # if not (mkt := symcache.mktmaps.get(bs_mktid)): | ||||
|     mkt = MktPair( | ||||
|         bs_mktid=bs_mktid, | ||||
|         dst=dst, | ||||
| 
 | ||||
|         price_tick=price_tick, | ||||
|         # NOTE: for "legacy" assets, volume is normally discreet, not | ||||
|         # a float, but we keep a digit in case the suitz decide | ||||
|         # to get crazy and change it; we'll be kinda ready | ||||
|         # schema-wise.. | ||||
|         size_tick=Decimal('1'), | ||||
| 
 | ||||
|         src=src,  # XXX: normally always a fiat | ||||
| 
 | ||||
|         _atype=atype, | ||||
| 
 | ||||
|         venue=exch, | ||||
|         expiry=expiry, | ||||
|         broker='ib', | ||||
| 
 | ||||
|         _fqme_without_src=(atype != 'fiat'), | ||||
|     ) | ||||
| 
 | ||||
|     fqme: str = mkt.fqme | ||||
| 
 | ||||
|     # XXX: if passed in, we fill out the symcache ad-hoc in order | ||||
|     # to make downstream accounting work.. | ||||
|     if symcache is not None: | ||||
|         orig_mkt: MktPair | None = symcache.mktmaps.get(bs_mktid) | ||||
|         if ( | ||||
|             orig_mkt | ||||
|             and orig_mkt.fqme != mkt.fqme | ||||
|         ): | ||||
|             log.warning( | ||||
|             # print( | ||||
|                 f'Contracts with common `conId`: {bs_mktid} mismatch..\n' | ||||
|                 f'{orig_mkt.fqme} -> {mkt.fqme}\n' | ||||
|                 # 'with DIFF:\n' | ||||
|                 # f'{mkt - orig_mkt}' | ||||
|             ) | ||||
| 
 | ||||
|         symcache.mktmaps[bs_mktid] = mkt | ||||
|         symcache.mktmaps[fqme] = mkt | ||||
|         symcache.assets[src.name] = src | ||||
|         symcache.assets[dst.name] = dst | ||||
| 
 | ||||
|     # NOTE: for flex records the normal fields for defining an fqme | ||||
|     # sometimes won't be available so we rely on two approaches for | ||||
|     # the "reverse lookup" of piker style fqme keys: | ||||
|     # - when dealing with API trade records received from | ||||
|     #   `IB.trades()` we do a contract lookup at the time of processing | ||||
|     # - when dealing with flex records, it is assumed the record | ||||
|     #   is at least a day old and thus the TWS position reporting system | ||||
|     #   should already have entries if the pps are still open, in | ||||
|     #   which case, we can pull the fqme from that table (see | ||||
|     #   `trades_dialogue()` above). | ||||
|     return Transaction( | ||||
|         fqme=fqme, | ||||
|         tid=tid, | ||||
|         size=size, | ||||
|         price=price, | ||||
|         cost=comms, | ||||
|         dt=dt, | ||||
|         expiry=expiry, | ||||
|         bs_mktid=str(conid), | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| def norm_trade_records( | ||||
|     ledger: dict[str, Any], | ||||
|     symcache: SymbologyCache | None = None, | ||||
| 
 | ||||
| ) -> dict[str, Transaction]: | ||||
|     ''' | ||||
|     Normalize (xml) flex-report or (recent) API trade records into | ||||
|     our ledger format with parsing for `MktPair` and `Asset` | ||||
|     extraction to fill in the `Transaction.sys: MktPair` field. | ||||
| 
 | ||||
|     ''' | ||||
|     records: list[Transaction] = [] | ||||
|     for tid, record in ledger.items(): | ||||
| 
 | ||||
|         txn = norm_trade( | ||||
|             tid, | ||||
|             record, | ||||
| 
 | ||||
|             # NOTE: currently no symcache support | ||||
|             pairs={}, | ||||
|             symcache=symcache, | ||||
|         ) | ||||
| 
 | ||||
|         if txn is None: | ||||
|             continue | ||||
| 
 | ||||
|         # inject txns sorted by datetime | ||||
|         insort( | ||||
|             records, | ||||
|             txn, | ||||
|             key=lambda t: t.dt | ||||
|         ) | ||||
| 
 | ||||
|     return {r.tid: r for r in records} | ||||
| 
 | ||||
| 
 | ||||
| def api_trades_to_ledger_entries( | ||||
|     accounts: bidict[str, str], | ||||
|     fills: list[Fill], | ||||
| 
 | ||||
| ) -> dict[str, dict]: | ||||
|     ''' | ||||
|     Convert API execution ``Fill`` objects into flattened-``dict`` | ||||
|     form, pretty much straight up without modification except adding | ||||
|     a `pydatetime` field from the parsed timestamp so that entries | ||||
|     can be datetime-sorted on write. | ||||
| 
 | ||||
|     ''' | ||||
|     trades_by_account: dict[str, dict] = {} | ||||
|     for fill in fills: | ||||
| 
 | ||||
|         # NOTE: for the schema, see the defn for `Fill` which is | ||||
|         # a `NamedTuple` subtype | ||||
|         fdict: dict = fill._asdict() | ||||
| 
 | ||||
|         # flatten all (sub-)objects and convert to dicts. | ||||
|         # with values packed into one top level entry. | ||||
|         val: CommissionReport | Execution | Contract | ||||
|         txn_dict: dict[str, Any] = {} | ||||
|         for attr_name, val in fdict.items(): | ||||
|             match attr_name: | ||||
|                 # value is a `@dataclass` subtype | ||||
|                 case 'contract' | 'execution' | 'commissionReport': | ||||
|                     txn_dict.update(asdict(val)) | ||||
| 
 | ||||
|                 case 'time': | ||||
|                     # ib has wack ns timestamps, or is that us? | ||||
|                     continue | ||||
| 
 | ||||
|                 # TODO: we can remove this case right since there's | ||||
|                 # only 4 fields on a `Fill`? | ||||
|                 case _: | ||||
|                     txn_dict[attr_name] = val | ||||
| 
 | ||||
|         tid = str(txn_dict['execId']) | ||||
|         dt = from_timestamp(txn_dict['time']) | ||||
|         txn_dict['datetime'] = str(dt) | ||||
|         acctid = accounts[txn_dict['acctNumber']] | ||||
| 
 | ||||
|         # NOTE: only inserted (then later popped) for sorting below! | ||||
|         txn_dict['pydatetime'] = dt | ||||
| 
 | ||||
|         if not tid: | ||||
|             # this is likely some kind of internal adjustment | ||||
|             # transaction, likely one of the following: | ||||
|             # - an expiry event that will show a "book trade" indicating | ||||
|             #   some adjustment to cash balances: zeroing or itm settle. | ||||
|             # - a manual cash balance position adjustment likely done by | ||||
|             #   the user from the accounts window in TWS where they can | ||||
|             #   manually set the avg price and size: | ||||
|             #   https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST | ||||
|             log.warning( | ||||
|                 'Skipping ID-less ledger txn_dict:\n' | ||||
|                 f'{pformat(txn_dict)}' | ||||
|             ) | ||||
|             continue | ||||
| 
 | ||||
|         trades_by_account.setdefault( | ||||
|             acctid, {} | ||||
|         )[tid] = txn_dict | ||||
| 
 | ||||
|     # TODO: maybe we should just bisect.insort() into a list of | ||||
|     # tuples and then return a dict of that? | ||||
|     # sort entries in output by python based datetime | ||||
|     for acctid in trades_by_account: | ||||
|         trades_by_account[acctid] = dict(sorted( | ||||
|             trades_by_account[acctid].items(), | ||||
|             key=lambda entry: entry[1].pop('pydatetime'), | ||||
|         )) | ||||
| 
 | ||||
|     return trades_by_account | ||||
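|  | ||||
| # A sketch of the `insort()`-into-tuples alternative mused about in | ||||
| # the TODO above (hypothetical helper, not wired in anywhere): | ||||
| def _sort_entries_by_dt( | ||||
|     entries: dict[str, dict], | ||||
| ) -> dict[str, dict]: | ||||
|     from bisect import insort | ||||
|  | ||||
|     pairs: list[tuple] = [] | ||||
|     for tid, entry in entries.items(): | ||||
|         # pop the py-native datetime (only inserted for ordering) | ||||
|         insort(pairs, (entry.pop('pydatetime'), tid, entry)) | ||||
|  | ||||
|     return {tid: entry for _, tid, entry in pairs} | ||||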
| 
 | ||||
| 
 | ||||
| async def update_ledger_from_api_trades( | ||||
|     fills: list[Fill], | ||||
|     client: Client | MethodProxy, | ||||
|     accounts_def_inv: bidict[str, str], | ||||
| 
 | ||||
|     # NOTE: provided for ad-hoc insertions "as transactions are | ||||
|     # processed" -> see `norm_trade()` signature requirements. | ||||
|     symcache: SymbologyCache | None = None, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     dict[str, Transaction], | ||||
|     dict[str, dict], | ||||
| ]: | ||||
|     # XXX; ERRGGG.. | ||||
|     # pack in the "primary/listing exchange" value from a | ||||
|     # contract lookup since it seems this isn't available by | ||||
|     # default from the `.fills()` method endpoint... | ||||
|     fill: Fill | ||||
|     for fill in fills: | ||||
|         con: Contract = fill.contract | ||||
|         conid: str = con.conId | ||||
|         pexch: str | None = con.primaryExchange | ||||
| 
 | ||||
|         if not pexch: | ||||
|             cons = await client.get_con(conid=conid) | ||||
|             if cons: | ||||
|                 con = cons[0] | ||||
|                 pexch = con.primaryExchange or con.exchange | ||||
|             else: | ||||
|                 # for futes it seems like the primary is always empty? | ||||
|                 pexch: str = con.exchange | ||||
| 
 | ||||
|         # pack in the ``Contract.secType`` | ||||
|         # entry['asset_type'] = condict['secType'] | ||||
| 
 | ||||
|     entries: dict[str, dict] = api_trades_to_ledger_entries( | ||||
|         accounts_def_inv, | ||||
|         fills, | ||||
|     ) | ||||
|     # normalize recent session's trades to the `Transaction` type | ||||
|     trans_by_acct: dict[str, dict[str, Transaction]] = {} | ||||
| 
 | ||||
|     for acctid, trades_by_id in entries.items(): | ||||
|         # normalize to transaction form | ||||
|         trans_by_acct[acctid] = norm_trade_records( | ||||
|             trades_by_id, | ||||
|             symcache=symcache, | ||||
|         ) | ||||
| 
 | ||||
|     return trans_by_acct, entries | ||||
|  | @ -1,615 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Symbology search and normalization. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     nullcontext, | ||||
| ) | ||||
| from decimal import Decimal | ||||
| import time | ||||
| from typing import ( | ||||
|     Awaitable, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from rapidfuzz import process as fuzzy | ||||
| import ib_insync as ibis | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| from piker.accounting import ( | ||||
|     Asset, | ||||
|     MktPair, | ||||
|     unpack_fqme, | ||||
| ) | ||||
| from piker._cacheables import ( | ||||
|     async_lifo_cache, | ||||
| ) | ||||
| 
 | ||||
| from ._util import ( | ||||
|     log, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from .api import ( | ||||
|         MethodProxy, | ||||
|         Client, | ||||
|     ) | ||||
| 
 | ||||
| _futes_venues = ( | ||||
|     'GLOBEX', | ||||
|     'NYMEX', | ||||
|     'CME', | ||||
|     'CMECRYPTO', | ||||
|     'COMEX', | ||||
|     # 'CMDTY',  # special name case.. | ||||
|     'CBOT',  # (treasury) yield futures | ||||
| ) | ||||
| 
 | ||||
| _adhoc_cmdty_set = { | ||||
|     # metals | ||||
|     # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 | ||||
|     'xauusd.cmdty',  # london gold spot ^ | ||||
|     'xagusd.cmdty',  # silver spot | ||||
| } | ||||
| 
 | ||||
| # NOTE: if you aren't seeing one of these symbols' futures contracts | ||||
| # show up, it's likely the `.<venue>` part is wrong! | ||||
| _adhoc_futes_set = { | ||||
| 
 | ||||
|     # equities | ||||
|     'nq.cme', | ||||
|     'mnq.cme',  # micro | ||||
| 
 | ||||
|     'es.cme', | ||||
|     'mes.cme',  # micro | ||||
| 
 | ||||
|     # crypto$ | ||||
|     'brr.cme', | ||||
|     'mbt.cme',  # micro | ||||
|     'ethusdrr.cme', | ||||
| 
 | ||||
|     # agriculture | ||||
|     'he.comex',  # lean hogs | ||||
|     'le.comex',  # live cattle (geezers) | ||||
|     'gf.comex',  # feeder cattle (younguns) | ||||
| 
 | ||||
|     # raw | ||||
|     'lb.comex',  # random len lumber | ||||
| 
 | ||||
|     'gc.comex', | ||||
|     'mgc.comex',  # micro | ||||
| 
 | ||||
|     # oil & gas | ||||
|     'cl.nymex', | ||||
| 
 | ||||
|     'ni.comex',  # silver futes | ||||
|     'qi.comex',  # mini-silver futes | ||||
| 
 | ||||
|     # treasury yields | ||||
|     # etfs by duration: | ||||
|     # SHY -> IEI -> IEF -> TLT | ||||
|     'zt.cbot',  # 2y | ||||
|     'z3n.cbot',  # 3y | ||||
|     'zf.cbot',  # 5y | ||||
|     'zn.cbot',  # 10y | ||||
|     'zb.cbot',  # 30y | ||||
| 
 | ||||
|     # (micros of above) | ||||
|     '2yy.cbot', | ||||
|     '5yy.cbot', | ||||
|     '10y.cbot', | ||||
|     '30y.cbot', | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| # taken from list here: | ||||
| # https://www.interactivebrokers.com/en/trading/products-spot-currencies.php | ||||
| _adhoc_fiat_set = set(( | ||||
|     'USD, AED, AUD, CAD,' | ||||
|     'CHF, CNH, CZK, DKK,' | ||||
|     'EUR, GBP, HKD, HUF,' | ||||
|     'ILS, JPY, MXN, NOK,' | ||||
|     'NZD, PLN, RUB, SAR,' | ||||
|     'SEK, SGD, TRY, ZAR' | ||||
|     ).replace(' ', '').split(',') | ||||
| ) | ||||
| 
 | ||||
| # manually discovered tick discrepancies, | ||||
| # only god knows how or why they'd cuck these up.. | ||||
| _adhoc_mkt_infos: dict[int | str, dict] = { | ||||
|     'vtgn.nasdaq': {'price_tick': Decimal('0.01')}, | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| # map of symbols to contract ids | ||||
| _adhoc_symbol_map = { | ||||
|     # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 | ||||
| 
 | ||||
|     # NOTE: some cmdtys/metals don't have trade data like gold/usd: | ||||
|     # https://groups.io/g/twsapi/message/44174 | ||||
|     'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}), | ||||
| } | ||||
| for qsn in _adhoc_futes_set: | ||||
|     sym, venue = qsn.split('.') | ||||
|     assert venue.upper() in _futes_venues, f'{venue}' | ||||
|     _adhoc_symbol_map[sym.upper()] = ( | ||||
|         {'exchange': venue}, | ||||
|         {}, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| # exchanges we don't support at the moment due to not knowing | ||||
| # how to do symbol-contract lookup correctly likely due | ||||
| # to not having the data feeds subscribed. | ||||
| _exch_skip_list = { | ||||
| 
 | ||||
|     'ASX',  # aussie stocks | ||||
|     'MEXI',  # mexican stocks | ||||
| 
 | ||||
|     # no idea | ||||
|     'NSE', | ||||
|     'VALUE', | ||||
|     'FUNDSERV', | ||||
|     'SWB2', | ||||
|     'PSE', | ||||
|     'PHLX', | ||||
| } | ||||
| 
 | ||||
| # optional search config the backend can register for | ||||
| # its symbol search handling (in this case we avoid | ||||
| # accepting patterns before the kb has settled more than | ||||
| # a quarter second). | ||||
| _search_conf = { | ||||
|     'pause_period': 6 / 16, | ||||
| } | ||||
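|  | ||||
| # A rough sketch of the kind of debounce a `pause_period` implies; | ||||
| # `recv`/`send` are hypothetical endpoints (the real handling lives | ||||
| # in the search UI machinery, not this module): | ||||
| async def _demo_debounced_forward( | ||||
|     recv,  # eg. a `trio.MemoryReceiveChannel` of patterns | ||||
|     send,  # async callable forwarding to the search backend | ||||
|     pause_period: float = 6 / 16, | ||||
| ) -> None: | ||||
|     pattern: str | None = None | ||||
|     while True: | ||||
|         with trio.move_on_after(pause_period) as cs: | ||||
|             pattern = await recv.receive() | ||||
|         if ( | ||||
|             cs.cancelled_caught | ||||
|             and pattern | ||||
|         ): | ||||
|             # kb input has settled -> ship the latest pattern | ||||
|             await send(pattern) | ||||
|             pattern = None | ||||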
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def open_symbol_search(ctx: tractor.Context) -> None: | ||||
|     ''' | ||||
|     Symbology search brokerd-endpoint. | ||||
| 
 | ||||
|     ''' | ||||
|     from .api import open_client_proxies | ||||
|     from .feed import open_data_client | ||||
| 
 | ||||
|     # TODO: load user defined symbol set locally for fast search? | ||||
|     await ctx.started({}) | ||||
| 
 | ||||
|     async with ( | ||||
|         open_client_proxies() as (proxies, _), | ||||
|         open_data_client() as data_proxy, | ||||
|     ): | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             # select a non-history client for symbol search to lighten | ||||
|             # the load in the main data node. | ||||
|             proxy = data_proxy | ||||
|             for name, proxy in proxies.items(): | ||||
|                 if proxy is data_proxy: | ||||
|                     continue | ||||
|                 break | ||||
| 
 | ||||
|             ib_client = proxy._aio_ns.ib | ||||
|             log.info( | ||||
|                 f'Using API client for symbol-search\n' | ||||
|                 f'{ib_client}\n' | ||||
|             ) | ||||
| 
 | ||||
|             last = time.time() | ||||
|             async for pattern in stream: | ||||
|                 log.info(f'received {pattern}') | ||||
|                 now: float = time.time() | ||||
| 
 | ||||
|                 # this causes tractor hang... | ||||
|                 # assert 0 | ||||
| 
 | ||||
|                 assert pattern, 'IB cannot accept a blank search pattern' | ||||
| 
 | ||||
|                 # throttle search requests to no faster than 1Hz | ||||
|                 diff = now - last | ||||
|                 if diff < 1.0: | ||||
|                     log.debug('throttle sleeping') | ||||
|                     await trio.sleep(diff) | ||||
|                     try: | ||||
|                         pattern = stream.receive_nowait() | ||||
|                     except trio.WouldBlock: | ||||
|                         pass | ||||
| 
 | ||||
|                 if ( | ||||
|                     not pattern | ||||
|                     or pattern.isspace() | ||||
| 
 | ||||
|                     # XXX: not sure if this is a bad assumption but it | ||||
|                     # seems to make search snappier? | ||||
|                     or len(pattern) < 1 | ||||
|                 ): | ||||
|                     log.warning('empty pattern received, skipping..') | ||||
| 
 | ||||
|                     # TODO: *BUG* if nothing is returned here the client | ||||
|                     # side will cache a null set result and not show | ||||
|                     # anything to the user on re-searches when this query | ||||
|                     # times out. We probably need a special "timeout" msg | ||||
|                     # or something... | ||||
| 
 | ||||
|                     # XXX: this unblocks the far end search task which may | ||||
|                     # hold up a multi-search nursery block | ||||
|                     await stream.send({}) | ||||
| 
 | ||||
|                     continue | ||||
| 
 | ||||
|                 log.info(f'searching for {pattern}') | ||||
| 
 | ||||
|                 last = time.time() | ||||
| 
 | ||||
|                 # async batch search using api stocks endpoint and module | ||||
|                 # defined adhoc symbol set. | ||||
|                 stock_results = [] | ||||
| 
 | ||||
|                 async def extend_results( | ||||
|                     target: Awaitable[list] | ||||
|                 ) -> None: | ||||
|                     try: | ||||
|                         results = await target | ||||
|                     except tractor.trionics.Lagged: | ||||
|                         print("IB SYM-SEARCH OVERRUN?!?") | ||||
|                         return | ||||
| 
 | ||||
|                     stock_results.extend(results) | ||||
| 
 | ||||
|                 for _ in range(10): | ||||
|                     with trio.move_on_after(3) as cs: | ||||
|                         async with trio.open_nursery() as sn: | ||||
|                             sn.start_soon( | ||||
|                                 extend_results, | ||||
|                                 proxy.search_symbols( | ||||
|                                     pattern=pattern, | ||||
|                                     upto=5, | ||||
|                                 ), | ||||
|                             ) | ||||
| 
 | ||||
|                             # trigger async request | ||||
|                             await trio.sleep(0) | ||||
| 
 | ||||
|                     if cs.cancelled_caught: | ||||
|                         log.warning( | ||||
|                             f'Search timeout? {proxy._aio_ns.ib.client}' | ||||
|                         ) | ||||
|                         continue | ||||
|                     elif stock_results: | ||||
|                         break | ||||
|                     # else: | ||||
|                     # await tractor.pause() | ||||
| 
 | ||||
|                     # # match against our ad-hoc set immediately | ||||
|                     # adhoc_matches = fuzzy.extract( | ||||
|                     #     pattern, | ||||
|                     #     list(_adhoc_futes_set), | ||||
|                     #     score_cutoff=90, | ||||
|                     # ) | ||||
|                     # log.info(f'fuzzy matched adhocs: {adhoc_matches}') | ||||
|                     # adhoc_match_results = {} | ||||
|                     # if adhoc_matches: | ||||
|                     #     # TODO: do we need to pull contract details? | ||||
|                     #     adhoc_match_results = {i[0]: {} for i in | ||||
|                     #     adhoc_matches} | ||||
| 
 | ||||
|                 log.debug(f'fuzzy matching stocks {stock_results}') | ||||
|                 stock_matches = fuzzy.extract( | ||||
|                     pattern, | ||||
|                     stock_results, | ||||
|                     score_cutoff=50, | ||||
|                 ) | ||||
| 
 | ||||
|                 # matches = adhoc_match_results | { | ||||
|                 matches = { | ||||
|                     item[0]: {} for item in stock_matches | ||||
|                 } | ||||
|                 # TODO: we used to deliver contract details | ||||
|                 # {item[2]: item[0] for item in stock_matches} | ||||
| 
 | ||||
|                 log.debug(f"sending matches: {matches.keys()}") | ||||
|                 await stream.send(matches) | ||||
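|                 # NOTE: `rapidfuzz.process.extract()` yields | ||||
|                 # `(match, score, index)` triples which is why only | ||||
|                 # `item[0]` is kept when repacking above, roughly: | ||||
|                 # | ||||
|                 # >>> fuzzy.extract('spy', ['spy', 'spyg', 'qqq'], score_cutoff=50) | ||||
|                 # [('spy', 100.0, 0), ('spyg', <score>, 1)] | ||||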
| 
 | ||||
| 
 | ||||
| # re-mapping to piker asset type names | ||||
| # https://github.com/erdewit/ib_insync/blob/master/ib_insync/contract.py#L113 | ||||
| _asset_type_map = { | ||||
|     'STK': 'stock', | ||||
|     'OPT': 'option', | ||||
|     'FUT': 'future', | ||||
|     'CONTFUT': 'continuous_future', | ||||
|     'CASH': 'fiat', | ||||
|     'IND': 'index', | ||||
|     'CFD': 'cfd', | ||||
|     'BOND': 'bond', | ||||
|     'CMDTY': 'commodity', | ||||
|     'FOP': 'futures_option', | ||||
|     'FUND': 'mutual_fund', | ||||
|     'WAR': 'warrant', | ||||
|     'IOPT': 'warrant', | ||||
|     'BAG': 'bag', | ||||
|     'CRYPTO': 'crypto',  # bc it's diff then fiat? | ||||
|     # 'NEWS': 'news', | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| def parse_patt2fqme( | ||||
|     # client: Client, | ||||
|     pattern: str, | ||||
| 
 | ||||
| ) -> tuple[str, str, str, str]: | ||||
| 
 | ||||
|     # TODO: we can't use this currently because | ||||
|     # ``wrapper.startTicker()`` currently caches ticker instances | ||||
|     # which means getting a single quote will potentially look up | ||||
|     # a quote for a ticker that is already streaming and thus run | ||||
|     # into state clobbering (eg. `Ticker.ticks: list`). It probably | ||||
|     # makes sense to try this once we get the pub-sub working on | ||||
|     # individual symbols... | ||||
| 
 | ||||
|     # XXX UPDATE: we can probably do the tick/trades scraping | ||||
|     # inside our eventkit handler instead to bypass this entirely? | ||||
| 
 | ||||
|     currency = '' | ||||
| 
 | ||||
|     # fqme parsing stage | ||||
|     # ------------------ | ||||
|     if '.ib' in pattern: | ||||
|         _, symbol, venue, expiry = unpack_fqme(pattern) | ||||
| 
 | ||||
|     else: | ||||
|         symbol = pattern | ||||
|         expiry = '' | ||||
| 
 | ||||
|         # # another hack for forex pairs lul. | ||||
|         # if ( | ||||
|         #     '.idealpro' in symbol | ||||
|         #     # or '/' in symbol | ||||
|         # ): | ||||
|         #     exch: str = 'IDEALPRO' | ||||
|         #     symbol = symbol.removesuffix('.idealpro') | ||||
|         #     if '/' in symbol: | ||||
|         #         symbol, currency = symbol.split('/') | ||||
| 
 | ||||
|         # else: | ||||
|         # TODO: yes, a cache.. | ||||
|         # try: | ||||
|         #     # give the cache a go | ||||
|         #     return client._contracts[symbol] | ||||
|         # except KeyError: | ||||
|         #     log.debug(f'Looking up contract for {symbol}') | ||||
|         expiry: str = '' | ||||
|         if symbol.count('.') > 1: | ||||
|             symbol, _, expiry = symbol.rpartition('.') | ||||
| 
 | ||||
|         # use heuristics to figure out contract "type" | ||||
|         symbol, venue = symbol.upper().rsplit('.', maxsplit=1) | ||||
| 
 | ||||
|     return symbol, currency, venue, expiry | ||||
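|  | ||||
| # Worked examples of the heuristic parse above (the expiry value is | ||||
| # made up purely for illustration): | ||||
| # | ||||
| # >>> parse_patt2fqme('mnq.cme') | ||||
| # ('MNQ', '', 'CME', '') | ||||
| # >>> parse_patt2fqme('mnq.cme.20230616') | ||||
| # ('MNQ', '', 'CME', '20230616') | ||||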
| 
 | ||||
| 
 | ||||
| def con2fqme( | ||||
|     con: ibis.Contract, | ||||
|     _cache: dict[int, (str, bool)] = {} | ||||
| 
 | ||||
| ) -> tuple[str, bool]: | ||||
|     ''' | ||||
|     Convert contracts to fqme-style strings to be used both in | ||||
|     symbol-search matching and as feed tokens passed to the front | ||||
|     end data feed layer. | ||||
| 
 | ||||
|     Previously seen contracts are cached by id. | ||||
| 
 | ||||
|     ''' | ||||
|     # should be real volume for this contract by default | ||||
|     calc_price: bool = False | ||||
|     if con.conId: | ||||
|         try: | ||||
|             # TODO: LOL so apparently IB just changes the contract | ||||
|             # ID (int) on a whim.. so we probably need to use an | ||||
|             # FQME style key after all... | ||||
|             return _cache[con.conId] | ||||
|         except KeyError: | ||||
|             pass | ||||
| 
 | ||||
|     suffix: str = con.primaryExchange or con.exchange | ||||
|     symbol: str = con.symbol | ||||
|     expiry: str = con.lastTradeDateOrContractMonth or '' | ||||
| 
 | ||||
|     match con: | ||||
|         case ibis.Option(): | ||||
|             # TODO: option symbol parsing and sane display: | ||||
|             symbol = con.localSymbol.replace(' ', '') | ||||
| 
 | ||||
|         case ( | ||||
|             ibis.Commodity() | ||||
|             # search API endpoint returns std con box.. | ||||
|             | ibis.Contract(secType='CMDTY') | ||||
|         ): | ||||
|             # commodities and forex don't have an exchange name and | ||||
|             # no real volume so we have to calculate the price | ||||
|             suffix = con.secType | ||||
| 
 | ||||
|             # no real volume on this contract | ||||
|             calc_price = True | ||||
| 
 | ||||
|         case ibis.Forex() | ibis.Contract(secType='CASH'): | ||||
|             dst, src = con.localSymbol.split('.') | ||||
|             symbol = ''.join([dst, src]) | ||||
|             suffix = con.exchange or 'idealpro' | ||||
| 
 | ||||
|             # no real volume on forex feeds.. | ||||
|             calc_price = True | ||||
| 
 | ||||
|     if not suffix: | ||||
|         entry = _adhoc_symbol_map.get( | ||||
|             con.symbol or con.localSymbol | ||||
|         ) | ||||
|         if entry: | ||||
|             meta, kwargs = entry | ||||
|             cid = meta.get('conId') | ||||
|             if cid: | ||||
|                 assert con.conId == meta['conId'] | ||||
|             suffix = meta['exchange'] | ||||
| 
 | ||||
|     # append a `.<suffix>` to the returned symbol | ||||
|     # key for derivatives that normally is the expiry | ||||
|     # date key. | ||||
|     if expiry: | ||||
|         suffix += f'.{expiry}' | ||||
| 
 | ||||
|     fqme_key = symbol.lower() | ||||
|     if suffix: | ||||
|         fqme_key = '.'.join((fqme_key, suffix)).lower() | ||||
| 
 | ||||
|     _cache[con.conId] = fqme_key, calc_price | ||||
|     return fqme_key, calc_price | ||||
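|  | ||||
| # eg. a qualified fx contract (ie. one where the API has filled in | ||||
| # `.localSymbol`) should map to an idealpro-suffixed key with | ||||
| # `calc_price` set since forex feeds deliver no real volume: | ||||
| # | ||||
| # >>> con2fqme(<qualified ibis.Forex for EUR/USD>) | ||||
| # ('eurusd.idealpro', True) | ||||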
| 
 | ||||
| 
 | ||||
| @async_lifo_cache() | ||||
| async def get_mkt_info( | ||||
|     fqme: str, | ||||
| 
 | ||||
|     proxy: MethodProxy | None = None, | ||||
| 
 | ||||
| ) -> tuple[MktPair, ibis.ContractDetails]: | ||||
| 
 | ||||
|     if '.ib' not in fqme: | ||||
|         fqme += '.ib' | ||||
|     broker, pair, venue, expiry = unpack_fqme(fqme) | ||||
| 
 | ||||
|     proxy: MethodProxy | ||||
|     if proxy is not None: | ||||
|         client_ctx = nullcontext(proxy) | ||||
|     else: | ||||
|         from .feed import ( | ||||
|             open_data_client, | ||||
|         ) | ||||
|         client_ctx = open_data_client | ||||
| 
 | ||||
|     async with client_ctx as proxy: | ||||
|         try: | ||||
|             ( | ||||
|                 con,  # Contract | ||||
|                 details,  # ContractDetails | ||||
|             ) = await proxy.get_sym_details(fqme=fqme) | ||||
|         except ConnectionError: | ||||
|             log.exception(f'Proxy is ded {proxy._aio_ns}') | ||||
|             raise | ||||
| 
 | ||||
|     # TODO: more consistent field translation | ||||
|     atype = _asset_type_map[con.secType] | ||||
| 
 | ||||
|     if atype == 'commodity': | ||||
|         venue: str = 'cmdty' | ||||
|     else: | ||||
|         venue = con.primaryExchange or con.exchange | ||||
| 
 | ||||
|     price_tick: Decimal = Decimal(str(details.minTick)) | ||||
|     ib_min_tick_gt_2: Decimal = Decimal('0.01') | ||||
|     if ( | ||||
|         price_tick < ib_min_tick_gt_2 | ||||
|     ): | ||||
|         # TODO: we need to add some kinda dynamic rounding sys | ||||
|         # to our MktPair i guess? | ||||
|         # not sure where the logic should sit, but likely inside | ||||
|         # the `.clearing._ems` i suppose... | ||||
|         log.warning( | ||||
|             'IB seems to disallow a min price tick < 0.01 ' | ||||
|             'when the price is > 2.0..?\n' | ||||
|             f'Decreasing min tick precision for {fqme} to 0.01' | ||||
|         ) | ||||
|         # price_tick = ib_min_tick | ||||
|         # await tractor.pause() | ||||
| 
 | ||||
|     if atype == 'stock': | ||||
|         # XXX: GRRRR they don't support fractional share sizes for | ||||
|         # stocks from the API?! | ||||
|         # if con.secType == 'STK': | ||||
|         size_tick = Decimal('1') | ||||
|     else: | ||||
|         size_tick: Decimal = Decimal( | ||||
|             str(details.minSize).rstrip('0') | ||||
|         ) | ||||
|         # |-> TODO: there is also the Contract.sizeIncrement, but wtf is it? | ||||
| 
 | ||||
|     # NOTE: this is duplicate from the .broker.norm_trade_records() | ||||
|     # routine, we should factor all this parsing somewhere.. | ||||
|     expiry_str = str(con.lastTradeDateOrContractMonth) | ||||
|     # if expiry: | ||||
|     #     expiry_str: str = str(pendulum.parse( | ||||
|     #         str(expiry).strip(' ') | ||||
|     #     )) | ||||
| 
 | ||||
|     # TODO: currently we can't pass the fiat src asset because | ||||
|     # then we'll get a `MNQUSD` request for history data.. | ||||
|     # we need to figure out how we're going to handle this (later?) | ||||
|     # but likely we want all backends to eventually handle | ||||
|     # ``dst/src.venue.`` style !? | ||||
|     src = Asset( | ||||
|         name=str(con.currency).lower(), | ||||
|         atype='fiat', | ||||
|         tx_tick=Decimal('0.01'),  # right? | ||||
|     ) | ||||
|     dst = Asset( | ||||
|         name=con.symbol.lower(), | ||||
|         atype=atype, | ||||
|         tx_tick=size_tick, | ||||
|     ) | ||||
| 
 | ||||
|     mkt = MktPair( | ||||
|         src=src, | ||||
|         dst=dst, | ||||
| 
 | ||||
|         price_tick=price_tick, | ||||
|         size_tick=size_tick, | ||||
| 
 | ||||
|         bs_mktid=str(con.conId), | ||||
|         venue=str(venue), | ||||
|         expiry=expiry_str, | ||||
|         broker='ib', | ||||
| 
 | ||||
|         # TODO: options contract info as str? | ||||
|         # contract_info=<optionsdetails> | ||||
|         _fqme_without_src=(atype != 'fiat'), | ||||
|     ) | ||||
| 
 | ||||
|     # just.. wow. | ||||
|     if entry := _adhoc_mkt_infos.get(mkt.bs_fqme): | ||||
|         log.warning(f'Frickin {mkt.fqme} has an adhoc {entry}..') | ||||
|         new = mkt.to_dict() | ||||
|         new['price_tick'] = entry['price_tick'] | ||||
|         new['src'] = src | ||||
|         new['dst'] = dst | ||||
|         mkt = MktPair(**new) | ||||
| 
 | ||||
|     # if possible register the bs_mktid to the just-built | ||||
|     # mkt so that it can be retrieved by order mode tasks later. | ||||
|     # TODO NOTE: this is going to be problematic if/when we split | ||||
|     # out the datad vs. brokerd actors since the mktmap lookup | ||||
|     # table will now be inaccessible.. | ||||
|     if proxy is not None: | ||||
|         client: Client = proxy._aio_ns | ||||
|         client._contracts[mkt.bs_fqme] = con | ||||
|         client._cons2mkts[con] = mkt | ||||
| 
 | ||||
|     return mkt, details | ||||
|  | @ -1,64 +0,0 @@ | |||
| ``kraken`` backend | ||||
| ------------------ | ||||
| though they don't have the most liquidity of all the cexes they sure are | ||||
| accommodating to those of us who appreciate a little ``xmr``. | ||||
| 
 | ||||
| status | ||||
| ****** | ||||
| current support is *production grade* and both real-time data and order | ||||
| management should be correct and fast. this backend is used by core devs | ||||
| for live trading. | ||||
| 
 | ||||
| 
 | ||||
| config | ||||
| ****** | ||||
| In order to get order mode support your ``brokers.toml`` | ||||
| needs to have something like the following: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|    [kraken] | ||||
|    accounts.spot = 'spot' | ||||
|    key_descr = "spot" | ||||
|    api_key = "69696969696969696696969696969696969696969696969696969696" | ||||
|    secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696" | ||||
| 
 | ||||
| 
 | ||||
| If everything works correctly you should see any current positions | ||||
| loaded in the pps pane on chart load and you should also be able to | ||||
| check your trade records in the file:: | ||||
| 
 | ||||
|     <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml | ||||
| 
 | ||||
| 
 | ||||
| An example ledger file will have entries written verbatim from the | ||||
| trade events schema: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|     [TFJBKK-SMBZS-VJ4UWS] | ||||
|     ordertxid = "SMBZSA-7CNQU-3HWLNJ" | ||||
|     postxid = "SMBZSE-M7IF5-CFI7LT" | ||||
|     pair = "XXMRZEUR" | ||||
|     time = 1655691993.4133966 | ||||
|     type = "buy" | ||||
|     ordertype = "limit" | ||||
|     price = "103.97000000" | ||||
|     cost = "499.99999977" | ||||
|     fee = "0.80000000" | ||||
|     vol = "4.80907954" | ||||
|     margin = "0.00000000" | ||||
|     misc = "" | ||||
| 
 | ||||
| 
 | ||||
| your ``pps.toml`` file will have position entries like: | ||||
| 
 | ||||
| .. code:: toml | ||||
| 
 | ||||
|    [kraken.spot."xmreur.kraken"] | ||||
|    size = 4.80907954 | ||||
|    ppu = 103.97000000 | ||||
|    bs_mktid = "XXMRZEUR" | ||||
|    clears = [ | ||||
|     { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" }, | ||||
|    ] | ||||
|  | @ -19,57 +19,43 @@ Kraken backend. | |||
| 
 | ||||
| Sub-modules within break into the core functionalities: | ||||
| 
 | ||||
| - .api: for the core API machinery which is generally | ||||
|         an ``asks``/``trio-websocket`` implemented ``Client``. | ||||
| - .broker: part for orders / trading endpoints. | ||||
| - .feed: for real-time and historical data query endpoints. | ||||
| - .ledger: for transaction processing as it pertains to accounting. | ||||
| - .symbols: for market (name) search and symbology meta-defs. | ||||
| - ``broker.py`` part for orders / trading endpoints | ||||
| - ``feed.py`` for real-time data feed endpoints | ||||
| - ``api.py`` for the core API machinery which is ``trio``-ized | ||||
|   wrapping around ``ib_insync``. | ||||
| 
 | ||||
| ''' | ||||
| from .symbols import ( | ||||
|     Pair,  # for symcache | ||||
|     open_symbol_search, | ||||
|     # required by `.accounting`, `.data` | ||||
|     get_mkt_info, | ||||
| ) | ||||
| # required by `.brokers` | ||||
| 
 | ||||
| from piker.log import get_logger | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| from .api import ( | ||||
|     get_client, | ||||
| ) | ||||
| from .feed import ( | ||||
|     # required by `.data` | ||||
|     stream_quotes, | ||||
|     open_history_client, | ||||
|     open_symbol_search, | ||||
|     stream_quotes, | ||||
| ) | ||||
| from .broker import ( | ||||
|     # required by `.clearing` | ||||
|     open_trade_dialog, | ||||
| ) | ||||
| from .ledger import ( | ||||
|     # required by `.accounting` | ||||
|     norm_trade, | ||||
|     trades_dialogue, | ||||
|     norm_trade_records, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'get_client', | ||||
|     'get_mkt_info', | ||||
|     'Pair', | ||||
|     'open_trade_dialog', | ||||
|     'trades_dialogue', | ||||
|     'open_history_client', | ||||
|     'open_symbol_search', | ||||
|     'stream_quotes', | ||||
|     'norm_trade_records', | ||||
|     'norm_trade', | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| # tractor RPC enable arg | ||||
| __enable_modules__: list[str] = [ | ||||
|     'api', | ||||
|     'broker', | ||||
|     'feed', | ||||
|     'symbols', | ||||
|     'broker', | ||||
| ] | ||||
|  |  | |||
|  | @ -15,78 +15,100 @@ | |||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Core (web) API client | ||||
| Kraken web API wrapping. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from dataclasses import field | ||||
| from datetime import datetime | ||||
| import itertools | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Union, | ||||
| ) | ||||
| import time | ||||
| 
 | ||||
| import httpx | ||||
| # import trio | ||||
| # import tractor | ||||
| import pendulum | ||||
| import asks | ||||
| from fuzzywuzzy import process as fuzzy | ||||
| import numpy as np | ||||
| from pydantic.dataclasses import dataclass | ||||
| import urllib.parse | ||||
| import hashlib | ||||
| import hmac | ||||
| import base64 | ||||
| import trio | ||||
| 
 | ||||
| from piker import config | ||||
| from piker.data import ( | ||||
|     def_iohlcv_fields, | ||||
|     match_from_pairs, | ||||
| ) | ||||
| from piker.accounting._mktinfo import ( | ||||
|     Asset, | ||||
|     digits_to_dec, | ||||
|     dec_digits, | ||||
| ) | ||||
| from piker.brokers._util import ( | ||||
|     resproc, | ||||
|     SymbolNotFound, | ||||
|     BrokerError, | ||||
|     DataThrottle, | ||||
| ) | ||||
| from piker.accounting import Transaction | ||||
| from piker.log import get_logger | ||||
| from .symbols import Pair | ||||
| 
 | ||||
| log = get_logger('piker.brokers.kraken') | ||||
| from . import log | ||||
| 
 | ||||
| # <uri>/<version>/ | ||||
| _url = 'https://api.kraken.com/0' | ||||
| 
 | ||||
| _headers: dict[str, str] = { | ||||
|     'User-Agent': 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)' | ||||
| } | ||||
| 
 | ||||
| # TODO: this is the only backend providing this right? | ||||
| # in which case we should drop it from the defaults and | ||||
| # instead make a custom fields descr in this module! | ||||
| # Broker specific ohlc schema which includes a vwap field | ||||
| _ohlc_dtype = [ | ||||
|     ('index', int), | ||||
|     ('time', int), | ||||
|     ('open', float), | ||||
|     ('high', float), | ||||
|     ('low', float), | ||||
|     ('close', float), | ||||
|     ('volume', float), | ||||
|     ('count', int), | ||||
|     ('bar_wap', float), | ||||
| ] | ||||
| 
 | ||||
| # UI components allow this to be declared such that additional | ||||
| # (historical) fields can be exposed. | ||||
| ohlc_dtype = np.dtype(_ohlc_dtype) | ||||
| 
 | ||||
| _show_wap_in_history = True | ||||
| _symbol_info_translation: dict[str, str] = { | ||||
|     'tick_decimals': 'pair_decimals', | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| def get_config() -> dict[str, Any]: | ||||
| @dataclass | ||||
| class OHLC: | ||||
|     ''' | ||||
|     Load our section from `piker/brokers.toml`. | ||||
|     Description of the flattened OHLC quote format. | ||||
| 
 | ||||
|     For schema details see: | ||||
|         https://docs.kraken.com/websockets/#message-ohlc | ||||
| 
 | ||||
|     ''' | ||||
|     conf, path = config.load( | ||||
|         conf_name='brokers', | ||||
|         touch_if_dne=True, | ||||
|     ) | ||||
|     if (section := conf.get('kraken')) is None: | ||||
|         log.warning( | ||||
|             f'No config section found for kraken in {path}' | ||||
|         ) | ||||
|     chan_id: int  # internal kraken id | ||||
|     chan_name: str  # eg. ohlc-1  (name-interval) | ||||
|     pair: str  # fx pair | ||||
|     time: float  # Begin time of interval, in seconds since epoch | ||||
|     etime: float  # End time of interval, in seconds since epoch | ||||
|     open: float  # Open price of interval | ||||
|     high: float  # High price within interval | ||||
|     low: float  # Low price within interval | ||||
|     close: float  # Close price of interval | ||||
|     vwap: float  # Volume weighted average price within interval | ||||
|     volume: float  # Accumulated volume **within interval** | ||||
|     count: int  # Number of trades within interval | ||||
|     # (sampled) generated tick data | ||||
|     ticks: list[Any] = field(default_factory=list) | ||||
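|  | ||||
| # A sketch of packing a ws `ohlc-1` msg into the struct above, | ||||
| # assuming the array layout from the docs linked in the docstring: | ||||
| # [chan_id, [time, etime, open, high, low, close, vwap, volume, | ||||
| # count], chan_name, pair] | ||||
| def _demo_ohlc_from_msg(msg: list) -> OHLC: | ||||
|     chan_id, array, chan_name, pair = msg | ||||
|     return OHLC( | ||||
|         chan_id=chan_id, | ||||
|         chan_name=chan_name, | ||||
|         pair=pair, | ||||
|         time=float(array[0]), | ||||
|         etime=float(array[1]), | ||||
|         open=float(array[2]), | ||||
|         high=float(array[3]), | ||||
|         low=float(array[4]), | ||||
|         close=float(array[5]), | ||||
|         vwap=float(array[6]), | ||||
|         volume=float(array[7]), | ||||
|         count=int(array[8]), | ||||
|     ) | ||||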
| 
 | ||||
| 
 | ||||
| def get_config() -> dict[str, Any]: | ||||
| 
 | ||||
|     conf, path = config.load() | ||||
|     section = conf.get('kraken') | ||||
| 
 | ||||
|     if section is None: | ||||
|         log.warning(f'No config section found for kraken in {path}') | ||||
|         return {} | ||||
| 
 | ||||
|     return section | ||||
|  | @ -119,49 +141,30 @@ class InvalidKey(ValueError): | |||
| 
 | ||||
| class Client: | ||||
| 
 | ||||
|     # assets and mkt pairs are key-ed by kraken's ReST response | ||||
|     # symbol-bs_mktids (we call them "X-keys" like fricking | ||||
|     # "XXMRZEUR"). these keys used directly since ledger endpoints | ||||
|     # return transaction sets keyed with the same set! | ||||
|     _Assets: dict[str, Asset] = {} | ||||
|     _AssetPairs: dict[str, Pair] = {} | ||||
| 
 | ||||
|     # offer lookup tables for all .altname and .wsname | ||||
|     # to the equivalent .xname so that various symbol-schemas | ||||
|     # can be mapped to `Pair`s in the tables above. | ||||
|     _altnames: dict[str, str] = {} | ||||
|     _wsnames: dict[str, str] = {} | ||||
| 
 | ||||
|     # key-ed by `Pair.bs_fqme: str`, and thus used for search | ||||
|     # allowing for lookup using piker's own FQME symbology sys. | ||||
|     _pairs: dict[str, Pair] = {} | ||||
|     _assets: dict[str, Asset] = {} | ||||
| 
 | ||||
|     def __init__( | ||||
|         self, | ||||
|         config: dict[str, str], | ||||
|         httpx_client: httpx.AsyncClient, | ||||
| 
 | ||||
|         name: str = '', | ||||
|         api_key: str = '', | ||||
|         secret: str = '' | ||||
|     ) -> None: | ||||
| 
 | ||||
|         self._sesh: httpx.AsyncClient = httpx_client | ||||
| 
 | ||||
|         self._sesh = asks.Session(connections=4) | ||||
|         self._sesh.base_location = _url | ||||
|         self._sesh.headers.update({ | ||||
|             'User-Agent': | ||||
|                 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)' | ||||
|         }) | ||||
|         self._pairs: list[str] = [] | ||||
|         self._name = name | ||||
|         self._api_key = api_key | ||||
|         self._secret = secret | ||||
| 
 | ||||
|         self.conf: dict[str, str] = config | ||||
| 
 | ||||
|     @property | ||||
|     def pairs(self) -> dict[str, Pair]: | ||||
| 
 | ||||
|     def pairs(self) -> dict[str, Any]: | ||||
|         if self._pairs is None: | ||||
|             raise RuntimeError( | ||||
|                 "Client didn't run `.get_mkt_pairs()` on startup?!" | ||||
|                 "Make sure to run `cache_symbols()` on startup!" | ||||
|             ) | ||||
|             # retrieve and cache all symbols | ||||
| 
 | ||||
|         return self._pairs | ||||
| 
 | ||||
|  | @ -170,9 +173,10 @@ class Client: | |||
|         method: str, | ||||
|         data: dict, | ||||
|     ) -> dict[str, Any]: | ||||
|         resp: httpx.Response = await self._sesh.post( | ||||
|             url=f'/public/{method}', | ||||
|         resp = await self._sesh.post( | ||||
|             path=f'/public/{method}', | ||||
|             json=data, | ||||
|             timeout=float('inf') | ||||
|         ) | ||||
|         return resproc(resp, log) | ||||
| 
 | ||||
|  | @ -183,18 +187,18 @@ class Client: | |||
|         uri_path: str | ||||
|     ) -> dict[str, Any]: | ||||
|         headers = { | ||||
|             'Content-Type': 'application/x-www-form-urlencoded', | ||||
|             'API-Key': self._api_key, | ||||
|             'API-Sign': get_kraken_signature( | ||||
|                 uri_path, | ||||
|                 data, | ||||
|                 self._secret, | ||||
|             ), | ||||
|             'Content-Type': | ||||
|                 'application/x-www-form-urlencoded', | ||||
|             'API-Key': | ||||
|                 self._api_key, | ||||
|             'API-Sign': | ||||
|                 get_kraken_signature(uri_path, data, self._secret) | ||||
|         } | ||||
|         resp: httpx.Response = await self._sesh.post( | ||||
|             url=f'/private/{method}', | ||||
|         resp = await self._sesh.post( | ||||
|             path=f'/private/{method}', | ||||
|             data=data, | ||||
|             headers=headers, | ||||
|             timeout=float('inf') | ||||
|         ) | ||||
|         return resproc(resp, log) | ||||
| 
 | ||||
|  | @ -208,93 +212,8 @@ class Client: | |||
|         data['nonce'] = str(int(1000*time.time())) | ||||
|         return await self._private(method, data, uri_path) | ||||
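|  | ||||
|     # NOTE: `get_kraken_signature()` (defined elsewhere in this | ||||
|     # module) is assumed to follow the standard scheme from kraken's | ||||
|     # API docs: an HMAC-SHA512, keyed with the base64-decoded secret, | ||||
|     # over `uri_path + sha256(nonce + urlencoded-postdata)`, roughly: | ||||
|     # | ||||
|     # postdata = urllib.parse.urlencode(data) | ||||
|     # encoded = (str(data['nonce']) + postdata).encode() | ||||
|     # message = uri_path.encode() + hashlib.sha256(encoded).digest() | ||||
|     # sig = base64.b64encode( | ||||
|     #     hmac.new( | ||||
|     #         base64.b64decode(secret), | ||||
|     #         message, | ||||
|     #         hashlib.sha512, | ||||
|     #     ).digest() | ||||
|     # ).decode() | ||||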
| 
 | ||||
|     async def get_balances( | ||||
|         self, | ||||
|     ) -> dict[str, float]: | ||||
|         ''' | ||||
|         Return the set of asset balances for this account | ||||
|         by symbol. | ||||
| 
 | ||||
|         ''' | ||||
|         resp = await self.endpoint( | ||||
|             'Balance', | ||||
|             {}, | ||||
|         ) | ||||
|         by_bsmktid: dict[str, dict] = resp['result'] | ||||
| 
 | ||||
|         balances: dict = {} | ||||
|         for xname, bal in by_bsmktid.items(): | ||||
|             asset: Asset = self._Assets[xname] | ||||
| 
 | ||||
|             # TODO: which KEY should we use? it's used to index | ||||
|             # the `Account.pps: dict` .. | ||||
|             key: str = asset.name.lower() | ||||
|             # TODO: should we just return a `Decimal` here | ||||
|             # or is the rounded version ok? | ||||
|             balances[key] = round( | ||||
|                 float(bal), | ||||
|                 ndigits=dec_digits(asset.tx_tick) | ||||
|             ) | ||||
| 
 | ||||
|         return balances | ||||
| 
 | ||||
|     async def get_assets( | ||||
|         self, | ||||
|         reload: bool = False, | ||||
| 
 | ||||
|     ) -> dict[str, Asset]: | ||||
|         ''' | ||||
|         Load and cache all asset infos and pack into | ||||
|         our native ``Asset`` struct. | ||||
| 
 | ||||
|         https://docs.kraken.com/rest/#tag/Market-Data/operation/getAssetInfo | ||||
| 
 | ||||
|         return msg: | ||||
|             "asset1": { | ||||
|                 "aclass": "string", | ||||
|                 "altname": "string", | ||||
|                 "decimals": 0, | ||||
|                 "display_decimals": 0, | ||||
|                 "collateral_value": 0, | ||||
|                 "status": "string" | ||||
|             } | ||||
| 
 | ||||
|         ''' | ||||
|         if ( | ||||
|             not self._assets | ||||
|             or reload | ||||
|         ): | ||||
|             resp = await self._public('Assets', {}) | ||||
|             assets: dict[str, dict] = resp['result'] | ||||
| 
 | ||||
|             for bs_mktid, info in assets.items(): | ||||
| 
 | ||||
|                 altname: str = info['altname'] | ||||
|                 aclass: str = info['aclass'] | ||||
|                 asset = Asset( | ||||
|                     name=altname, | ||||
|                     atype=f'crypto_{aclass}', | ||||
|                     tx_tick=digits_to_dec(info['decimals']), | ||||
|                     info=info, | ||||
|                 ) | ||||
|                 # NOTE: yes we keep 2 sets since kraken insists on | ||||
|                 # keeping 3 frickin sets bc apparently they have | ||||
|                 # no sane data engineers whol all like different | ||||
|                 # keys for their fricking symbology sets.. | ||||
|                 self._Assets[bs_mktid] = asset | ||||
|                 self._assets[altname.lower()] = asset | ||||
|                 self._assets[altname] = asset | ||||
| 
 | ||||
|         # we return the "most native" set merged with our preferred | ||||
|         # naming (which i guess is the "altname" one) since that's | ||||
|         # what the symcache loader will be storing, and we need the | ||||
|         # keys that are easiest to match against in any trade | ||||
|         # records. | ||||
|         return self._Assets | self._assets | ||||
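|  | ||||
|     # NOTE: the digit <-> tick conversions used above are assumed to | ||||
|     # be simple power-of-ten mappings, roughly: | ||||
|     # | ||||
|     # digits_to_dec(8) -> Decimal('1e-8')  # tx tick from dec places | ||||
|     # dec_digits(Decimal('1e-8')) -> 8     # and the inverse | ||||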
| 
 | ||||
|     async def get_trades( | ||||
|         self, | ||||
|         fetch_limit: int | None = None, | ||||
| 
 | ||||
|     ) -> dict[str, Any]: | ||||
|         ''' | ||||
|  | @ -306,11 +225,6 @@ class Client: | |||
|         trades_by_id: dict[str, Any] = {} | ||||
| 
 | ||||
|         for i in itertools.count(): | ||||
|             if ( | ||||
|                 fetch_limit | ||||
|                 and i >= fetch_limit | ||||
|             ): | ||||
|                 break | ||||
| 
 | ||||
|             # increment 'ofs' pagination offset | ||||
|             ofs = i*50 | ||||
|  | @ -322,8 +236,7 @@ class Client: | |||
|             by_id = resp['result']['trades'] | ||||
|             trades_by_id.update(by_id) | ||||
| 
 | ||||
|             # can get up to 50 results per query, see: | ||||
|             # https://docs.kraken.com/rest/#tag/User-Data/operation/getTradeHistory | ||||
|             # we can get up to 50 results per query | ||||
|             if ( | ||||
|                 len(by_id) < 50 | ||||
|             ): | ||||
|  | @ -341,79 +254,6 @@ class Client: | |||
|         assert count == len(trades_by_id.values()) | ||||
|         return trades_by_id | ||||
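|  | ||||
|     # pagination sketch: page `i` requests records at offset | ||||
|     # `ofs = i*50`, so the loop walks [0, 50), [50, 100), .. and | ||||
|     # a short (< 50 entry) page marks the final one. | ||||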
| 
 | ||||
|     async def get_xfers( | ||||
|         self, | ||||
|         asset: str, | ||||
|         src_asset: str = '', | ||||
| 
 | ||||
|     ) -> dict[str, Transaction]: | ||||
|         ''' | ||||
|         Get asset balance transfer transactions. | ||||
| 
 | ||||
|         Currently only withdrawals are supported. | ||||
| 
 | ||||
|         ''' | ||||
|         resp = await self.endpoint( | ||||
|             'WithdrawStatus', | ||||
|             {'asset': asset}, | ||||
|         ) | ||||
|         try: | ||||
|             xfers: list[dict] = resp['result'] | ||||
|         except KeyError: | ||||
|             log.exception(f'Kraken suxxx: {resp}') | ||||
|             return [] | ||||
| 
 | ||||
|         # eg. resp schema: | ||||
|         # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset': | ||||
|         #     'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid': | ||||
|         #     'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44', | ||||
|         #     'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z', | ||||
|         #     'amount': '0.00300726', 'fee': '0.00001000', 'time': | ||||
|         #     1658347714, 'status': 'Success'}]} | ||||
| 
 | ||||
|         if xfers: | ||||
|             import tractor | ||||
|             await tractor.pp() | ||||
| 
 | ||||
|         trans: dict[str, Transaction] = {} | ||||
|         for entry in xfers: | ||||
|             # look up the normalized name and asset info | ||||
|             asset_key: str = entry['asset'] | ||||
|             asset: Asset = self._Assets[asset_key] | ||||
|             asset_key: str = asset.name.lower() | ||||
| 
 | ||||
|             # XXX: this is in the asset units (likely) so it isn't | ||||
|             # quite the same as a commissions cost necessarily..) | ||||
|             # TODO: also round this based on `Pair` cost precision info? | ||||
|             cost = float(entry['fee']) | ||||
|             # fqme: str = asset_key + '.kraken' | ||||
| 
 | ||||
|             tx = Transaction( | ||||
|                 fqme=asset_key,  # this must map to an entry in .assets! | ||||
|                 tid=entry['txid'], | ||||
|                 dt=pendulum.from_timestamp(entry['time']), | ||||
|                 bs_mktid=f'{asset_key}{src_asset}', | ||||
|                 size=-1*( | ||||
|                     float(entry['amount']) | ||||
|                     + | ||||
|                     cost | ||||
|                 ), | ||||
|                 # since this will be treated as a "sell" it | ||||
|                 # shouldn't be needed to compute the breakeven price. | ||||
|                 price='NaN', | ||||
| 
 | ||||
|                 # XXX: see note above | ||||
|                 cost=cost, | ||||
| 
 | ||||
|                 # not a trade but a withdrawal or deposit on the | ||||
|                 # asset (chain) system. | ||||
|                 etype='transfer', | ||||
| 
 | ||||
|             ) | ||||
|             trans[tx.tid] = tx | ||||
| 
 | ||||
|         return trans | ||||
| 
 | ||||
|     async def submit_limit( | ||||
|         self, | ||||
|         symbol: str, | ||||
|  | @ -442,7 +282,6 @@ class Client: | |||
|                 "volume": str(size), | ||||
|             } | ||||
|             return await self.endpoint('AddOrder', data) | ||||
| 
 | ||||
|         else: | ||||
|             # Edit order data for kraken api | ||||
|             data["txid"] = reqid | ||||
|  | @ -459,124 +298,61 @@ class Client: | |||
|         # txid is a transaction id given by kraken | ||||
|         return await self.endpoint('CancelOrder', {"txid": reqid}) | ||||
| 
 | ||||
|     async def asset_pairs( | ||||
|     async def symbol_info( | ||||
|         self, | ||||
|         pair_patt: str | None = None, | ||||
|         pair: Optional[str] = None, | ||||
|     ): | ||||
|         if pair is not None: | ||||
|             pairs = {'pair': pair} | ||||
|         else: | ||||
|             pairs = None  # get all pairs | ||||
| 
 | ||||
|     ) -> dict[str, Pair] | Pair: | ||||
|         ''' | ||||
|         Query for a tradeable asset pair (info), or all if no input | ||||
|         pattern is provided. | ||||
|         resp = await self._public('AssetPairs', pairs) | ||||
|         err = resp['error'] | ||||
|         if err: | ||||
|             symbolname = pairs['pair'] if pair else None | ||||
|             raise SymbolNotFound(f'{symbolname}.kraken') | ||||
| 
 | ||||
|         https://docs.kraken.com/rest/#tag/Market-Data/operation/getTradableAssetPairs | ||||
|         pairs = resp['result'] | ||||
| 
 | ||||
|         ''' | ||||
|         if not self._AssetPairs: | ||||
|             # get all pairs by default, or filter | ||||
|             # to whatever pattern is provided as input. | ||||
|             req_pairs: dict[str, str] | None = None | ||||
|             if pair_patt is not None: | ||||
|                 req_pairs = {'pair': pair_patt} | ||||
|         if pair is not None: | ||||
|             _, data = next(iter(pairs.items())) | ||||
|             return data | ||||
|         else: | ||||
|             return pairs | ||||
| 
 | ||||
|             resp = await self._public( | ||||
|                 'AssetPairs', | ||||
|                 req_pairs, | ||||
|             ) | ||||
|             err = resp['error'] | ||||
|             if err: | ||||
|                 raise SymbolNotFound(pair_patt) | ||||
| 
 | ||||
|             # NOTE: we try to key pairs by our custom defined | ||||
|             # `.bs_fqme` field since we want to offer search over | ||||
|             # this pattern set, callers should fill out lookup | ||||
|             # tables for kraken's bs_mktid keys to map to these | ||||
|             # keys! | ||||
|             # XXX: FURTHER kraken's data eng team decided to offer | ||||
|             # 3 frickin market-pair-symbol key sets depending on | ||||
|             # which frickin API is being used. | ||||
|             # Example for the LTC-EUR trading pair: | ||||
|             # - the "X-key" from rest eps 'XLTCZEUR' | ||||
|             # - the "websocket key" from ws msgs is 'LTC/EUR' | ||||
|             # - the "altname key" also delivered in pair info is 'LTCEUR' | ||||
|             for xkey, data in resp['result'].items(): | ||||
| 
 | ||||
|                 # NOTE: always cache in pairs tables for faster lookup | ||||
|                 pair = Pair(xname=xkey, **data) | ||||
| 
 | ||||
|                 # register the above `Pair` structs for all | ||||
|                 # key-sets/monikers: a set of 4 (frickin) tables | ||||
|                 # acting as a combined surjection of all possible | ||||
|                 # (and stupid) kraken names to their `Pair` obj. | ||||
|                 self._AssetPairs[xkey] = pair | ||||
|                 self._pairs[pair.bs_fqme] = pair | ||||
|                 self._altnames[pair.altname] = pair | ||||
|                 self._wsnames[pair.wsname] = pair | ||||
| 
 | ||||
|         if pair_patt is not None: | ||||
|             return next(iter(self._pairs.items()))[1] | ||||
| 
 | ||||
|         return self._AssetPairs | ||||
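|  | ||||
|     # eg. all (3!) of kraken's key-sets for the LTC-EUR pair are | ||||
|     # assumed to resolve to the same `Pair` after the loop above: | ||||
|     # | ||||
|     # p = client._AssetPairs['XLTCZEUR']   # rest "X-key" | ||||
|     # assert client._wsnames['LTC/EUR'] is p | ||||
|     # assert client._altnames['LTCEUR'] is p | ||||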
| 
 | ||||
|     async def get_mkt_pairs( | ||||
|     async def cache_symbols( | ||||
|         self, | ||||
|         reload: bool = False, | ||||
|     ) -> dict: | ||||
|         ''' | ||||
|         Load all market pair info build and cache it for downstream | ||||
|         use. | ||||
|         if not self._pairs: | ||||
|             self._pairs = await self.symbol_info() | ||||
| 
 | ||||
|         Multiple pair info lookup tables (like ``._altnames: | ||||
|         dict[str, str]``) are created for looking up the | ||||
|         piker-native `Pair`-struct from any input of the three | ||||
|         (yes, it's that idiotic..) available symbol/pair-key-sets | ||||
|         that kraken frickin offers depending on the API including | ||||
|         the .altname, .wsname and the weird ass default set they | ||||
|         return in ReST responses .xname.. | ||||
| 
 | ||||
|         ''' | ||||
|         if ( | ||||
|             not self._pairs | ||||
|             or reload | ||||
|         ): | ||||
|             await self.asset_pairs() | ||||
| 
 | ||||
|         return self._AssetPairs | ||||
|         return self._pairs | ||||
| 
 | ||||
|     async def search_symbols( | ||||
|         self, | ||||
|         pattern: str, | ||||
| 
 | ||||
|         limit: int = None, | ||||
|     ) -> dict[str, Any]: | ||||
|         ''' | ||||
|         Search for a symbol by "alt name".. | ||||
|         if self._pairs is not None: | ||||
|             data = self._pairs | ||||
|         else: | ||||
|             data = await self.symbol_info() | ||||
| 
 | ||||
|         It is expected that the ``Client._pairs`` table | ||||
|         gets populated before conducting the underlying fuzzy-search | ||||
|         over the pair-key set. | ||||
| 
 | ||||
|         ''' | ||||
|         if not len(self._pairs): | ||||
|             await self.get_mkt_pairs() | ||||
|             assert self._pairs, '`Client.get_mkt_pairs()` was never called!?' | ||||
| 
 | ||||
|         matches: dict[str, Pair] = match_from_pairs( | ||||
|             pairs=self._pairs, | ||||
|             query=pattern.upper(), | ||||
|         matches = fuzzy.extractBests( | ||||
|             pattern, | ||||
|             data, | ||||
|             score_cutoff=50, | ||||
|         ) | ||||
| 
 | ||||
|         # repack in .altname-keyed output table | ||||
|         return { | ||||
|             pair.altname: pair | ||||
|             for pair in matches.values() | ||||
|         } | ||||
|         # repack in dict form | ||||
|         return {item[0]['altname']: item[0] for item in matches} | ||||
| 
 | ||||
|     async def bars( | ||||
|         self, | ||||
|         symbol: str = 'XBTUSD', | ||||
| 
 | ||||
|         # UTC 2017-07-02 12:53:20 | ||||
|         since: Union[int, datetime] | None = None, | ||||
|         since: Optional[Union[int, datetime]] = None, | ||||
|         count: int = 720,  # <- max allowed per query | ||||
|         as_np: bool = True, | ||||
| 
 | ||||
|  | @ -630,11 +406,11 @@ class Client: | |||
|                 new_bars.append( | ||||
|                     (i,) + tuple( | ||||
|                         ftype(bar[j]) for j, (name, ftype) in enumerate( | ||||
|                             def_iohlcv_fields[1:] | ||||
|                             _ohlc_dtype[1:] | ||||
|                         ) | ||||
|                     ) | ||||
|                 ) | ||||
|             array = np.array(new_bars, dtype=def_iohlcv_fields) if as_np else bars | ||||
|             array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars | ||||
|             return array | ||||
|         except KeyError: | ||||
|             errmsg = json['error'][0] | ||||
|  | @ -648,56 +424,45 @@ class Client: | |||
|             else: | ||||
|                 raise BrokerError(errmsg) | ||||
| 
 | ||||
|     @classmethod | ||||
|     def to_bs_fqme( | ||||
|         cls, | ||||
|         pair_str: str | ||||
|     ) -> str: | ||||
|         ''' | ||||
|         Normalize symbol names to a 3x3 pair from the global | ||||
|         definition map which we build out from the data retrieved from | ||||
|         the 'AssetPairs' endpoint, see methods above. | ||||
| 
 | ||||
|         ''' | ||||
|         try: | ||||
|             return cls._altnames[pair_str.upper()].bs_fqme | ||||
|         except KeyError as ke: | ||||
|             raise SymbolNotFound(f'kraken has no {ke.args[0]}') | ||||
| 
 | ||||
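Worked example (assuming the `._altnames` table maps eg. 'XBTUSD' to
its `Pair`, whose `.bs_fqme` property appends the '.SPOT' venue
suffix)::

    Client.to_bs_fqme('xbtusd')  # -> 'XBTUSD.SPOT'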
| 
 | ||||
| @acm | ||||
| async def get_client() -> Client: | ||||
| 
 | ||||
|     conf: dict[str, Any] = get_config() | ||||
|     async with httpx.AsyncClient( | ||||
|         base_url=_url, | ||||
|         headers=_headers, | ||||
|     section = get_config() | ||||
|     if section: | ||||
|         client = Client( | ||||
|             name=section['key_descr'], | ||||
|             api_key=section['api_key'], | ||||
|             secret=section['secret'] | ||||
|         ) | ||||
|     else: | ||||
|         client = Client() | ||||
| 
 | ||||
|         # TODO: is there a way to numerate this? | ||||
|         # https://www.python-httpx.org/advanced/clients/#why-use-a-client | ||||
|         # connections=4 | ||||
|     ) as trio_client: | ||||
|         if conf: | ||||
|             client = Client( | ||||
|                 conf, | ||||
|                 httpx_client=trio_client, | ||||
|     # at startup, load all symbols locally for fast search | ||||
|     await client.cache_symbols() | ||||
| 
 | ||||
|                 # TODO: don't break these up and just do internal | ||||
|                 # conf lookups instead.. | ||||
|                 name=conf['key_descr'], | ||||
|                 api_key=conf['api_key'], | ||||
|                 secret=conf['secret'] | ||||
|             ) | ||||
|         else: | ||||
|             client = Client( | ||||
|                 conf={}, | ||||
|                 httpx_client=trio_client, | ||||
|             ) | ||||
|     yield client | ||||
| 
 | ||||
|         # at startup, load all symbols, and asset info in | ||||
|         # batch requests. | ||||
|         async with trio.open_nursery() as nurse: | ||||
|             nurse.start_soon(client.get_assets) | ||||
|             await client.get_mkt_pairs() | ||||
| 
 | ||||
|         yield client | ||||
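Typical consumption of the (new-style) factory is as an async context
manager; a minimal sketch (the pairs-table key shown is hypothetical)::

    async with get_client() as client:
        # assets and mkt pairs were batch-loaded at startup above
        pair = client._pairs['XBTUSD.SPOT']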
| def normalize_symbol( | ||||
|     ticker: str | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Normalize symbol names to a 3x3 pair. | ||||
| 
 | ||||
|     ''' | ||||
|     remap = { | ||||
|         'XXBTZEUR': 'XBTEUR', | ||||
|         'XXMRZEUR': 'XMREUR', | ||||
| 
 | ||||
|         # ws versions? pretty weird.. | ||||
|         'XBT/EUR': 'XBTEUR', | ||||
|         'XMR/EUR': 'XMREUR', | ||||
|     } | ||||
|     symlen = len(ticker) | ||||
|     if symlen != 6: | ||||
|         ticker = remap[ticker] | ||||
|     else: | ||||
|         raise ValueError(f'Unhandled symbol: {ticker}') | ||||
| 
 | ||||
|     return ticker.lower() | ||||
|  |  | |||
										
											
(file diff suppressed because it is too large)
							|  | @ -18,65 +18,73 @@ | |||
| Real-time and historical data feed endpoints. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     aclosing, | ||||
| ) | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from dataclasses import asdict | ||||
| from datetime import datetime | ||||
| from typing import ( | ||||
|     AsyncGenerator, | ||||
|     Callable, | ||||
|     Any, | ||||
|     Optional, | ||||
|     Callable, | ||||
| ) | ||||
| import time | ||||
| 
 | ||||
| from fuzzywuzzy import process as fuzzy | ||||
| import numpy as np | ||||
| import pendulum | ||||
| from trio_typing import TaskStatus | ||||
| import tractor | ||||
| import trio | ||||
| import wsproto | ||||
| 
 | ||||
| from piker.accounting._mktinfo import ( | ||||
|     MktPair, | ||||
| ) | ||||
| from piker.brokers import ( | ||||
|     open_cached_client, | ||||
| ) | ||||
| from piker._cacheables import open_cached_client | ||||
| from piker.brokers._util import ( | ||||
|     BrokerError, | ||||
|     DataThrottle, | ||||
|     DataUnavailable, | ||||
| ) | ||||
| from piker.types import Struct | ||||
| from piker.data.validate import FeedInit | ||||
| from piker.log import get_console_log | ||||
| from piker.data import ShmArray | ||||
| from piker.data.types import Struct | ||||
| from piker.data._web_bs import open_autorecon_ws, NoBsWs | ||||
| from . import log | ||||
| from .api import ( | ||||
|     log, | ||||
|     Client, | ||||
|     OHLC, | ||||
| ) | ||||
| from .symbols import get_mkt_info | ||||
| 
 | ||||
| 
 | ||||
| class OHLC(Struct, frozen=True): | ||||
|     ''' | ||||
|     Description of the flattened OHLC quote format. | ||||
| # https://www.kraken.com/features/api#get-tradable-pairs | ||||
| class Pair(Struct): | ||||
|     altname: str  # alternate pair name | ||||
|     wsname: str  # WebSocket pair name (if available) | ||||
|     aclass_base: str  # asset class of base component | ||||
|     base: str  # asset id of base component | ||||
|     aclass_quote: str  # asset class of quote component | ||||
|     quote: str  # asset id of quote component | ||||
|     lot: str  # volume lot size | ||||
| 
 | ||||
|     For schema details see: | ||||
|         https://docs.kraken.com/websockets/#message-ohlc | ||||
|     pair_decimals: int  # scaling decimal places for pair | ||||
|     lot_decimals: int  # scaling decimal places for volume | ||||
| 
 | ||||
|     ''' | ||||
|     chan_id: int  # internal kraken id | ||||
|     chan_name: str  # eg. ohlc-1  (name-interval) | ||||
|     pair: str  # fx pair | ||||
|     # amount to multiply lot volume by to get currency volume | ||||
|     lot_multiplier: float | ||||
| 
 | ||||
|     # unpacked from array | ||||
|     time: float  # Begin time of interval, in seconds since epoch | ||||
|     etime: float  # End time of interval, in seconds since epoch | ||||
|     open: float  # Open price of interval | ||||
|     high: float  # High price within interval | ||||
|     low: float  # Low price within interval | ||||
|     close: float  # Close price of interval | ||||
|     vwap: float  # Volume weighted average price within interval | ||||
|     volume: float  # Accumulated volume **within interval** | ||||
|     count: int  # Number of trades within interval | ||||
|     # array of leverage amounts available when buying | ||||
|     leverage_buy: list[int] | ||||
|     # array of leverage amounts available when selling | ||||
|     leverage_sell: list[int] | ||||
| 
 | ||||
|     # fee schedule array in [volume, percent fee] tuples | ||||
|     fees: list[tuple[int, float]] | ||||
| 
 | ||||
|     # maker fee schedule array in [volume, percent fee] tuples (if on | ||||
|     # maker/taker) | ||||
|     fees_maker: list[tuple[int, float]] | ||||
| 
 | ||||
|     fee_volume_currency: str  # volume discount currency | ||||
|     margin_call: str  # margin call level | ||||
|     margin_stop: str  # stop-out/liquidation margin level | ||||
|     ordermin: float  # minimum order volume for pair | ||||
| 
 | ||||
| 
 | ||||
| async def stream_messages( | ||||
|  | @ -89,11 +97,29 @@ async def stream_messages( | |||
|     though a single async generator. | ||||
| 
 | ||||
|     ''' | ||||
|     last_hb: float = 0 | ||||
|     too_slow_count = last_hb = 0 | ||||
| 
 | ||||
|     while True: | ||||
| 
 | ||||
|         with trio.move_on_after(5) as cs: | ||||
|             msg = await ws.recv_msg() | ||||
| 
 | ||||
|         # trigger reconnection if heartbeat is laggy | ||||
|         if cs.cancelled_caught: | ||||
| 
 | ||||
|             too_slow_count += 1 | ||||
| 
 | ||||
|             if too_slow_count > 20: | ||||
|                 log.warning( | ||||
|                     "Heartbeat is too slow, resetting ws connection") | ||||
| 
 | ||||
|                 await ws._connect() | ||||
|                 too_slow_count = 0 | ||||
|                 continue | ||||
| 
 | ||||
|         if isinstance(msg, dict): | ||||
|             if msg.get('event') == 'heartbeat': | ||||
| 
 | ||||
|     async for msg in ws: | ||||
|         match msg: | ||||
|             case {'event': 'heartbeat'}: | ||||
|                 now = time.time() | ||||
|                 delay = now - last_hb | ||||
|                 last_hb = now | ||||
|  | @ -104,9 +130,11 @@ async def stream_messages( | |||
| 
 | ||||
|                 continue | ||||
| 
 | ||||
|             case _: | ||||
|                 # passthrough sub msgs | ||||
|                 yield msg | ||||
|             err = msg.get('errorMessage') | ||||
|             if err: | ||||
|                 raise BrokerError(err) | ||||
|         else: | ||||
|             yield msg | ||||
| 
 | ||||
| 
 | ||||
| async def process_data_feed_msgs( | ||||
|  | @ -116,99 +144,81 @@ async def process_data_feed_msgs( | |||
|     Parse and pack data feed messages. | ||||
| 
 | ||||
|     ''' | ||||
|     async with aclosing(stream_messages(ws)) as ws_stream: | ||||
|         async for msg in ws_stream: | ||||
|             match msg: | ||||
|                 case { | ||||
|                     'errorMessage': errmsg | ||||
|                 }: | ||||
|                     raise BrokerError(errmsg) | ||||
|     async for msg in stream_messages(ws): | ||||
| 
 | ||||
|                 case { | ||||
|                     'event': 'subscriptionStatus', | ||||
|                 } as sub: | ||||
|                     log.info( | ||||
|                         'WS subscription is active:\n' | ||||
|                         f'{sub}' | ||||
|                     ) | ||||
|                     continue | ||||
|         chan_id, *payload_array, chan_name, pair = msg | ||||
| 
 | ||||
|                 case [ | ||||
|                     chan_id, | ||||
|                     *payload_array, | ||||
|                     chan_name, | ||||
|                     pair | ||||
|                 ]: | ||||
|                     if 'ohlc' in chan_name: | ||||
|                         array: list = payload_array[0] | ||||
|                         ohlc = OHLC( | ||||
|                             chan_id, | ||||
|                             chan_name, | ||||
|                             pair, | ||||
|                             *map(float, array[:-1]), | ||||
|                             count=array[-1], | ||||
|                         ) | ||||
|                         yield 'ohlc', ohlc.copy() | ||||
|         if 'ohlc' in chan_name: | ||||
| 
 | ||||
|                     elif 'spread' in chan_name: | ||||
|             yield 'ohlc', OHLC(chan_id, chan_name, pair, *payload_array[0]) | ||||
| 
 | ||||
|                         bid, ask, ts, bsize, asize = map( | ||||
|                             float, payload_array[0]) | ||||
|         elif 'spread' in chan_name: | ||||
| 
 | ||||
|                         # TODO: really makes you think IB has a horrible API... | ||||
|                         quote = { | ||||
|                             'symbol': pair.replace('/', ''), | ||||
|                             'ticks': [ | ||||
|                                 {'type': 'bid', 'price': bid, 'size': bsize}, | ||||
|                                 {'type': 'bsize', 'price': bid, 'size': bsize}, | ||||
|             bid, ask, ts, bsize, asize = map(float, payload_array[0]) | ||||
| 
 | ||||
|                                 {'type': 'ask', 'price': ask, 'size': asize}, | ||||
|                                 {'type': 'asize', 'price': ask, 'size': asize}, | ||||
|                             ], | ||||
|                         } | ||||
|                         yield 'l1', quote | ||||
|             # TODO: really makes you think IB has a horrible API... | ||||
|             quote = { | ||||
|                 'symbol': pair.replace('/', ''), | ||||
|                 'ticks': [ | ||||
|                     {'type': 'bid', 'price': bid, 'size': bsize}, | ||||
|                     {'type': 'bsize', 'price': bid, 'size': bsize}, | ||||
| 
 | ||||
|                     # elif 'book' in msg[-2]: | ||||
|                     #     chan_id, *payload_array, chan_name, pair = msg | ||||
|                     #     print(msg) | ||||
|                     {'type': 'ask', 'price': ask, 'size': asize}, | ||||
|                     {'type': 'asize', 'price': ask, 'size': asize}, | ||||
|                 ], | ||||
|             } | ||||
|             yield 'l1', quote | ||||
| 
 | ||||
|                 case { | ||||
|                     'connectionID': conid, | ||||
|                     'event': 'systemStatus', | ||||
|                     'status': 'online', | ||||
|                     'version': ver, | ||||
|                 }: | ||||
|                     log.info( | ||||
|                         f'Established {ver} ws connection with id: {conid}' | ||||
|                     ) | ||||
|                     continue | ||||
|         # elif 'book' in msg[-2]: | ||||
|         #     chan_id, *payload_array, chan_name, pair = msg | ||||
|         #     print(msg) | ||||
| 
 | ||||
|                 case _: | ||||
|                     print(f'UNHANDLED MSG: {msg}') | ||||
|                     # yield msg | ||||
|         else: | ||||
|             print(f'UNHANDLED MSG: {msg}') | ||||
|             yield msg | ||||
| 
 | ||||
| 
 | ||||
| def normalize(ohlc: OHLC) -> dict: | ||||
|     ''' | ||||
|     Norm an `OHLC` msg to piker's minimal (live-)quote schema. | ||||
| def normalize( | ||||
|     ohlc: OHLC, | ||||
| 
 | ||||
|     ''' | ||||
|     quote = ohlc.to_dict() | ||||
| ) -> dict: | ||||
|     quote = asdict(ohlc) | ||||
|     quote['broker_ts'] = quote['time'] | ||||
|     quote['brokerd_ts'] = time.time() | ||||
|     quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '') | ||||
|     quote['last'] = quote['close'] | ||||
|     quote['bar_wap'] = ohlc.vwap | ||||
|     return quote | ||||
| 
 | ||||
|     # seriously eh? what's with this non-symmetry everywhere | ||||
|     # in subscription systems... | ||||
|     # XXX: piker style always lowercases symbols. | ||||
|     topic = quote['pair'].replace('/', '').lower() | ||||
| 
 | ||||
|     # print(quote) | ||||
|     return topic, quote | ||||
| 
 | ||||
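The old variant returns a `(topic, quote)` pair where the topic is the
lowercased, de-slashed pair name; a sketch of the output shape (values
illustrative)::

    topic, quote = normalize(ohlc)
    # topic -> 'xbtusd'
    # quote adds 'broker_ts', 'brokerd_ts', 'symbol', 'last' and
    # 'bar_wap' on top of the raw OHLC fields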
| 
 | ||||
| def make_sub(pairs: list[str], data: dict[str, Any]) -> dict[str, str]: | ||||
|     ''' | ||||
|     Create a request subscription packet dict. | ||||
| 
 | ||||
|     https://docs.kraken.com/websockets/#message-subscribe | ||||
| 
 | ||||
|     ''' | ||||
|     # eg. specific logic for this in kraken's sync client: | ||||
|     # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188 | ||||
|     return { | ||||
|         'pair': pairs, | ||||
|         'event': 'subscribe', | ||||
|         'subscription': data, | ||||
|     } | ||||
| 
 | ||||
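For reference, a minimal sketch of the packet this helper builds
(following Kraken's public WS subscribe schema linked above)::

    ohlc_sub = make_sub(
        pairs=['XBT/USD'],
        data={'name': 'ohlc', 'interval': 1},
    )
    # -> {'pair': ['XBT/USD'],
    #     'event': 'subscribe',
    #     'subscription': {'name': 'ohlc', 'interval': 1}}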
| 
 | ||||
| @acm | ||||
| async def open_history_client( | ||||
|     mkt: MktPair, | ||||
|     symbol: str, | ||||
| 
 | ||||
| ) -> AsyncGenerator[Callable, None]: | ||||
| 
 | ||||
|     symbol: str = mkt.bs_mktid | ||||
| ) -> tuple[Callable, int]: | ||||
| 
 | ||||
|     # TODO implement history getter for the new storage layer. | ||||
|     async with open_cached_client('kraken') as client: | ||||
|  | @ -219,7 +229,6 @@ async def open_history_client( | |||
|         queries: int = 0 | ||||
| 
 | ||||
|         async def get_ohlc( | ||||
|             timeframe: float, | ||||
|             end_dt: Optional[datetime] = None, | ||||
|             start_dt: Optional[datetime] = None, | ||||
| 
 | ||||
|  | @ -230,12 +239,8 @@ async def open_history_client( | |||
|         ]: | ||||
| 
 | ||||
|             nonlocal queries | ||||
|             if ( | ||||
|                 queries > 0 | ||||
|                 or timeframe != 60 | ||||
|             ): | ||||
|                 raise DataUnavailable( | ||||
|                     'Only a single query for 1m bars supported') | ||||
|             if queries > 0: | ||||
|                 raise DataUnavailable | ||||
| 
 | ||||
|             count = 0 | ||||
|             while count <= 3: | ||||
|  | @ -258,6 +263,24 @@ async def open_history_client( | |||
|         yield get_ohlc, {'erlangs': 1, 'rate': 1} | ||||
| 
 | ||||
| 
 | ||||
| async def backfill_bars( | ||||
| 
 | ||||
|     sym: str, | ||||
|     shm: ShmArray,  # type: ignore # noqa | ||||
|     count: int = 10,  # NOTE: any more and we'll overrun the underlying buffer | ||||
|     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Fill historical bars into shared mem / storage afap. | ||||
|     ''' | ||||
|     with trio.CancelScope() as cs: | ||||
|         async with open_cached_client('kraken') as client: | ||||
|             bars = await client.bars(symbol=sym) | ||||
|             shm.push(bars) | ||||
|             task_status.started(cs) | ||||
| 
 | ||||
| 
 | ||||
| async def stream_quotes( | ||||
| 
 | ||||
|     send_chan: trio.abc.SendChannel, | ||||
|  | @ -278,36 +301,50 @@ async def stream_quotes( | |||
|     ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>. | ||||
| 
 | ||||
|     ''' | ||||
|     # XXX: required to propagate ``tractor`` loglevel to piker logging | ||||
|     get_console_log(loglevel or tractor.current_actor().loglevel) | ||||
| 
 | ||||
|     ws_pairs: list[str] = [] | ||||
|     init_msgs: list[FeedInit] = [] | ||||
|     ws_pairs = {} | ||||
|     sym_infos = {} | ||||
| 
 | ||||
|     async with ( | ||||
|         send_chan as send_chan, | ||||
|     ): | ||||
|         for sym_str in symbols: | ||||
|             mkt, pair = await get_mkt_info(sym_str) | ||||
|             init_msgs.append( | ||||
|                 FeedInit(mkt_info=mkt) | ||||
|             ) | ||||
|     async with open_cached_client('kraken') as client, send_chan as send_chan: | ||||
| 
 | ||||
|             ws_pairs.append(pair.wsname) | ||||
|         # keep client cached for real-time section | ||||
|         for sym in symbols: | ||||
| 
 | ||||
|             # transform to upper since piker style is always lower | ||||
|             sym = sym.upper() | ||||
| 
 | ||||
|             si = Pair(**await client.symbol_info(sym))  # validation | ||||
|             syminfo = si.to_dict() | ||||
|             syminfo['price_tick_size'] = 1 / 10**si.pair_decimals | ||||
|             syminfo['lot_tick_size'] = 1 / 10**si.lot_decimals | ||||
|             syminfo['asset_type'] = 'crypto' | ||||
|             sym_infos[sym] = syminfo | ||||
|             ws_pairs[sym] = si.wsname | ||||
| 
 | ||||
|         symbol = symbols[0].lower() | ||||
| 
 | ||||
|         init_msgs = { | ||||
|             # pass back token, and bool, signalling if we're the writer | ||||
|             # and that history has been written | ||||
|             symbol: { | ||||
|                 'symbol_info': sym_infos[sym], | ||||
|                 'shm_write_opts': {'sum_tick_vml': False}, | ||||
|                 'fqsn': sym, | ||||
|             }, | ||||
|         } | ||||
| 
 | ||||
|         @acm | ||||
|         async def subscribe(ws: NoBsWs): | ||||
| 
 | ||||
|         async def subscribe(ws: wsproto.WSConnection): | ||||
|             # XXX: setup subs | ||||
|             # https://docs.kraken.com/websockets/#message-subscribe | ||||
|             # specific logic for this in kraken's sync client: | ||||
|             # specific logic for this in kraken's shitty sync client: | ||||
|             # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188 | ||||
|             ohlc_sub = { | ||||
|                 'event': 'subscribe', | ||||
|                 'pair': ws_pairs, | ||||
|                 'subscription': { | ||||
|                     'name': 'ohlc', | ||||
|                     'interval': 1, | ||||
|                 }, | ||||
|             } | ||||
|             ohlc_sub = make_sub( | ||||
|                 list(ws_pairs.values()), | ||||
|                 {'name': 'ohlc', 'interval': 1} | ||||
|             ) | ||||
| 
 | ||||
|             # TODO: we want to eventually allow unsubs which should | ||||
|             # be completely fine to request from a separate task | ||||
|  | @ -316,14 +353,10 @@ async def stream_quotes( | |||
|             await ws.send_msg(ohlc_sub) | ||||
| 
 | ||||
|             # trade data (aka L1) | ||||
|             l1_sub = { | ||||
|                 'event': 'subscribe', | ||||
|                 'pair': ws_pairs, | ||||
|                 'subscription': { | ||||
|                     'name': 'spread', | ||||
|                     # 'depth': 10} | ||||
|                 }, | ||||
|             } | ||||
|             l1_sub = make_sub( | ||||
|                 list(ws_pairs.values()), | ||||
|                 {'name': 'spread'}  # 'depth': 10} | ||||
|             ) | ||||
| 
 | ||||
|             # pull a first quote and deliver | ||||
|             await ws.send_msg(l1_sub) | ||||
|  | @ -331,85 +364,101 @@ async def stream_quotes( | |||
|             yield | ||||
| 
 | ||||
|             # unsub from all pairs on teardown | ||||
|             if ws.connected(): | ||||
|                 await ws.send_msg({ | ||||
|                     'pair': ws_pairs, | ||||
|                     'event': 'unsubscribe', | ||||
|                     'subscription': ['ohlc', 'spread'], | ||||
|                 }) | ||||
|             await ws.send_msg({ | ||||
|                 'pair': list(ws_pairs.values()), | ||||
|                 'event': 'unsubscribe', | ||||
|                 'subscription': ['ohlc', 'spread'], | ||||
|             }) | ||||
| 
 | ||||
|                 # XXX: do we need to ack the unsub? | ||||
|                 # await ws.recv_msg() | ||||
|             # XXX: do we need to ack the unsub? | ||||
|             # await ws.recv_msg() | ||||
| 
 | ||||
|         # see the tips on reconnection logic: | ||||
|         # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds | ||||
|         ws: NoBsWs | ||||
|         async with ( | ||||
|             open_autorecon_ws( | ||||
|                 'wss://ws.kraken.com/', | ||||
|                 fixture=subscribe, | ||||
|                 reset_after=20, | ||||
|             ) as ws, | ||||
|         async with open_autorecon_ws( | ||||
|             'wss://ws.kraken.com/', | ||||
|             fixture=subscribe, | ||||
|         ) as ws: | ||||
| 
 | ||||
|             # avoid stream-gen closure from breaking trio.. | ||||
|             # NOTE: not sure this actually works XD particularly | ||||
|             # if we call `ws._connect()` manually in the streaming | ||||
|             # async gen.. | ||||
|             aclosing(process_data_feed_msgs(ws)) as msg_gen, | ||||
|         ): | ||||
|             # pull a first quote and deliver | ||||
|             typ, ohlc_last = await anext(msg_gen) | ||||
|             quote = normalize(ohlc_last) | ||||
|             msg_gen = process_data_feed_msgs(ws) | ||||
| 
 | ||||
|             # TODO: use ``anext()`` when it lands in 3.10! | ||||
|             typ, ohlc_last = await msg_gen.__anext__() | ||||
| 
 | ||||
|             topic, quote = normalize(ohlc_last) | ||||
| 
 | ||||
|             task_status.started((init_msgs,  quote)) | ||||
| 
 | ||||
|             # lol, only "closes" when they're margin squeezing clients ;P | ||||
|             feed_is_live.set() | ||||
| 
 | ||||
|             # keep start of last interval for volume tracking | ||||
|             last_interval_start: float = ohlc_last.etime | ||||
|             last_interval_start = ohlc_last.etime | ||||
| 
 | ||||
|             # start streaming | ||||
|             topic: str = mkt.bs_fqme | ||||
|             async for typ, quote in msg_gen: | ||||
|                 match typ: | ||||
|             async for typ, ohlc in msg_gen: | ||||
| 
 | ||||
|                 if typ == 'ohlc': | ||||
| 
 | ||||
|                     # TODO: can get rid of all this by using | ||||
|                     # ``trades`` subscription..? Not sure why this | ||||
|                     # wasn't used originally? (music queues) zoltannn.. | ||||
|                     # https://docs.kraken.com/websockets/#message-trade | ||||
|                     case 'ohlc': | ||||
|                         # generate tick values to match time & sales pane: | ||||
|                         # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m | ||||
|                         volume = quote.volume | ||||
|                     # ``trades`` subscription... | ||||
| 
 | ||||
|                         # new OHLC sample interval | ||||
|                         if quote.etime > last_interval_start: | ||||
|                             last_interval_start: float = quote.etime | ||||
|                             tick_volume: float = volume | ||||
|                     # generate tick values to match time & sales pane: | ||||
|                     # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m | ||||
|                     volume = ohlc.volume | ||||
| 
 | ||||
|                         else: | ||||
|                             # this is the tick volume *within the interval* | ||||
|                             tick_volume: float = volume - ohlc_last.volume | ||||
|                     # new OHLC sample interval | ||||
|                     if ohlc.etime > last_interval_start: | ||||
|                         last_interval_start = ohlc.etime | ||||
|                         tick_volume = volume | ||||
| 
 | ||||
|                         ohlc_last = quote | ||||
|                         last = quote.close | ||||
|                     else: | ||||
|                         # this is the tick volume *within the interval* | ||||
|                         tick_volume = volume - ohlc_last.volume | ||||
| 
 | ||||
|                         quote = normalize(quote) | ||||
|                         ticks = quote.setdefault( | ||||
|                             'ticks', | ||||
|                             [], | ||||
|                         ) | ||||
|                         if tick_volume: | ||||
|                             ticks.append({ | ||||
|                                 'type': 'trade', | ||||
|                                 'price': last, | ||||
|                                 'size': tick_volume, | ||||
|                             }) | ||||
|                     ohlc_last = ohlc | ||||
|                     last = ohlc.close | ||||
| 
 | ||||
|                     case 'l1': | ||||
|                         # passthrough quote msg | ||||
|                         pass | ||||
|                     if tick_volume: | ||||
|                         ohlc.ticks.append({ | ||||
|                             'type': 'trade', | ||||
|                             'price': last, | ||||
|                             'size': tick_volume, | ||||
|                         }) | ||||
| 
 | ||||
|                     case _: | ||||
|                         log.warning(f'Unknown WSS message: {typ}, {quote}') | ||||
|                     topic, quote = normalize(ohlc) | ||||
| 
 | ||||
|                 elif typ == 'l1': | ||||
|                     quote = ohlc | ||||
|                     topic = quote['symbol'].lower() | ||||
| 
 | ||||
|                 await send_chan.send({topic: quote}) | ||||
| 
 | ||||
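The tick-volume bookkeeping in the loop above reduces to: within one
OHLC interval the tradeable size is the volume delta vs. the last
sample, and on an interval rollover it resets to the sample's full
volume. Distilled as a hypothetical helper (same field names as the
`OHLC` struct)::

    def tick_volume(
        ohlc,               # current sample
        ohlc_last,          # previous sample
        last_interval_start: float,
    ) -> tuple[float, float]:
        # new OHLC sample interval -> volume restarts
        if ohlc.etime > last_interval_start:
            return ohlc.volume, ohlc.etime
        # same interval -> only the incremental volume traded
        return ohlc.volume - ohlc_last.volume, last_interval_start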
| 
 | ||||
| @tractor.context | ||||
| async def open_symbol_search( | ||||
|     ctx: tractor.Context, | ||||
| 
 | ||||
| ) -> Client: | ||||
|     async with open_cached_client('kraken') as client: | ||||
| 
 | ||||
|         # load all symbols locally for fast search | ||||
|         cache = await client.cache_symbols() | ||||
|         await ctx.started(cache) | ||||
| 
 | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             async for pattern in stream: | ||||
| 
 | ||||
|                 matches = fuzzy.extractBests( | ||||
|                     pattern, | ||||
|                     cache, | ||||
|                     score_cutoff=50, | ||||
|                 ) | ||||
|                 # repack in dict form | ||||
|                 await stream.send( | ||||
|                     {item[0]['altname']: item[0] | ||||
|                      for item in matches} | ||||
|                 ) | ||||
|  |  | |||
|  | @ -1,269 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Trade transaction accounting and normalization. | ||||
| 
 | ||||
| ''' | ||||
| import math | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Any, | ||||
| ) | ||||
| 
 | ||||
| import pendulum | ||||
| 
 | ||||
| from piker.accounting import ( | ||||
|     Transaction, | ||||
|     Position, | ||||
|     Account, | ||||
|     get_likely_pair, | ||||
|     TransactionLedger, | ||||
|     # MktPair, | ||||
| ) | ||||
| from piker.types import Struct | ||||
| from piker.data import ( | ||||
|     SymbologyCache, | ||||
| ) | ||||
| from .api import ( | ||||
|     log, | ||||
|     Client, | ||||
|     Pair, | ||||
| ) | ||||
| # from .feed import get_mkt_info | ||||
| 
 | ||||
| 
 | ||||
| def norm_trade( | ||||
|     tid: str, | ||||
|     record: dict[str, Any], | ||||
| 
 | ||||
|     # this is the dict that was returned from | ||||
|     # `Client.get_mkt_pairs()` and when running offline ledger | ||||
|     # processing from `.accounting`, this will be the table loaded | ||||
|     # into `SymbologyCache.pairs`. | ||||
|     pairs: dict[str, Struct], | ||||
|     symcache: SymbologyCache | None = None, | ||||
| 
 | ||||
| ) -> Transaction: | ||||
| 
 | ||||
|     size: float = float(record.get('vol')) * { | ||||
|         'buy': 1, | ||||
|         'sell': -1, | ||||
|     }[record['type']] | ||||
| 
 | ||||
|     # NOTE: this value may be either the websocket OR the rest schema | ||||
|     # so we need to detect the key format and then choose the | ||||
|     # correct symbol lookup table to eventually get a ``Pair``.. | ||||
|     # See internals of `Client.asset_pairs()` for deats! | ||||
|     src_pair_key: str = record['pair'] | ||||
| 
 | ||||
|     # XXX: kraken's data engineering is soo bad they require THREE | ||||
|     # different pair schemas (more or less seemingly tied to | ||||
|     # transport-APIs)..LITERALLY they return different market id | ||||
|     # pairs in the ledger endpoints vs. the websocket event subs.. | ||||
|     # lookup pair using appropriately provided table depending | ||||
|     # on API-key-schema.. | ||||
|     pair: Pair = pairs[src_pair_key] | ||||
|     fqme: str = pair.bs_fqme.lower() + '.kraken' | ||||
| 
 | ||||
|     return Transaction( | ||||
|         fqme=fqme, | ||||
|         tid=tid, | ||||
|         size=size, | ||||
|         price=float(record['price']), | ||||
|         cost=float(record['fee']), | ||||
|         dt=pendulum.from_timestamp(float(record['time'])), | ||||
|         bs_mktid=pair.bs_mktid, | ||||
|     ) | ||||
| 
 | ||||
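A rough input/output sketch (record fields follow the kraken trade
history schema used above; values are illustrative)::

    record = {
        'pair': 'XXBTZUSD',   # REST-style key -> use the 'xname' table
        'type': 'buy',
        'vol': '0.1',
        'price': '20000.0',
        'fee': '3.2',
        'time': 1657000000.0,
    }
    txn = norm_trade('TXID1', record, pairs=client._AssetPairs)
    # txn.size -> 0.1, txn.fqme -> eg. 'xbtusd.spot.kraken'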
| 
 | ||||
| async def norm_trade_records( | ||||
|     ledger: dict[str, Any], | ||||
|     client: Client, | ||||
|     api_name_set: str = 'xname', | ||||
| 
 | ||||
| ) -> dict[str, Transaction]: | ||||
|     ''' | ||||
|     Loop through an input ``dict`` of trade records | ||||
|     and convert them to ``Transactions``. | ||||
| 
 | ||||
|     ''' | ||||
|     records: dict[str, Transaction] = {} | ||||
|     for tid, record in ledger.items(): | ||||
| 
 | ||||
|         # manual_fqme: str = f'{bs_mktid.lower()}.kraken' | ||||
|         # mkt: MktPair = (await get_mkt_info(manual_fqme))[0] | ||||
|         # fqme: str = mkt.fqme | ||||
|         # assert fqme == manual_fqme | ||||
|         pairs: dict[str, Pair] = { | ||||
|             'xname': client._AssetPairs, | ||||
|             'wsname': client._wsnames, | ||||
|             'altname': client._altnames, | ||||
|         }[api_name_set] | ||||
| 
 | ||||
|         records[tid] = norm_trade( | ||||
|             tid, | ||||
|             record, | ||||
|             pairs=pairs, | ||||
|         ) | ||||
| 
 | ||||
|     return records | ||||
| 
 | ||||
| 
 | ||||
| def has_pp( | ||||
|     acnt: Account, | ||||
|     src_fiat: str, | ||||
|     dst: str, | ||||
|     size: float, | ||||
| 
 | ||||
| ) -> Position | None: | ||||
| 
 | ||||
|     src2dst: dict[str, str] = {} | ||||
|     for bs_mktid in acnt.pps: | ||||
|         likely_pair = get_likely_pair( | ||||
|             src_fiat, | ||||
|             dst, | ||||
|             bs_mktid, | ||||
|         ) | ||||
|         if likely_pair: | ||||
|             src2dst[src_fiat] = dst | ||||
| 
 | ||||
|     for src, dst in src2dst.items(): | ||||
|         pair: str = f'{dst}{src_fiat}' | ||||
|         pos: Position = acnt.pps.get(pair) | ||||
|         if ( | ||||
|             pos | ||||
|             and math.isclose(pos.size, size) | ||||
|         ): | ||||
|             return pos | ||||
| 
 | ||||
|         elif ( | ||||
|             pos | ||||
|             and size == 0 | ||||
|             and pos.size | ||||
|         ): | ||||
|             log.warning( | ||||
|                 f'`kraken` account says you have a ZERO ' | ||||
|                 f'balance for {bs_mktid}:{pair}\n' | ||||
|                 f'but piker seems to think `{pos.size}`\n' | ||||
|                 'This is likely a discrepancy in piker ' | ||||
|                 'accounting if the above number is ' | ||||
|                 "large, though it's likely due to a lack " | ||||
|                 'of tracking xfer fees..' | ||||
|             ) | ||||
|             return pos | ||||
| 
 | ||||
|     return None  # indicate no entry found | ||||
| 
 | ||||
| 
 | ||||
| # TODO: factor most of this "account updating from txns" into | ||||
| # the `Account` impl so as to provide for hiding the mostly | ||||
| # cross-provider updates from txn sets | ||||
| async def verify_balances( | ||||
|     acnt: Account, | ||||
|     src_fiat: str, | ||||
|     balances: dict[str, float], | ||||
|     client: Client, | ||||
|     ledger: TransactionLedger, | ||||
|     ledger_trans: dict[str, Transaction],  # from toml | ||||
|     api_trans: dict[str, Transaction],  # from API | ||||
| 
 | ||||
|     simulate_pp_update: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|     for dst, size in balances.items(): | ||||
| 
 | ||||
|         # we don't care about tracking positions | ||||
|         # in the user's source fiat currency. | ||||
|         if ( | ||||
|             dst == src_fiat | ||||
|             or not any( | ||||
|                 dst in bs_mktid for bs_mktid in acnt.pps | ||||
|             ) | ||||
|         ): | ||||
|             log.warning( | ||||
|                 f'Skipping balance `{dst}`:{size} for position calcs!' | ||||
|             ) | ||||
|             continue | ||||
| 
 | ||||
|         # we have a balance for which there is no pos entry | ||||
|         # - we have to likely update from the ledger? | ||||
|         if not has_pp(acnt, src_fiat, dst, size): | ||||
|             updated = acnt.update_from_ledger( | ||||
|                 ledger_trans, | ||||
|                 symcache=ledger.symcache, | ||||
|             ) | ||||
|             log.info(f'Updated pps from ledger:\n{pformat(updated)}') | ||||
| 
 | ||||
|             # FIRST try reloading from API records | ||||
|             if ( | ||||
|                 not has_pp(acnt, src_fiat, dst, size) | ||||
|                 and not simulate_pp_update | ||||
|             ): | ||||
|                 acnt.update_from_ledger( | ||||
|                     api_trans, | ||||
|                     symcache=ledger.symcache, | ||||
|                 ) | ||||
| 
 | ||||
|                 # get transfers to make sense of abs | ||||
|                 # balances. | ||||
|                 # NOTE: we do this after ledger and API | ||||
|                 # loading since we might not have an | ||||
|                 # entry in the | ||||
|                 # ``account.kraken.spot.toml`` for the | ||||
|                 # necessary pair yet and thus this | ||||
|                 # likely-pair lookup may fail. | ||||
|                 if not has_pp(acnt, src_fiat, dst, size): | ||||
|                     for bs_mktid in acnt.pps: | ||||
|                         likely_pair: str | None = get_likely_pair( | ||||
|                             src_fiat, | ||||
|                             dst, | ||||
|                             bs_mktid, | ||||
|                         ) | ||||
|                         if likely_pair: | ||||
|                             break | ||||
|                     else: | ||||
|                         raise ValueError( | ||||
|                             'Could not find a position pair in ' | ||||
|                             'ledger for likely withdrawal ' | ||||
|                             f'candidate: {dst}' | ||||
|                         ) | ||||
| 
 | ||||
|                     # this was likely pos that had a withdrawal | ||||
|                     # from the dst asset out of the account. | ||||
|                     if likely_pair: | ||||
|                         xfer_trans = await client.get_xfers( | ||||
|                             dst, | ||||
| 
 | ||||
|                             # TODO: not all src assets are | ||||
|                             # 3 chars long... | ||||
|                             src_asset=likely_pair[3:], | ||||
|                         ) | ||||
|                         if xfer_trans: | ||||
|                             updated = acnt.update_from_ledger( | ||||
|                                 xfer_trans, | ||||
|                                 cost_scalar=1, | ||||
|                                 symcache=ledger.symcache, | ||||
|                             ) | ||||
|                             log.info( | ||||
|                                 f'Updated {dst} from transfers:\n' | ||||
|                                 f'{pformat(updated)}' | ||||
|                             ) | ||||
| 
 | ||||
|                 if not has_pp(acnt, src_fiat, dst, size): | ||||
|                     raise ValueError( | ||||
|                         'Could not reproduce balance:\n' | ||||
|                         f'dst: {dst}, {size}\n' | ||||
|                     ) | ||||
|  | @ -1,206 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Symbology defs and search. | ||||
| 
 | ||||
| ''' | ||||
| from decimal import Decimal | ||||
| 
 | ||||
| import tractor | ||||
| from rapidfuzz import process as fuzzy | ||||
| 
 | ||||
| from piker._cacheables import ( | ||||
|     async_lifo_cache, | ||||
| ) | ||||
| from piker.accounting._mktinfo import ( | ||||
|     digits_to_dec, | ||||
| ) | ||||
| from piker.brokers import ( | ||||
|     open_cached_client, | ||||
|     SymbolNotFound, | ||||
| ) | ||||
| from piker.types import Struct | ||||
| from piker.accounting._mktinfo import ( | ||||
|     Asset, | ||||
|     MktPair, | ||||
|     unpack_fqme, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # https://www.kraken.com/features/api#get-tradable-pairs | ||||
| class Pair(Struct): | ||||
|     xname: str  # idiotic bs_mktid equiv i guess? | ||||
|     altname: str  # alternate pair name | ||||
|     wsname: str  # WebSocket pair name (if available) | ||||
|     aclass_base: str  # asset class of base component | ||||
|     base: str  # asset id of base component | ||||
|     aclass_quote: str  # asset class of quote component | ||||
|     quote: str  # asset id of quote component | ||||
|     lot: str  # volume lot size | ||||
| 
 | ||||
|     cost_decimals: int | ||||
|     costmin: float | ||||
|     pair_decimals: int  # scaling decimal places for pair | ||||
|     lot_decimals: int  # scaling decimal places for volume | ||||
| 
 | ||||
|     # amount to multiply lot volume by to get currency volume | ||||
|     lot_multiplier: float | ||||
| 
 | ||||
|     # array of leverage amounts available when buying | ||||
|     leverage_buy: list[int] | ||||
|     # array of leverage amounts available when selling | ||||
|     leverage_sell: list[int] | ||||
| 
 | ||||
|     # fee schedule array in [volume, percent fee] tuples | ||||
|     fees: list[tuple[int, float]] | ||||
| 
 | ||||
|     # maker fee schedule array in [volume, percent fee] tuples (if on | ||||
|     # maker/taker) | ||||
|     fees_maker: list[tuple[int, float]] | ||||
| 
 | ||||
|     fee_volume_currency: str  # volume discount currency | ||||
|     margin_call: str  # margin call level | ||||
|     margin_stop: str  # stop-out/liquidation margin level | ||||
|     ordermin: float  # minimum order volume for pair | ||||
|     tick_size: float  # min price step size | ||||
|     status: str | ||||
| 
 | ||||
|     short_position_limit: float = 0 | ||||
|     long_position_limit: float = float('inf') | ||||
| 
 | ||||
|     # TODO: should we make this a literal NamespacePath ref? | ||||
|     ns_path: str = 'piker.brokers.kraken:Pair' | ||||
| 
 | ||||
|     @property | ||||
|     def bs_mktid(self) -> str: | ||||
|         ''' | ||||
|         Kraken seems to index its market symbol sets in | ||||
|         transaction ledgers using the key returned from rest | ||||
|         queries.. so use that since apparently they can't | ||||
|         make up their minds on a better key set XD | ||||
| 
 | ||||
|         ''' | ||||
|         return self.xname | ||||
| 
 | ||||
|     @property | ||||
|     def price_tick(self) -> Decimal: | ||||
|         return digits_to_dec(self.pair_decimals) | ||||
| 
 | ||||
|     @property | ||||
|     def size_tick(self) -> Decimal: | ||||
|         return digits_to_dec(self.lot_decimals) | ||||
| 
 | ||||
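Concretely, mirroring the `1 / 10**decimals` math from the old feed
layer shown earlier (assuming `digits_to_dec()` does the equivalent
conversion)::

    # pair_decimals=1  -> price_tick == Decimal('0.1')
    # lot_decimals=8   -> size_tick  == Decimal('0.00000001')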
|     @property | ||||
|     def bs_dst_asset(self) -> str: | ||||
|         dst, _ = self.wsname.split('/') | ||||
|         return dst | ||||
| 
 | ||||
|     @property | ||||
|     def bs_src_asset(self) -> str: | ||||
|         _, src = self.wsname.split('/') | ||||
|         return src | ||||
| 
 | ||||
|     @property | ||||
|     def bs_fqme(self) -> str: | ||||
|         ''' | ||||
|         Basically the `.altname` but with special '.' handling and | ||||
|         `.SPOT` suffix appending (for future multi-venue support). | ||||
| 
 | ||||
|         ''' | ||||
|         dst, src = self.wsname.split('/') | ||||
|         # XXX: omg for stupid shite like ETH2.S/ETH.. | ||||
|         dst = dst.replace('.', '-') | ||||
|         return f'{dst}{src}.SPOT' | ||||
| 
 | ||||
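Working the awkward case from the comment above by hand::

    # wsname 'ETH2.S/ETH' -> dst='ETH2.S', src='ETH'
    # dst.replace('.', '-') -> 'ETH2-S'
    # .bs_fqme -> 'ETH2-SETH.SPOT'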
| 
 | ||||
| @tractor.context | ||||
| async def open_symbol_search(ctx: tractor.Context) -> None: | ||||
|     async with open_cached_client('kraken') as client: | ||||
| 
 | ||||
|         # load all symbols locally for fast search | ||||
|         cache = await client.get_mkt_pairs() | ||||
|         await ctx.started(cache) | ||||
| 
 | ||||
|         async with ctx.open_stream() as stream: | ||||
|             async for pattern in stream: | ||||
|                 await stream.send( | ||||
|                     await client.search_symbols(pattern) | ||||
|                 ) | ||||
| 
 | ||||
| 
 | ||||
| @async_lifo_cache() | ||||
| async def get_mkt_info( | ||||
|     fqme: str, | ||||
| 
 | ||||
| ) -> tuple[MktPair, Pair]: | ||||
|     ''' | ||||
|     Query for and return a `MktPair` and backend-native `Pair` (or | ||||
|     wtv else) info. | ||||
| 
 | ||||
|     If more than one fqme is provided return a ``dict`` of native | ||||
|     key-strs to `MktPair`s. | ||||
| 
 | ||||
|     ''' | ||||
|     venue: str = 'spot' | ||||
|     expiry: str = '' | ||||
|     if '.kraken' not in fqme: | ||||
|         fqme += '.kraken' | ||||
| 
 | ||||
|     broker, pair, venue, expiry = unpack_fqme(fqme) | ||||
|     venue: str = venue or 'spot' | ||||
| 
 | ||||
|     if venue.lower() != 'spot': | ||||
|         raise SymbolNotFound( | ||||
|             'kraken only supports spot markets right now!\n' | ||||
|             f'{fqme}\n' | ||||
|         ) | ||||
| 
 | ||||
|     async with open_cached_client('kraken') as client: | ||||
| 
 | ||||
|         # uppercase since kraken bs_mktid is always upper | ||||
|         # bs_fqme, _, broker = fqme.partition('.') | ||||
|         # pair_str: str = bs_fqme.upper() | ||||
|         pair_str: str = f'{pair}.{venue}' | ||||
| 
 | ||||
|         pair: Pair | None = client._pairs.get(pair_str.upper()) | ||||
|         if not pair: | ||||
|             bs_fqme: str = client.to_bs_fqme(pair_str) | ||||
|             pair: Pair = client._pairs[bs_fqme] | ||||
| 
 | ||||
|         if not (assets := client._assets): | ||||
|             assets: dict[str, Asset] = await client.get_assets() | ||||
| 
 | ||||
|         dst_asset: Asset = assets[pair.bs_dst_asset] | ||||
|         src_asset: Asset = assets[pair.bs_src_asset] | ||||
| 
 | ||||
|         mkt = MktPair( | ||||
|             dst=dst_asset, | ||||
|             src=src_asset, | ||||
| 
 | ||||
|             price_tick=pair.price_tick, | ||||
|             size_tick=pair.size_tick, | ||||
|             bs_mktid=pair.bs_mktid, | ||||
| 
 | ||||
|             expiry=expiry, | ||||
|             venue=venue or 'spot', | ||||
| 
 | ||||
|             # TODO: futes | ||||
|             # _atype=_atype, | ||||
| 
 | ||||
|             broker='kraken', | ||||
|         ) | ||||
|         return mkt, pair | ||||
										
											
(file diff suppressed because it is too large)
							|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers) | ||||
| # Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -40,17 +40,13 @@ import wrapt | |||
| import asks | ||||
| 
 | ||||
| from ..calc import humanize, percent_change | ||||
| from . import open_cached_client | ||||
| from piker._cacheables import async_lifo_cache | ||||
| from .._cacheables import open_cached_client, async_lifo_cache | ||||
| from .. import config | ||||
| from ._util import resproc, BrokerError, SymbolNotFound | ||||
| from ..log import ( | ||||
|     colorize_json, | ||||
| ) | ||||
| from ._util import ( | ||||
|     log, | ||||
|     get_console_log, | ||||
| ) | ||||
| from ..log import get_logger, colorize_json, get_console_log | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _use_practice_account = False | ||||
| _refresh_token_ep = 'https://{}login.questrade.com/oauth2/' | ||||
|  |  | |||
|  | @ -27,13 +27,12 @@ from typing import List | |||
| from async_generator import asynccontextmanager | ||||
| import asks | ||||
| 
 | ||||
| from ._util import ( | ||||
|     resproc, | ||||
|     BrokerError, | ||||
|     log, | ||||
| ) | ||||
| from ..log import get_logger | ||||
| from ._util import resproc, BrokerError | ||||
| from ..calc import percent_change | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _service_ep = 'https://api.robinhood.com' | ||||
| 
 | ||||
| 
 | ||||
|  | @ -66,10 +65,8 @@ class Client: | |||
|         self.api = _API(self._sess) | ||||
| 
 | ||||
|     def _zip_in_order(self, symbols: [str], quotes: List[dict]): | ||||
|         return { | ||||
|             quote.get('symbol', sym) if quote else sym: quote | ||||
|             for sym, quote in zip(symbols, quotes) | ||||
|         } | ||||
|         return {quote.get('symbol', sym) if quote else sym: quote | ||||
|                 for sym, quote in zip(symbols, quotes)} | ||||
| 
 | ||||
|     async def quote(self, symbols: [str]): | ||||
|         """Retrieve quotes for a list of ``symbols``. | ||||
|  |  | |||
|  | @ -1,49 +0,0 @@ | |||
| piker.clearing | ||||
| ______________ | ||||
| trade execution-n-control subsys for both live and paper trading as | ||||
| well as algo-trading manual override/interaction across any backend | ||||
| broker and data provider. | ||||
| 
 | ||||
| avail UIs | ||||
| ********* | ||||
| 
 | ||||
| order ctl | ||||
| --------- | ||||
| the `piker.clearing` subsys is exposed mainly through | ||||
| the `piker chart` GUI as a "chart trader" style UX and | ||||
| is automatically enabled whenever a chart is opened. | ||||
| 
 | ||||
| .. ^TODO, more prose here! | ||||
| 
 | ||||
| the "manual" order control features are exposed via the | ||||
| `piker.ui.order_mode` API and can pretty much always be | ||||
| used (at least) in simulated-trading mode, aka "paper"-mode, and | ||||
| the micro-manual is as follows: | ||||
| 
 | ||||
| ``order_mode`` ( | ||||
|     edge triggered activation by any of the following keys, | ||||
|     ``mouse-click`` on y-level to submit at that price | ||||
|     ): | ||||
| 
 | ||||
|     - ``f``/ ``ctl-f`` to stage buy | ||||
|     - ``d``/ ``ctl-d`` to stage sell | ||||
|     - ``a`` to stage alert | ||||
| 
 | ||||
| 
 | ||||
| ``search_mode`` ( | ||||
|     ``ctl-l`` or ``ctl-space`` to open, | ||||
|     ``ctl-c`` or ``ctl-space`` to close | ||||
|     ) : | ||||
| 
 | ||||
|     - begin typing to have symbol search automatically look up | ||||
|       symbols from all loaded backend (broker) providers | ||||
|     - arrow keys and mouse click to navigate selection | ||||
|     - vi-like ``ctl-[hjkl]`` for navigation | ||||
| 
 | ||||
| 
 | ||||
| position (pp) mgmt | ||||
| ------------------ | ||||
| you can also configure your position allocation limits from the | ||||
| sidepane. | ||||
| 
 | ||||
| .. ^TODO, explain and provide tut once more refined! | ||||
|  | @ -18,38 +18,3 @@ | |||
| Market machinery for order executions, book, management. | ||||
| 
 | ||||
| """ | ||||
| from ..log import get_logger | ||||
| from ._client import ( | ||||
|     open_ems, | ||||
|     OrderClient, | ||||
| ) | ||||
| from ._ems import ( | ||||
|     open_brokerd_dialog, | ||||
| ) | ||||
| from ._util import OrderDialogs | ||||
| from ._messages import ( | ||||
|     Order, | ||||
|     Status, | ||||
|     Cancel, | ||||
| 
 | ||||
|     # TODO: deprecate these and replace end-2-end with | ||||
|     # client-side-dialog set above B) | ||||
|     # https://github.com/pikers/piker/issues/514 | ||||
|     BrokerdPosition | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'open_ems', | ||||
|     'OrderClient', | ||||
|     'open_brokerd_dialog', | ||||
|     'OrderDialogs', | ||||
|     'Order', | ||||
|     'Status', | ||||
|     'Cancel', | ||||
|     'BrokerdPosition' | ||||
| 
 | ||||
| ] | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
|  |  | |||
|  | @ -23,9 +23,9 @@ from typing import Optional | |||
| 
 | ||||
| from bidict import bidict | ||||
| 
 | ||||
| from ._pos import Position | ||||
| from . import MktPair | ||||
| from piker.types import Struct | ||||
| from ..data._source import Symbol | ||||
| from ..data.types import Struct | ||||
| from ..pp import Position | ||||
| 
 | ||||
| 
 | ||||
| _size_units = bidict({ | ||||
|  | @ -42,15 +42,7 @@ SizeUnit = Enum( | |||
| 
 | ||||
| class Allocator(Struct): | ||||
| 
 | ||||
|     mkt: MktPair | ||||
| 
 | ||||
|     # TODO: if we ever want to support non-uniform entry-slot-proportion | ||||
|     # "sizes" | ||||
|     # disti_weight: str = 'uniform' | ||||
| 
 | ||||
|     units_limit: float | ||||
|     currency_limit: float | ||||
|     slots: int | ||||
|     symbol: Symbol | ||||
|     account: Optional[str] = 'paper' | ||||
| 
 | ||||
|     _size_units: bidict[str, Optional[str]] = _size_units | ||||
|  | @ -74,6 +66,14 @@ class Allocator(Struct): | |||
|         self._size_unit = v | ||||
|         return v | ||||
| 
 | ||||
|     # TODO: if we ever want to support non-uniform entry-slot-proportion | ||||
|     # "sizes" | ||||
|     # disti_weight: str = 'uniform' | ||||
| 
 | ||||
|     units_limit: float | ||||
|     currency_limit: float | ||||
|     slots: int | ||||
| 
 | ||||
|     def step_sizes( | ||||
|         self, | ||||
|     ) -> (float, float): | ||||
|  | @ -93,13 +93,10 @@ class Allocator(Struct): | |||
|         else: | ||||
|             return self.units_limit | ||||
| 
 | ||||
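For intuition, the slot arithmetic consumed by `next_order_info()`
below works out like this (hypothetical numbers; assumes
`step_sizes()` divides each limit evenly across `slots`)::

    # currency-sized alloc: currency_limit=1000, slots=4
    currency_per_slot = 1000 / 4          # -> 250 per slot
    slot_size = currency_per_slot / 100   # at price 100 -> 2.5 units
    # units-sized alloc: units_limit=10, slots=4 -> 2.5 units/slot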
|     def limit_info(self) -> tuple[str, float]: | ||||
|         return self.size_unit, self.limit() | ||||
| 
 | ||||
|     def next_order_info( | ||||
|         self, | ||||
| 
 | ||||
|         # we only need a startup size for exit calcs, we can then | ||||
|         # we only need a startup size for exit calcs, we can the | ||||
|         # determine how large slots should be if the initial pp size was | ||||
|         # larger then the current live one, and the live one is smaller | ||||
|         # then the initial config settings. | ||||
|  | @ -114,24 +111,24 @@ class Allocator(Struct): | |||
|         depending on position / order entry config. | ||||
| 
 | ||||
|         ''' | ||||
|         mkt: MktPair = self.mkt | ||||
|         ld: int = mkt.size_tick_digits | ||||
|         sym = self.symbol | ||||
|         ld = sym.lot_size_digits | ||||
| 
 | ||||
|         size_unit = self.size_unit | ||||
|         live_size = live_pp.cumsize | ||||
|         live_size = live_pp.size | ||||
|         abs_live_size = abs(live_size) | ||||
|         abs_startup_size = abs(startup_pp.cumsize) | ||||
|         abs_startup_size = abs(startup_pp.size) | ||||
| 
 | ||||
|         u_per_slot, currency_per_slot = self.step_sizes() | ||||
| 
 | ||||
|         if size_unit == 'units': | ||||
|             slot_size: float = u_per_slot | ||||
|             l_sub_pp: float = self.units_limit - abs_live_size | ||||
|             slot_size = u_per_slot | ||||
|             l_sub_pp = self.units_limit - abs_live_size | ||||
| 
 | ||||
|         elif size_unit == 'currency': | ||||
|             live_cost_basis: float = abs_live_size * live_pp.ppu | ||||
|             slot_size: float = currency_per_slot / price | ||||
|             l_sub_pp: float = (self.currency_limit - live_cost_basis) / price | ||||
|             live_cost_basis = abs_live_size * live_pp.be_price | ||||
|             slot_size = currency_per_slot / price | ||||
|             l_sub_pp = (self.currency_limit - live_cost_basis) / price | ||||
| 
 | ||||
|         else: | ||||
|             raise ValueError( | ||||
|  | @ -140,20 +137,12 @@ class Allocator(Struct): | |||
| 
 | ||||
|         # an entry (adding-to or starting a pp) | ||||
|         if ( | ||||
|             action == 'buy' and live_size > 0 or | ||||
|             action == 'sell' and live_size < 0 or | ||||
|             live_size == 0 | ||||
|             or ( | ||||
|                 action == 'buy' | ||||
|                 and live_size > 0 | ||||
|             ) | ||||
|             or ( | ||||
|                 action == 'sell' | ||||
|                 and live_size < 0 | ||||
|             ) | ||||
|         ): | ||||
|             order_size = min( | ||||
|                 slot_size, | ||||
|                 max(l_sub_pp, 0), | ||||
|             ) | ||||
| 
 | ||||
|             order_size = min(slot_size, l_sub_pp) | ||||
| 
 | ||||
|         # an exit (removing-from or going to net-zero pp) | ||||
|         else: | ||||
|  | @ -169,7 +158,7 @@ class Allocator(Struct): | |||
|             if size_unit == 'currency': | ||||
|                 # compute the "projected" limit's worth of units at the | ||||
|                 # current pp (weighted) price: | ||||
|                 slot_size = currency_per_slot / live_pp.ppu | ||||
|                 slot_size = currency_per_slot / live_pp.be_price | ||||
| 
 | ||||
|             else: | ||||
|                 slot_size = u_per_slot | ||||
|  | @ -184,7 +173,7 @@ class Allocator(Struct): | |||
|             order_size = max(slotted_pp, slot_size) | ||||
| 
 | ||||
|             if ( | ||||
|                 abs_live_size < slot_size | ||||
|                 abs_live_size < slot_size or | ||||
| 
 | ||||
|                 # NOTE: front/back "loading" heuristic: | ||||
|                 # if the remaining pp is in between 0-1.5x a slot's | ||||
|  | @ -193,17 +182,14 @@ class Allocator(Struct): | |||
|                 # **without** going past a net-zero pp. if the pp is | ||||
|                 # > 1.5x a slot size, then front load: exit a slot's and | ||||
|                 # expect net-zero to be acquired on the final exit. | ||||
|                 or slot_size < pp_size < round((1.5*slot_size), ndigits=ld) | ||||
|                 or ( | ||||
|                 slot_size < pp_size < round((1.5*slot_size), ndigits=ld) or | ||||
| 
 | ||||
|                     # underlying requires discrete (int) units (eg. stocks) | ||||
|                     # and thus our slot size (based on our limit) would | ||||
|                     # exit a fractional unit's worth so, presuming we aren't | ||||
|                     # supporting a fractional-units-style broker, we need to | ||||
|                     # exit the final unit. | ||||
|                     ld == 0 | ||||
|                     and abs_live_size == 1 | ||||
|                 ) | ||||
|                 # underlying requires discrete (int) units (eg. stocks) | ||||
|                 # and thus our slot size (based on our limit) would | ||||
|                 # exit a fractional unit's worth so, presuming we aren't | ||||
|                 # supporting a fractional-units-style broker, we need to | ||||
|                 # exit the final unit. | ||||
|                 ld == 0 and abs_live_size == 1 | ||||
|             ): | ||||
|                 order_size = abs_live_size | ||||
| 
 | ||||
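The exit heuristic above can be restated compactly: exit one slot's worth,
but collapse to the full remaining size when the position is smaller than
a slot, within 1.5x of a slot (back-load the final exit), or down to a
single discrete unit. A hedged sketch of just that decision, simplified
and not the shipped code:

    # illustration only: restates the exit-size heuristic above.
    def exit_size(
        slot_size: float,
        abs_live_size: float,
        size_digits: int,  # `ld` above; 0 => discrete (int) units
    ) -> float:
        if (
            abs_live_size < slot_size
            or slot_size < abs_live_size < round(1.5 * slot_size, size_digits)
            or (size_digits == 0 and abs_live_size == 1)
        ):
            # flatten in one shot instead of leaving crumbs
            return abs_live_size
        return slot_size

    assert exit_size(slot_size=2.0, abs_live_size=2.5, size_digits=2) == 2.5
    assert exit_size(slot_size=2.0, abs_live_size=10.0, size_digits=2) == 2.0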
|  | @ -212,12 +198,13 @@ class Allocator(Struct): | |||
|             # compute a fractional slots size to display | ||||
|             slots_used = self.slots_used( | ||||
|                 Position( | ||||
|                     mkt=mkt, | ||||
|                     bs_mktid=mkt.bs_mktid, | ||||
|                     symbol=sym, | ||||
|                     size=order_size, | ||||
|                     be_price=price, | ||||
|                     bsuid=sym, | ||||
|                 ) | ||||
|             ) | ||||
| 
 | ||||
|         # TODO: render an actual ``Executable`` type here? | ||||
|         return { | ||||
|             'size': abs(round(order_size, ndigits=ld)), | ||||
|             'size_digits': ld, | ||||
|  | @ -239,11 +226,11 @@ class Allocator(Struct): | |||
|         Calc and return the number of slots used by this ``Position``. | ||||
| 
 | ||||
|         ''' | ||||
|         abs_pp_size = abs(pp.cumsize) | ||||
|         abs_pp_size = abs(pp.size) | ||||
| 
 | ||||
|         if self.size_unit == 'currency': | ||||
|             # live_currency_size = size or (abs_pp_size * pp.ppu) | ||||
|             live_currency_size = abs_pp_size * pp.ppu | ||||
|             # live_currency_size = size or (abs_pp_size * pp.be_price) | ||||
|             live_currency_size = abs_pp_size * pp.be_price | ||||
|             prop = live_currency_size / self.currency_limit | ||||
| 
 | ||||
|         else: | ||||
|  | @ -255,9 +242,17 @@ class Allocator(Struct): | |||
|         return round(prop * self.slots) | ||||
| 
 | ||||
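The slots-used readout is just the live proportion of the limit scaled to
the configured slot count; a one-liner sketch:

    # sketch of the `slots_used` calc: fraction of the limit consumed,
    # scaled to the slot count and rounded to whole slots.
    def slots_used(live_size: float, limit: float, slots: int) -> int:
        return round((abs(live_size) / limit) * slots)

    assert slots_used(live_size=3000, limit=6000, slots=6) == 3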
| 
 | ||||
| _derivs = ( | ||||
|     'future', | ||||
|     'continuous_future', | ||||
|     'option', | ||||
|     'futures_option', | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def mk_allocator( | ||||
| 
 | ||||
|     mkt: MktPair, | ||||
|     symbol: Symbol, | ||||
|     startup_pp: Position, | ||||
| 
 | ||||
|     # default allocation settings | ||||
|  | @ -281,9 +276,45 @@ def mk_allocator( | |||
|         'currency_limit': 6e3, | ||||
|         'slots': 6, | ||||
|     } | ||||
| 
 | ||||
|     defaults.update(user_def) | ||||
| 
 | ||||
|     return Allocator( | ||||
|         mkt=mkt, | ||||
|     alloc = Allocator( | ||||
|         symbol=symbol, | ||||
|         **defaults, | ||||
|     ) | ||||
| 
 | ||||
|     asset_type = symbol.type_key | ||||
| 
 | ||||
|     # specific configs by asset class / type | ||||
| 
 | ||||
|     if asset_type in _derivs: | ||||
|         # since it's harder to know how currency "applies" in this case | ||||
|         # given leverage properties | ||||
|         alloc.size_unit = '# units' | ||||
| 
 | ||||
|         # set the units limit to the slot size thus making the next | ||||
|         # entry step 1.0 | ||||
|         alloc.units_limit = alloc.slots | ||||
| 
 | ||||
|     else: | ||||
|         alloc.size_unit = 'currency' | ||||
| 
 | ||||
|     # if the current position is already greater then the limit | ||||
|     # settings, increase the limit to the current position | ||||
|     if alloc.size_unit == 'currency': | ||||
|         startup_size = startup_pp.size * startup_pp.be_price | ||||
| 
 | ||||
|         if startup_size > alloc.currency_limit: | ||||
|             alloc.currency_limit = round(startup_size, ndigits=2) | ||||
| 
 | ||||
|     else: | ||||
|         startup_size = abs(startup_pp.size) | ||||
| 
 | ||||
|         if startup_size > alloc.units_limit: | ||||
|             alloc.units_limit = startup_size | ||||
| 
 | ||||
|             if asset_type in _derivs: | ||||
|                 alloc.slots = alloc.units_limit | ||||
| 
 | ||||
|     return alloc | ||||
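Worth noting: the limit-bump branches above guarantee a pre-existing
position can never start out "over limit". A hedged sketch of just the
currency-case adjustment (the numbers are made up):

    # sketch: grow the currency limit to cover a pre-existing position,
    # mirroring the `mk_allocator` branch above.
    def bump_currency_limit(
        currency_limit: float,
        startup_size: float,  # pp size * entry price
    ) -> float:
        if startup_size > currency_limit:
            return round(startup_size, ndigits=2)
        return currency_limit

    assert bump_currency_limit(6e3, 7_500.50) == 7500.5
    assert bump_currency_limit(6e3, 1_000.00) == 6e3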
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -18,284 +18,210 @@ | |||
| Orders and execution client API. | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from typing import Dict | ||||
| from pprint import pformat | ||||
| from typing import TYPE_CHECKING | ||||
| from dataclasses import dataclass, field | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.trionics import broadcast_receiver | ||||
| 
 | ||||
| from ._util import ( | ||||
|     log,  # sub-sys logger | ||||
| ) | ||||
| from piker.types import Struct | ||||
| from ..service import maybe_open_emsd | ||||
| from ._messages import ( | ||||
|     Order, | ||||
|     Cancel, | ||||
|     BrokerdPosition, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._messages import ( | ||||
|         Status, | ||||
|     ) | ||||
| from ..log import get_logger | ||||
| from ._ems import _emsd_main | ||||
| from .._daemon import maybe_open_emsd | ||||
| from ._messages import Order, Cancel | ||||
| 
 | ||||
| 
 | ||||
| class OrderClient(Struct): | ||||
|     ''' | ||||
|     EMS-client-side order book ctl and tracking. | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
|     (A)sync API for submitting orders and alerts to the `emsd` service; | ||||
|     this is the main control for execution management from client code. | ||||
| 
 | ||||
| @dataclass | ||||
| class OrderBook: | ||||
|     '''EMS-client-side order book ctl and tracking. | ||||
| 
 | ||||
|     A style similar to "model-view" is used here where this api is | ||||
|     provided as a supervised control for an EMS actor which does all the | ||||
|     hard/fast work of talking to brokers/exchanges to conduct | ||||
|     executions. | ||||
| 
 | ||||
|     Currently, this is mostly for keeping local state to match the EMS | ||||
|     and using received events to trigger graphics updates. | ||||
| 
 | ||||
|     ''' | ||||
|     # IPC stream to `emsd` actor | ||||
|     _ems_stream: tractor.MsgStream | ||||
| 
 | ||||
|     # mem channels used to relay order requests to the EMS daemon | ||||
|     _to_relay_task: trio.abc.SendChannel | ||||
|     _from_sync_order_client: trio.abc.ReceiveChannel | ||||
|     _to_ems: trio.abc.SendChannel | ||||
|     _from_order_book: trio.abc.ReceiveChannel | ||||
| 
 | ||||
|     # history table | ||||
|     _sent_orders: dict[str, Order] = {} | ||||
|     _sent_orders: Dict[str, Order] = field(default_factory=dict) | ||||
|     _ready_to_receive: trio.Event = trio.Event() | ||||
| 
 | ||||
|     def send_nowait( | ||||
|     def send( | ||||
|         self, | ||||
|         msg: Order | dict, | ||||
| 
 | ||||
|     ) -> dict | Order: | ||||
|         ''' | ||||
|         Sync version of ``.send()``. | ||||
| 
 | ||||
|         ''' | ||||
|     ) -> dict: | ||||
|         self._sent_orders[msg.oid] = msg | ||||
|         self._to_relay_task.send_nowait(msg) | ||||
|         self._to_ems.send_nowait(msg) | ||||
|         return msg | ||||
| 
 | ||||
|     async def send( | ||||
|     def update( | ||||
|         self, | ||||
|         msg: Order | dict, | ||||
| 
 | ||||
|     ) -> dict | Order: | ||||
|         ''' | ||||
|         Send a new order msg async to the `emsd` service. | ||||
| 
 | ||||
|         ''' | ||||
|         self._sent_orders[msg.oid] = msg | ||||
|         await self._ems_stream.send(msg) | ||||
|         return msg | ||||
| 
 | ||||
|     def update_nowait( | ||||
|         self, | ||||
|         uuid: str, | ||||
|         **data: dict, | ||||
| 
 | ||||
|     ) -> dict: | ||||
|         ''' | ||||
|         Sync version of ``.update()``. | ||||
| 
 | ||||
|         ''' | ||||
|         cmd = self._sent_orders[uuid] | ||||
|         msg = cmd.copy(update=data) | ||||
|         self._sent_orders[uuid] = msg | ||||
|         self._to_relay_task.send_nowait(msg) | ||||
|         return msg | ||||
|         self._to_ems.send_nowait(msg) | ||||
|         return cmd | ||||
| 
 | ||||
|     async def update( | ||||
|         self, | ||||
|         uuid: str, | ||||
|         **data: dict, | ||||
|     ) -> dict: | ||||
|         ''' | ||||
|         Update an existing order dialog with a msg updated from | ||||
|         ``update`` kwargs. | ||||
|     def cancel(self, uuid: str) -> bool: | ||||
|         """Cancel an order (or alert) in the EMS. | ||||
| 
 | ||||
|         ''' | ||||
|         """ | ||||
|         cmd = self._sent_orders[uuid] | ||||
|         msg = cmd.copy(update=data) | ||||
|         self._sent_orders[uuid] = msg | ||||
|         await self._ems_stream.send(msg) | ||||
|         return msg | ||||
| 
 | ||||
|     def _mk_cancel_msg( | ||||
|         self, | ||||
|         uuid: str, | ||||
|     ) -> Cancel: | ||||
|         cmd = self._sent_orders.get(uuid) | ||||
|         if not cmd: | ||||
|             log.error( | ||||
|                 f'Unknown order {uuid}!?\n' | ||||
|                 f'Maybe there is a stale entry or line?\n' | ||||
|                 f'You should report this as a bug!' | ||||
|             ) | ||||
|             return | ||||
| 
 | ||||
|         fqme = str(cmd.symbol) | ||||
|         return Cancel( | ||||
|         msg = Cancel( | ||||
|             oid=uuid, | ||||
|             symbol=fqme, | ||||
|             symbol=cmd.symbol, | ||||
|         ) | ||||
|         self._to_ems.send_nowait(msg) | ||||
| 
 | ||||
| 
 | ||||
| _orders: OrderBook = None | ||||
| 
 | ||||
| 
 | ||||
| def get_orders( | ||||
|     emsd_uid: tuple[str, str] = None | ||||
| ) -> OrderBook: | ||||
|     """" | ||||
|     OrderBook singleton factory per actor. | ||||
| 
 | ||||
|     """ | ||||
|     if emsd_uid is not None: | ||||
|         # TODO: read in target emsd's active book on startup | ||||
|         pass | ||||
| 
 | ||||
|     global _orders | ||||
| 
 | ||||
|     if _orders is None: | ||||
|         size = 100 | ||||
|         tx, rx = trio.open_memory_channel(size) | ||||
|         brx = broadcast_receiver(rx, size) | ||||
| 
 | ||||
|         # setup local ui event streaming channels for request/resp | ||||
|         # streaming with EMS daemon | ||||
|         _orders = OrderBook( | ||||
|             _to_ems=tx, | ||||
|             _from_order_book=brx, | ||||
|         ) | ||||
| 
 | ||||
|     def cancel_nowait( | ||||
|         self, | ||||
|         uuid: str, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Sync version of ``.cancel()``. | ||||
| 
 | ||||
|         ''' | ||||
|         self._to_relay_task.send_nowait( | ||||
|             self._mk_cancel_msg(uuid) | ||||
|         ) | ||||
| 
 | ||||
|     async def cancel( | ||||
|         self, | ||||
|         uuid: str, | ||||
| 
 | ||||
|     ) -> bool: | ||||
|         ''' | ||||
|         Cancel an already existing order (or alert) dialog. | ||||
| 
 | ||||
|         ''' | ||||
|         await self._ems_stream.send( | ||||
|             self._mk_cancel_msg(uuid) | ||||
|         ) | ||||
|     return _orders | ||||
| 
 | ||||
| 
 | ||||
| # TODO: we can get rid of this relay loop once we move | ||||
| # order_mode inputs to async code! | ||||
| async def relay_order_cmds_from_sync_code( | ||||
| 
 | ||||
| async def relay_orders_from_sync_code( | ||||
| 
 | ||||
|     client: OrderClient, | ||||
|     symbol_key: str, | ||||
|     to_ems_stream: tractor.MsgStream, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Order submission relay task: deliver orders sent from synchronous (UI) | ||||
|     code to the EMS via ``OrderClient._from_sync_order_client``. | ||||
|     """ | ||||
|     Order streaming task: deliver orders transmitted from UI | ||||
|     to downstream consumers. | ||||
| 
 | ||||
|     This is run in the UI actor (usually the one running Qt but could be | ||||
|     any other client service code). This process simply delivers order | ||||
|     messages to the above ``_to_relay_task`` send channel (from sync code using | ||||
|     messages to the above ``_to_ems`` send channel (from sync code using | ||||
|     ``.send_nowait()``), these values are pulled from the channel here | ||||
|     and relayed to any consumer(s) that called this function using | ||||
|     a ``tractor`` portal. | ||||
| 
 | ||||
|     This effectively makes order messages look like they're being | ||||
|     "pushed" from the parent to the EMS where local sync code is likely | ||||
|     doing the pushing from some non-async UI handler. | ||||
|     doing the pushing from some UI. | ||||
| 
 | ||||
|     ''' | ||||
|     async with ( | ||||
|         client._from_sync_order_client.subscribe() as sync_order_cmds | ||||
|     ): | ||||
|         async for cmd in sync_order_cmds: | ||||
|             sym = cmd.symbol | ||||
|             msg = pformat(cmd.to_dict()) | ||||
| 
 | ||||
|             if sym == symbol_key: | ||||
|                 log.info(f'Send order cmd:\n{msg}') | ||||
|     """ | ||||
|     book = get_orders() | ||||
|     async with book._from_order_book.subscribe() as orders_stream: | ||||
|         async for cmd in orders_stream: | ||||
|             if cmd.symbol == symbol_key: | ||||
|                 log.info(f'Send order cmd:\n{pformat(cmd)}') | ||||
|                 # send msg over IPC / wire | ||||
|                 await to_ems_stream.send(cmd) | ||||
| 
 | ||||
|             else: | ||||
|                 log.warning( | ||||
|                     f'Ignoring unmatched order cmd for {sym} != {symbol_key}:' | ||||
|                     f'\n{msg}' | ||||
|                 ) | ||||
| 
 | ||||
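The relay is nothing exotic: a `trio` memory channel bridges sync
(Qt-side) submit calls onto an async task which forwards to the IPC
stream. A stripped-down sketch of the same pattern, with the `tractor`
stream stubbed out by a print (illustration only):

    import trio

    async def main() -> None:
        tx, rx = trio.open_memory_channel(100)

        async def relay() -> None:
            # async side: drain msgs queued by sync code and
            # forward them over the (stubbed) wire.
            async for msg in rx:
                print(f'would send over IPC: {msg}')

        async with trio.open_nursery() as n:
            n.start_soon(relay)
            # sync side: a Qt handler can call this without awaiting
            tx.send_nowait({'action': 'buy', 'size': 1.0})
            await trio.sleep(0.1)
            n.cancel_scope.cancel()

    trio.run(main)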
| 
 | ||||
| @acm | ||||
| async def open_ems( | ||||
|     fqme: str, | ||||
|     mode: str = 'live', | ||||
|     loglevel: str = 'error', | ||||
|     fqsn: str, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     OrderClient,  # client | ||||
|     tractor.MsgStream,  # order ctl stream | ||||
|     dict[ | ||||
|         # brokername, acctid | ||||
|         tuple[str, str], | ||||
|         dict[str, BrokerdPosition], | ||||
|     ], | ||||
|     list[str], | ||||
|     dict[str, Status], | ||||
| ]: | ||||
| ) -> ( | ||||
|     OrderBook, | ||||
|     tractor.MsgStream, | ||||
|     dict, | ||||
| ): | ||||
|     ''' | ||||
|     (Maybe) spawn an EMS-daemon (emsd), deliver an `OrderClient` for | ||||
|     requesting orders/alerts and a `trades_stream` which delivers all | ||||
|     response-msgs. | ||||
|     Spawn an EMS daemon and begin sending orders and receiving | ||||
|     alerts. | ||||
| 
 | ||||
|     This is a "client side" entrypoint which may spawn the `emsd` service | ||||
|     if it can't be discovered and generally speaking is the lowest level | ||||
|     broker control client-API. | ||||
|     This EMS tries to reduce most brokers' terrible order entry apis to | ||||
|     a very simple protocol built on a few easy to grok and/or | ||||
|     "rantsy" premises: | ||||
| 
 | ||||
|     - most users will prefer "dark mode" where orders are not submitted | ||||
|       to a broker until an execution condition is triggered | ||||
|       (aka client-side "hidden orders") | ||||
| 
 | ||||
|     - Brokers over-complicate their apis and generally speaking hire | ||||
|       poor designers to create them. We're better off creating a super | ||||
|       minimal, schema-simple, request-event-stream protocol to unify all the | ||||
|       existing piles of shit (and shocker, it'll probably just end up | ||||
|       looking like a decent crypto exchange's api) | ||||
| 
 | ||||
|     - all order types can be implemented with client-side limit orders | ||||
| 
 | ||||
|     - we aren't reinventing a wheel in this case since none of these | ||||
|       brokers are exposing FIX protocol; it is they who are doing the re-invention. | ||||
| 
 | ||||
| 
 | ||||
|     TODO: make some fancy diagrams using mermaid.io | ||||
| 
 | ||||
|     the possible set of responses from the stream is currently: | ||||
|     - 'dark_submitted', 'broker_submitted' | ||||
|     - 'dark_cancelled', 'broker_cancelled' | ||||
|     - 'dark_executed', 'broker_executed' | ||||
|     - 'broker_filled' | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: prolly hand in the `MktPair` instance directly here as well! | ||||
|     from piker.accounting import unpack_fqme | ||||
|     broker, mktep, venue, suffix = unpack_fqme(fqme) | ||||
|     # wait for service to connect back to us signalling | ||||
|     # ready for order commands | ||||
|     book = get_orders() | ||||
| 
 | ||||
|     async with maybe_open_emsd( | ||||
|         broker, | ||||
|         loglevel=loglevel, | ||||
|     ) as portal: | ||||
|     from ..data._source import unpack_fqsn | ||||
|     broker, symbol, suffix = unpack_fqsn(fqsn) | ||||
| 
 | ||||
|     async with maybe_open_emsd(broker) as portal: | ||||
| 
 | ||||
|         from ._ems import _emsd_main | ||||
|         async with ( | ||||
|             # connect to emsd | ||||
|             portal.open_context( | ||||
|                 _emsd_main, | ||||
|                 fqme=fqme, | ||||
|                 exec_mode=mode, | ||||
|                 loglevel=loglevel, | ||||
| 
 | ||||
|             ) as ( | ||||
|                 ctx, | ||||
|                 ( | ||||
|                     positions, | ||||
|                     accounts, | ||||
|                     dialogs, | ||||
|                 ) | ||||
|             ), | ||||
|                 _emsd_main, | ||||
|                 fqsn=fqsn, | ||||
| 
 | ||||
|             ) as (ctx, (positions, accounts)), | ||||
| 
 | ||||
|             # open 2-way trade command stream | ||||
|             ctx.open_stream() as trades_stream, | ||||
|         ): | ||||
|             size: int = 100  # what should this be? | ||||
|             tx, rx = trio.open_memory_channel(size) | ||||
|             brx = broadcast_receiver(rx, size) | ||||
| 
 | ||||
|             # setup local ui event streaming channels for request/resp | ||||
|             # streaming with EMS daemon | ||||
|             client = OrderClient( | ||||
|                 _ems_stream=trades_stream, | ||||
|                 _to_relay_task=tx, | ||||
|                 _from_sync_order_client=brx, | ||||
|             ) | ||||
| 
 | ||||
|             client._ems_stream = trades_stream | ||||
| 
 | ||||
|             # start sync code order msg delivery task | ||||
|             async with trio.open_nursery() as n: | ||||
|                 n.start_soon( | ||||
|                     relay_orders_from_sync_code, | ||||
|                     client, | ||||
|                     fqme, | ||||
|                     relay_order_cmds_from_sync_code, | ||||
|                     fqsn, | ||||
|                     trades_stream | ||||
|                 ) | ||||
| 
 | ||||
|                 yield ( | ||||
|                     client, | ||||
|                     trades_stream, | ||||
|                     positions, | ||||
|                     accounts, | ||||
|                     dialogs, | ||||
|                 ) | ||||
| 
 | ||||
|                 # stop the sync-msg-relay task on exit. | ||||
|                 n.cancel_scope.cancel() | ||||
|                 yield book, trades_stream, positions, accounts | ||||
|  |  | |||
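For orientation, a hedged usage sketch against the newer-side `open_ems`
signature above (the fqme string and field values are made up; the
`Order` fields follow the newer msg schema shown further below):

    async def submit_dark_alert() -> None:
        async with open_ems('binance.btcusdt') as (
            client,
            trades_stream,
            positions,
            accounts,
            dialogs,
        ):
            await client.send(Order(
                action='alert',
                oid='<some-uuid4>',
                symbol='binance.btcusdt',
                account='paper',
                price=30_000.0,
                size=0.0,
                exec_mode='dark',
            ))
            # responses (acks, triggers, fills..) arrive on the stream
            async for msg in trades_stream:
                print(msg)
                break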
File diff suppressed because it is too large

|  | @ -18,74 +18,56 @@ | |||
| Clearing sub-system message and protocols. | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from typing import ( | ||||
|     Literal, | ||||
| ) | ||||
| from typing import Optional, Union | ||||
| 
 | ||||
| from msgspec import field | ||||
| 
 | ||||
| from piker.types import Struct | ||||
| from ..data._source import Symbol | ||||
| from ..data.types import Struct | ||||
| 
 | ||||
| 
 | ||||
| # TODO: ``msgspec`` stuff worth paying attention to: | ||||
| # - schema evolution: | ||||
| # https://jcristharif.com/msgspec/usage.html#schema-evolution | ||||
| # - for eg. ``BrokerdStatus``, instead just have separate messages? | ||||
| # - schema evolution: https://jcristharif.com/msgspec/usage.html#schema-evolution | ||||
| # - use literals for a common msg determined by diff keys? | ||||
| #   - https://jcristharif.com/msgspec/usage.html#literal | ||||
| #   - for eg. ``BrokerdStatus``, instead just have separate messages? | ||||
| 
 | ||||
| # -------------- | ||||
| # Client -> emsd | ||||
| # -------------- | ||||
| 
 | ||||
| class Order(Struct): | ||||
| 
 | ||||
|     # TODO: ideally we can combine these 2 fields into | ||||
|     # 1 and just use the size polarity to determine a buy/sell. | ||||
|     # i would like to see this become more like | ||||
|     # https://jcristharif.com/msgspec/usage.html#literal | ||||
|     # action: Literal[ | ||||
|     #     'live', | ||||
|     #     'dark', | ||||
|     #     'alert', | ||||
|     # ] | ||||
| 
 | ||||
|     action: Literal[ | ||||
|         'buy', | ||||
|         'sell', | ||||
|         'alert', | ||||
|     ] | ||||
|     # determines whether the create execution | ||||
|     # will be submitted to the ems or directly to | ||||
|     # the backend broker | ||||
|     exec_mode: Literal[ | ||||
|         'dark', | ||||
|         'live', | ||||
|         # 'paper',  no right? | ||||
|     ] | ||||
| 
 | ||||
|     # internal ``emsd`` unique "order id" | ||||
|     oid: str  # uuid4 | ||||
|     # TODO: figure out how to optionally typecast this to `MktPair`? | ||||
|     symbol: str  # | MktPair | ||||
|     account: str  # should we set a default as '' ? | ||||
| 
 | ||||
|     price: float | ||||
|     size: float  # -ve is "sell", +ve is "buy" | ||||
| 
 | ||||
|     brokers: list[str] = [] | ||||
| 
 | ||||
| 
 | ||||
| class Cancel(Struct): | ||||
|     ''' | ||||
|     Cancel msg for removing a dark (ems triggered) or | ||||
|     '''Cancel msg for removing a dark (ems triggered) or | ||||
|     broker-submitted (live) trigger/order. | ||||
| 
 | ||||
|     ''' | ||||
|     action: str = 'cancel' | ||||
|     oid: str  # uuid4 | ||||
|     symbol: str | ||||
|     action: str = 'cancel' | ||||
| 
 | ||||
| 
 | ||||
| class Order(Struct): | ||||
| 
 | ||||
|     # TODO: use ``msgspec.Literal`` | ||||
|     # https://jcristharif.com/msgspec/usage.html#literal | ||||
|     action: str  # {'buy', 'sell', 'alert'} | ||||
|     # internal ``emsd`` unique "order id" | ||||
|     oid: str  # uuid4 | ||||
|     symbol: Union[str, Symbol] | ||||
|     account: str  # should we set a default as '' ? | ||||
| 
 | ||||
|     price: float | ||||
|     # TODO: could we drop the ``.action`` field above and instead just | ||||
|     # use +/- values here? Would make the msg smaller at the sake of a | ||||
|     # teensie fp precision? | ||||
|     size: float | ||||
|     brokers: list[str] | ||||
| 
 | ||||
|     # Assigned once initial ack is received | ||||
|     # ack_time_ns: Optional[int] = None | ||||
| 
 | ||||
|     # determines whether the create execution | ||||
|     # will be submitted to the ems or directly to | ||||
|     # the backend broker | ||||
|     exec_mode: str  # {'dark', 'live', 'paper'} | ||||
| 
 | ||||
| 
 | ||||
| # -------------- | ||||
|  | @ -96,60 +78,41 @@ class Cancel(Struct): | |||
| 
 | ||||
| class Status(Struct): | ||||
| 
 | ||||
|     time_ns: int | ||||
|     oid: str  # uuid4 ems-order dialog id | ||||
| 
 | ||||
|     resp: Literal[ | ||||
|       'pending',  # acked by broker but not yet open | ||||
|       'open', | ||||
|       'dark_open',  # dark/algo triggered order is open in ems clearing loop | ||||
|       'triggered',  # above triggered order sent to brokerd, or an alert closed | ||||
|       'closed',  # fully cleared all size/units | ||||
|       'fill',  # partial execution | ||||
|       'canceled', | ||||
|       'error', | ||||
|     ] | ||||
| 
 | ||||
|     name: str = 'status' | ||||
|     oid: str  # uuid4 | ||||
|     time_ns: int | ||||
| 
 | ||||
|     # { | ||||
|     #   'dark_submitted', | ||||
|     #   'dark_cancelled', | ||||
|     #   'dark_triggered', | ||||
| 
 | ||||
|     #   'broker_submitted', | ||||
|     #   'broker_cancelled', | ||||
|     #   'broker_executed', | ||||
|     #   'broker_filled', | ||||
|     #   'broker_errored', | ||||
| 
 | ||||
|     #   'alert_submitted', | ||||
|     #   'alert_triggered', | ||||
| 
 | ||||
|     # } | ||||
|     resp: str  # "response", see above | ||||
| 
 | ||||
|     # trigger info | ||||
|     trigger_price: Optional[float] = None | ||||
|     # price: float | ||||
| 
 | ||||
|     # broker: Optional[str] = None | ||||
| 
 | ||||
|     # this maps normally to the ``BrokerdOrder.reqid`` below, an id | ||||
|     # normally allocated internally by the backend broker routing system | ||||
|     reqid: int | str | None = None | ||||
|     broker_reqid: Optional[Union[int, str]] = None | ||||
| 
 | ||||
|     # the (last) source order/request msg if provided | ||||
|     # (eg. the Order/Cancel which causes this msg) and | ||||
|     # acts as a back-reference to the corresponding | ||||
|     # request message which was the source of this msg. | ||||
|     req: Order | None = None | ||||
| 
 | ||||
|     # XXX: better design/name here? | ||||
|     # flag that can be set to indicate a message for an order | ||||
|     # event that wasn't originated by piker's emsd (eg. some external | ||||
|     # trading system which does its own order control but that you | ||||
|     # might want to "track" using piker UIs/systems). | ||||
|     src: str | None = None | ||||
| 
 | ||||
|     # set when a cancel request msg was set for this order flow dialog | ||||
|     # but the brokerd dialog isn't yet in a cancelled state. | ||||
|     cancel_called: bool = False | ||||
| 
 | ||||
|     # for relaying a boxed brokerd-dialog-side msg data "through" the | ||||
|     # ems layer to clients. | ||||
|     # for relaying backend msg data "through" the ems layer | ||||
|     brokerd_msg: dict = {} | ||||
| 
 | ||||
| 
 | ||||
| class Error(Status): | ||||
|     resp: str = 'error' | ||||
| 
 | ||||
|     # TODO: allow re-wrapping from existing (last) status? | ||||
|     @classmethod | ||||
|     def from_status( | ||||
|         cls, | ||||
|         msg: Status, | ||||
|     ) -> Error: | ||||
|         ... | ||||
| 
 | ||||
| 
 | ||||
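The `from_status` classmethod above is stubbed out (`...`). A plausible,
purely illustrative fill-in, demoed here on heavily simplified stand-in
structs via plain `msgspec` (the real `Status` carries many more fields):

    import msgspec

    class Status(msgspec.Struct):
        oid: str
        time_ns: int
        resp: str = 'open'

    class Error(Status):
        resp: str = 'error'

        # copy the prior status' fields, overwrite the response
        @classmethod
        def from_status(cls, msg: Status) -> 'Error':
            kwargs = msgspec.structs.asdict(msg)
            kwargs['resp'] = 'error'
            return cls(**kwargs)

    err = Error.from_status(Status(oid='oid-1', time_ns=0))
    assert err.resp == 'error'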
| # --------------- | ||||
| # emsd -> brokerd | ||||
| # --------------- | ||||
|  | @ -157,6 +120,7 @@ class Error(Status): | |||
| 
 | ||||
| class BrokerdCancel(Struct): | ||||
| 
 | ||||
|     action: str = 'cancel' | ||||
|     oid: str  # piker emsd order id | ||||
|     time_ns: int | ||||
| 
 | ||||
|  | @ -167,31 +131,27 @@ class BrokerdCancel(Struct): | |||
|     # for setting a unique order id then this value will be relayed back | ||||
|     # on the emsd order request stream as the ``BrokerdOrderAck.reqid`` | ||||
|     # field | ||||
|     reqid: int | str | None = None | ||||
|     action: str = 'cancel' | ||||
|     reqid: Optional[Union[int, str]] = None | ||||
| 
 | ||||
| 
 | ||||
| class BrokerdOrder(Struct): | ||||
| 
 | ||||
|     action: str  # {buy, sell} | ||||
|     oid: str | ||||
|     account: str | ||||
|     time_ns: int | ||||
| 
 | ||||
|     symbol: str  # fqme | ||||
|     price: float | ||||
|     size: float | ||||
| 
 | ||||
|     # TODO: if we instead rely on a +ve/-ve size to determine | ||||
|     # the action we more or less don't need this field right? | ||||
|     action: str = ''  # {buy, sell} | ||||
| 
 | ||||
|     # "broker request id": broker specific/internal order id if this is | ||||
|     # None, creates a new order otherwise if the id is valid the backend | ||||
|     # api must modify the existing matching order. If the broker allows | ||||
|     # for setting a unique order id then this value will be relayed back | ||||
|     # on the emsd order request stream as the ``BrokerdOrderAck.reqid`` | ||||
|     # field | ||||
|     reqid: int | str | None = None | ||||
|     reqid: Optional[Union[int, str]] = None | ||||
| 
 | ||||
|     symbol: str  # symbol.<providername> ? | ||||
|     price: float | ||||
|     size: float | ||||
| 
 | ||||
| 
 | ||||
| # --------------- | ||||
|  | @ -207,67 +167,73 @@ class BrokerdOrderAck(Struct): | |||
|     ``.oid`` (which is a uuid4). | ||||
| 
 | ||||
|     ''' | ||||
|     name: str = 'ack' | ||||
| 
 | ||||
|     # defined and provided by backend | ||||
|     reqid: int | str | ||||
|     reqid: Union[int, str] | ||||
| 
 | ||||
|     # emsd id originally sent in matching request msg | ||||
|     oid: str | ||||
|     # TODO: do we need this? | ||||
|     account: str = '' | ||||
|     name: str = 'ack' | ||||
| 
 | ||||
| 
 | ||||
| class BrokerdStatus(Struct): | ||||
| 
 | ||||
|     time_ns: int | ||||
|     reqid: int | str | ||||
|     status: Literal[ | ||||
|         'open', | ||||
|         'canceled', | ||||
|         'pending', | ||||
|         # 'error',  # NOTE: use `BrokerdError` | ||||
|         'closed', | ||||
|     ] | ||||
|     name: str = 'status' | ||||
|     reqid: Union[int, str] | ||||
|     time_ns: int | ||||
| 
 | ||||
|     # XXX: should be best effort set for every update | ||||
|     account: str = '' | ||||
| 
 | ||||
|     # { | ||||
|     #   'submitted', | ||||
|     #   'cancelled', | ||||
|     #   'filled', | ||||
|     # } | ||||
|     status: str | ||||
| 
 | ||||
|     oid: str = '' | ||||
|     # TODO: do we need this? | ||||
|     account: str | None = None | ||||
|     filled: float = 0.0 | ||||
|     reason: str = '' | ||||
|     remaining: float = 0.0 | ||||
| 
 | ||||
|     # external: bool = False | ||||
|     # XXX: better design/name here? | ||||
|     # flag that can be set to indicate a message for an order | ||||
|     # event that wasn't originated by piker's emsd (eg. some external | ||||
|     # trading system which does its own order control but that you | ||||
|     # might want to "track" using piker UIs/systems). | ||||
|     external: bool = False | ||||
| 
 | ||||
|     # XXX: not required schema as of yet | ||||
|     broker_details: dict = field(default_factory=lambda: { | ||||
|     broker_details: dict = { | ||||
|         'name': '', | ||||
|     }) | ||||
|     } | ||||
| 
 | ||||
| 
 | ||||
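Note the newer side wraps `broker_details` in `field(default_factory=...)`:
a factory gives each instance its own fresh dict rather than sharing one
mutable default. A minimal demo with plain `msgspec` (class name is made
up):

    import msgspec

    class Demo(msgspec.Struct):
        # each instance gets its own dict via the factory
        details: dict = msgspec.field(default_factory=lambda: {'name': ''})

    a, b = Demo(), Demo()
    a.details['name'] = 'ib'
    assert b.details == {'name': ''}  # no cross-instance aliasing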
| class BrokerdFill(Struct): | ||||
|     ''' | ||||
|     A single message indicating a "fill-details" event from the | ||||
|     broker if available. | ||||
|     A single message indicating a "fill-details" event from the broker | ||||
|     if available. | ||||
| 
 | ||||
|     ''' | ||||
|     # brokerd timestamp required for order mode arrow placement on x-axis | ||||
|     # TODO: maybe int if we force ns? | ||||
|     # we need to normalize this somehow since backends will use their | ||||
|     # own format and likely across many disparate epoch clocks... | ||||
|     name: str = 'fill' | ||||
|     reqid: Union[int, str] | ||||
|     time_ns: int | ||||
|     broker_time: float | ||||
|     reqid: int | str | ||||
| 
 | ||||
|     # order execution related | ||||
|     action: str | ||||
|     size: float | ||||
|     price: float | ||||
| 
 | ||||
|     name: str = 'fill' | ||||
|     action: str | None = None | ||||
|     broker_details: dict = {}  # meta-data (eg. commissions etc.) | ||||
| 
 | ||||
|     # brokerd timestamp required for order mode arrow placement on x-axis | ||||
| 
 | ||||
|     # TODO: maybe int if we force ns? | ||||
|     # we need to normalize this somehow since backends will use their | ||||
|     # own format and likely across many disparate epoch clocks... | ||||
|     broker_time: float | ||||
| 
 | ||||
| 
 | ||||
| class BrokerdError(Struct): | ||||
|     ''' | ||||
|  | @ -276,31 +242,27 @@ class BrokerdError(Struct): | |||
|     This is still a TODO thing since we're not sure how to employ it yet. | ||||
| 
 | ||||
|     ''' | ||||
|     reason: str | ||||
|     name: str = 'error' | ||||
|     oid: str | ||||
| 
 | ||||
|     # TODO: drop this right? | ||||
|     symbol: str | None = None | ||||
| 
 | ||||
|     oid: str | None = None | ||||
|     # if no brokerd order request was actually submitted (eg. we errored | ||||
|     # at the ``pikerd`` layer) then there will be no ``reqid`` allocated. | ||||
|     reqid: str | None = None | ||||
|     reqid: Optional[Union[int, str]] = None | ||||
| 
 | ||||
|     name: str = 'error' | ||||
|     symbol: str | ||||
|     reason: str | ||||
|     broker_details: dict = {} | ||||
| 
 | ||||
| 
 | ||||
| # TODO: yeah, so we REALLY need to completely deprecate | ||||
| # this and use the `.accounting.Position` msg-type instead.. | ||||
| class BrokerdPosition(Struct): | ||||
|     ''' | ||||
|     Position update event from brokerd. | ||||
|     '''Position update event from brokerd. | ||||
| 
 | ||||
|     ''' | ||||
|     name: str = 'position' | ||||
| 
 | ||||
|     broker: str | ||||
|     account: str | ||||
|     symbol: str | ||||
|     size: float | ||||
|     avg_price: float | ||||
|     currency: str = '' | ||||
|     name: str = 'position' | ||||
|  |  | |||
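On the schema-evolution TODO above: `msgspec` lets you add new fields with
defaults and still decode msgs encoded against the older layout. A small
hedged demo (the struct names are made up):

    import msgspec

    class PositionV1(msgspec.Struct):
        symbol: str
        size: float

    # new optional field w/ a default: old wire msgs still decode
    class PositionV2(msgspec.Struct):
        symbol: str
        size: float
        currency: str = ''

    old_wire = msgspec.json.encode(PositionV1(symbol='xbtusd', size=1.0))
    msg = msgspec.json.decode(old_wire, type=PositionV2)
    assert msg.currency == ''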
File diff suppressed because it is too large

|  | @ -1,93 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| """ | ||||
| Sub-sys module commons. | ||||
| 
 | ||||
| """ | ||||
| from collections import ChainMap | ||||
| from functools import partial | ||||
| from typing import Any | ||||
| 
 | ||||
| from ..log import ( | ||||
|     get_logger, | ||||
|     get_console_log, | ||||
| ) | ||||
| from piker.types import Struct | ||||
| subsys: str = 'piker.clearing' | ||||
| 
 | ||||
| log = get_logger(subsys) | ||||
| 
 | ||||
| get_console_log = partial( | ||||
|     get_console_log, | ||||
|     name=subsys, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| class OrderDialogs(Struct): | ||||
|     ''' | ||||
|     Order control dialog (and thus transaction) tracking via | ||||
|     message recording. | ||||
| 
 | ||||
|     Allows easily recording messages associated with a given set of | ||||
|     order control transactions and looking up the latest field | ||||
|     state using the entire (reverse chronological) msg flow. | ||||
| 
 | ||||
|     ''' | ||||
|     _flows: dict[str, ChainMap] = {} | ||||
| 
 | ||||
|     def add_msg( | ||||
|         self, | ||||
|         oid: str, | ||||
|         msg: dict, | ||||
|     ) -> None: | ||||
| 
 | ||||
|         # NOTE: manually enter a new map on the first msg add to | ||||
|         # avoid creating one with an empty dict first entry in | ||||
|         # `ChainMap.maps` which is the default if none passed at | ||||
|         # init. | ||||
|         cm: ChainMap = self._flows.get(oid) | ||||
|         if cm: | ||||
|             cm.maps.insert(0, msg) | ||||
|         else: | ||||
|             cm = ChainMap(msg) | ||||
|             self._flows[oid] = cm | ||||
| 
 | ||||
|     # TODO: wrap all this in the `collections.abc.Mapping` interface? | ||||
|     def get( | ||||
|         self, | ||||
|         oid: str, | ||||
| 
 | ||||
|     ) -> ChainMap[str, Any]: | ||||
|         ''' | ||||
|         Return the dialog `ChainMap` for provided id. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._flows.get(oid, None) | ||||
| 
 | ||||
|     def pop( | ||||
|         self, | ||||
|         oid: str, | ||||
| 
 | ||||
|     ) -> ChainMap[str, Any]: | ||||
|         ''' | ||||
|         Pop and thus remove the `ChainMap` containing the msg flow | ||||
|         for the given order id. | ||||
| 
 | ||||
|         ''' | ||||
|         if (flow := self._flows.pop(oid, None)) is None: | ||||
|             log.warning(f'No flow found for oid: {oid}') | ||||
| 
 | ||||
|         return flow | ||||
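A quick hedged usage sketch of the `ChainMap`-per-dialog idea: each new
msg shadows older ones field-by-field while the full history stays
reachable:

    dialogs = OrderDialogs()
    dialogs.add_msg('oid-1', {'action': 'buy', 'price': 100.0})
    dialogs.add_msg('oid-1', {'price': 101.0, 'resp': 'ack'})

    flow = dialogs.get('oid-1')
    assert flow['price'] == 101.0   # newest msg wins
    assert flow['action'] == 'buy'  # older fields still visible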
|  | @ -1,295 +1,121 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) 2018-present Tyler Goodlet | ||||
| # (in stewardship for pikers, everywhere.) | ||||
| # Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or | ||||
| # modify it under the terms of the GNU Affero General Public | ||||
| # License as published by the Free Software Foundation, either | ||||
| # version 3 of the License, or (at your option) any later version. | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU | ||||
| # Affero General Public License for more details. | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public | ||||
| # License along with this program.  If not, see | ||||
| # <https://www.gnu.org/licenses/>. | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| CLI commons. | ||||
| 
 | ||||
| ''' | ||||
| import os | ||||
| # from contextlib import AsyncExitStack | ||||
| from types import ModuleType | ||||
| from pprint import pformat | ||||
| 
 | ||||
| import click | ||||
| import trio | ||||
| import tractor | ||||
| from tractor._multiaddr import parse_maddr | ||||
| 
 | ||||
| from ..log import ( | ||||
|     get_console_log, | ||||
|     get_logger, | ||||
|     colorize_json, | ||||
| ) | ||||
| from ..log import get_console_log, get_logger, colorize_json | ||||
| from ..brokers import get_brokermod | ||||
| from ..service import ( | ||||
|     _default_registry_host, | ||||
|     _default_registry_port, | ||||
| ) | ||||
| from .._daemon import _tractor_kwargs | ||||
| from .. import config | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger('piker.cli') | ||||
| 
 | ||||
| 
 | ||||
| def load_trans_eps( | ||||
|     network: dict | None = None, | ||||
|     maddrs: list[tuple] | None = None, | ||||
| 
 | ||||
| ) -> dict[str, dict[str, dict]]: | ||||
| 
 | ||||
|     # transport-oriented endpoint multi-addresses | ||||
|     eps: dict[ | ||||
|         str,  # service name, eg. `pikerd`, `emsd`.. | ||||
| 
 | ||||
|         # libp2p style multi-addresses parsed into prot layers | ||||
|         list[dict[str, str | int]] | ||||
|     ] = {} | ||||
| 
 | ||||
|     if ( | ||||
|         network | ||||
|         and not maddrs | ||||
|     ): | ||||
|         # load network section and (attempt to) connect all endpoints | ||||
|         # which are reachable B) | ||||
|         for key, maddrs in network.items(): | ||||
|             match key: | ||||
| 
 | ||||
|                 # TODO: resolve table across multiple discov | ||||
|                 # prots Bo | ||||
|                 case 'resolv': | ||||
|                     pass | ||||
| 
 | ||||
|                 case 'pikerd': | ||||
|                     dname: str = key | ||||
|                     for maddr in maddrs: | ||||
|                         layers: dict = parse_maddr(maddr) | ||||
|                         eps.setdefault( | ||||
|                             dname, | ||||
|                             [], | ||||
|                         ).append(layers) | ||||
| 
 | ||||
|     elif maddrs: | ||||
|         # presume user is manually specifying the root actor ep. | ||||
|         eps['pikerd'] = [parse_maddr(maddr)] | ||||
| 
 | ||||
|     return eps | ||||
| log = get_logger('cli') | ||||
| DEFAULT_BROKER = 'questrade' | ||||
| 
 | ||||
| 
 | ||||
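`parse_maddr` comes from `tractor` internals, so only as a rough mental
model (emphatically NOT the real implementation): a libp2p-style
multiaddr alternates protocol/value segments which get folded into the
per-layer dicts indexed in `load_trans_eps` above (`layers['ipv4']['addr']`,
`layers['tcp']['port']`):

    # toy parser, illustration only.
    def toy_parse_maddr(maddr: str) -> dict[str, dict]:
        parts = maddr.strip('/').split('/')
        layers: dict[str, dict] = {}
        for proto, val in zip(parts[::2], parts[1::2]):
            key = 'addr' if proto in ('ipv4', 'ipv6') else 'port'
            layers[proto] = {key: int(val) if val.isdigit() else val}
        return layers

    layers = toy_parse_maddr('/ipv4/127.0.0.1/tcp/6116')
    assert layers['ipv4']['addr'] == '127.0.0.1'
    assert layers['tcp']['port'] == 6116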
| @click.command() | ||||
| @click.option('--loglevel', '-l', default='warning', help='Logging level') | ||||
| @click.option('--tl', is_flag=True, help='Enable tractor logging') | ||||
| @click.option('--pdb', is_flag=True, help='Enable tractor debug mode') | ||||
| @click.option('--host', '-h', default='127.0.0.1', help='Host address to bind') | ||||
| @click.option( | ||||
|     '--loglevel', | ||||
|     '-l', | ||||
|     default='warning', | ||||
|     help='Logging level', | ||||
| ) | ||||
| @click.option( | ||||
|     '--tl', | ||||
|     '--tsdb', | ||||
|     is_flag=True, | ||||
|     help='Enable tractor-runtime logs', | ||||
|     help='Enable local ``marketstore`` instance' | ||||
| ) | ||||
| @click.option( | ||||
|     '--pdb', | ||||
|     is_flag=True, | ||||
|     help='Enable tractor debug mode', | ||||
| ) | ||||
| @click.option( | ||||
|     '--maddr', | ||||
|     '-m', | ||||
|     default=None, | ||||
|     help='Multiaddrs to bind or contact', | ||||
| ) | ||||
| # @click.option( | ||||
| #     '--tsdb', | ||||
| #     is_flag=True, | ||||
| #     help='Enable local ``marketstore`` instance' | ||||
| # ) | ||||
| # @click.option( | ||||
| #     '--es', | ||||
| #     is_flag=True, | ||||
| #     help='Enable local ``elasticsearch`` instance' | ||||
| # ) | ||||
| def pikerd( | ||||
|     maddr: list[str] | None, | ||||
|     loglevel: str, | ||||
|     tl: bool, | ||||
|     pdb: bool, | ||||
|     # tsdb: bool, | ||||
|     # es: bool, | ||||
| ): | ||||
| def pikerd(loglevel, host, tl, pdb, tsdb): | ||||
|     ''' | ||||
|     Spawn the piker broker-daemon. | ||||
| 
 | ||||
|     ''' | ||||
|     from tractor.devx import maybe_open_crash_handler | ||||
|     with maybe_open_crash_handler(pdb=pdb): | ||||
|         log = get_console_log(loglevel, name='cli') | ||||
|     from .._daemon import open_pikerd | ||||
|     log = get_console_log(loglevel) | ||||
| 
 | ||||
|         if pdb: | ||||
|             log.warning(( | ||||
|                 "\n" | ||||
|                 "!!! YOU HAVE ENABLED DAEMON DEBUG MODE !!!\n" | ||||
|                 "When a `piker` daemon crashes it will block the " | ||||
|                 "task-thread until resumed from console!\n" | ||||
|                 "\n" | ||||
|             )) | ||||
|     if pdb: | ||||
|         log.warning(( | ||||
|             "\n" | ||||
|             "!!! You have enabled daemon DEBUG mode !!!\n" | ||||
|             "If a daemon crashes it will likely block" | ||||
|             " the service until resumed from console!\n" | ||||
|             "\n" | ||||
|         )) | ||||
| 
 | ||||
|         # service-actor registry endpoint socket-address set | ||||
|         regaddrs: list[tuple[str, int]] = [] | ||||
|     async def main(): | ||||
| 
 | ||||
|         conf, _ = config.load( | ||||
|             conf_name='conf', | ||||
|         ) | ||||
|         network: dict = conf.get('network') | ||||
|         if ( | ||||
|             network is None | ||||
|             and not maddr | ||||
|         async with ( | ||||
|             open_pikerd( | ||||
|                 loglevel=loglevel, | ||||
|                 debug_mode=pdb, | ||||
|             ),  # normally delivers a ``Services`` handle | ||||
|             trio.open_nursery() as n, | ||||
|         ): | ||||
|             regaddrs = [( | ||||
|                 _default_registry_host, | ||||
|                 _default_registry_port, | ||||
|             )] | ||||
|             if tsdb: | ||||
|                 from piker.data._ahab import start_ahab | ||||
|                 from piker.data.marketstore import start_marketstore | ||||
| 
 | ||||
|         else: | ||||
|             eps: dict = load_trans_eps( | ||||
|                 network, | ||||
|                 maddr, | ||||
|             ) | ||||
|             for layers in eps['pikerd']: | ||||
|                 regaddrs.append(( | ||||
|                     layers['ipv4']['addr'], | ||||
|                     layers['tcp']['port'], | ||||
|                 )) | ||||
|                 log.info('Spawning `marketstore` supervisor') | ||||
|                 ctn_ready, config, (cid, pid) = await n.start( | ||||
|                     start_ahab, | ||||
|                     'marketstored', | ||||
|                     start_marketstore, | ||||
| 
 | ||||
|         from .. import service | ||||
|                 ) | ||||
|                 log.info( | ||||
|                     f'`marketstored` up!\n' | ||||
|                     f'pid: {pid}\n' | ||||
|                     f'container id: {cid[:12]}\n' | ||||
|                     f'config: {pformat(config)}' | ||||
|                 ) | ||||
| 
 | ||||
|         async def main(): | ||||
|             service_mngr: service.Services | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|             async with ( | ||||
|                 service.open_pikerd( | ||||
|                     registry_addrs=regaddrs, | ||||
|                     loglevel=loglevel, | ||||
|                     debug_mode=pdb, | ||||
| 
 | ||||
|                 ) as service_mngr,  # normally delivers a ``Services`` handle | ||||
| 
 | ||||
|                 # AsyncExitStack() as stack, | ||||
|             ): | ||||
|                 # TODO: spawn all other sub-actor daemons according to | ||||
|                 # multiaddress endpoint spec defined by user config | ||||
|                 assert service_mngr | ||||
| 
 | ||||
|                 # if tsdb: | ||||
|                 #     dname, conf = await stack.enter_async_context( | ||||
|                 #         service.marketstore.start_ahab_daemon( | ||||
|                 #             service_mngr, | ||||
|                 #             loglevel=loglevel, | ||||
|                 #         ) | ||||
|                 #     ) | ||||
|                 #     log.info(f'TSDB `{dname}` up with conf:\n{conf}') | ||||
| 
 | ||||
|                 # if es: | ||||
|                 #     dname, conf = await stack.enter_async_context( | ||||
|                 #         service.elastic.start_ahab_daemon( | ||||
|                 #             service_mngr, | ||||
|                 #             loglevel=loglevel, | ||||
|                 #         ) | ||||
|                 #     ) | ||||
|                 #     log.info(f'DB `{dname}` up with conf:\n{conf}') | ||||
| 
 | ||||
|                 await trio.sleep_forever() | ||||
| 
 | ||||
|         trio.run(main) | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @click.group(context_settings=config._context_defaults) | ||||
| @click.option( | ||||
|     '--brokers', '-b', | ||||
|     default=None, | ||||
|     default=[DEFAULT_BROKER], | ||||
|     multiple=True, | ||||
|     help='Broker backend to use' | ||||
| ) | ||||
| @click.option('--loglevel', '-l', default='warning', help='Logging level') | ||||
| @click.option('--tl', is_flag=True, help='Enable tractor logging') | ||||
| @click.option('--configdir', '-c', help='Configuration directory') | ||||
| @click.option( | ||||
|     '--pdb', | ||||
|     is_flag=True, | ||||
|     help='Enable runtime debug mode ', | ||||
| ) | ||||
| @click.option( | ||||
|     '--maddr', | ||||
|     '-m', | ||||
|     default=None, | ||||
|     multiple=True, | ||||
|     help='Multiaddr to bind', | ||||
| ) | ||||
| @click.option( | ||||
|     '--regaddr', | ||||
|     '-r', | ||||
|     default=None, | ||||
|     help='Registrar addr to contact', | ||||
| ) | ||||
| @click.pass_context | ||||
| def cli( | ||||
|     ctx: click.Context, | ||||
|     brokers: list[str], | ||||
|     loglevel: str, | ||||
|     tl: bool, | ||||
|     configdir: str, | ||||
|     pdb: bool, | ||||
| 
 | ||||
|     # TODO: make these list[str] with multiple -m maddr0 -m maddr1 | ||||
|     maddr: list[str], | ||||
|     regaddr: str, | ||||
| 
 | ||||
| ) -> None: | ||||
| def cli(ctx, brokers, loglevel, tl, configdir): | ||||
|     if configdir is not None: | ||||
|         assert os.path.isdir(configdir), f"`{configdir}` is not a valid path" | ||||
|         config._override_config_dir(configdir) | ||||
| 
 | ||||
|     # TODO: for typer see | ||||
|     # https://typer.tiangolo.com/tutorial/commands/context/ | ||||
|     ctx.ensure_object(dict) | ||||
| 
 | ||||
|     if not brokers: | ||||
|         # (try to) load all (supposedly) supported data/broker backends | ||||
|         from piker.brokers import __brokers__ | ||||
|         brokers = __brokers__ | ||||
| 
 | ||||
|     brokermods: dict[str, ModuleType] = { | ||||
|         broker: get_brokermod(broker) for broker in brokers | ||||
|     } | ||||
|     assert brokermods | ||||
| 
 | ||||
|     # TODO: load endpoints from `conf::[network].pikerd` | ||||
|     # - pikerd vs. regd, separate registry daemon? | ||||
|     # - expose datad vs. brokerd? | ||||
|     # - bind emsd with certain perms on public iface? | ||||
|     regaddrs: list[tuple[str, int]] = regaddr or [( | ||||
|         _default_registry_host, | ||||
|         _default_registry_port, | ||||
|     )] | ||||
| 
 | ||||
|     # TODO: factor [network] section parsing out from pikerd | ||||
|     # above and call it here as well. | ||||
|     # if maddr: | ||||
|     #     for addr in maddr: | ||||
|     #         layers: dict = parse_maddr(addr) | ||||
|     if len(brokers) == 1: | ||||
|         brokermods = [get_brokermod(brokers[0])] | ||||
|     else: | ||||
|         brokermods = [get_brokermod(broker) for broker in brokers] | ||||
| 
 | ||||
|     ctx.obj.update({ | ||||
|         'brokers': brokers, | ||||
|  | @ -299,12 +125,6 @@ def cli( | |||
|         'log': get_console_log(loglevel), | ||||
|         'confdir': config._config_dir, | ||||
|         'wl_path': config._watchlists_data_path, | ||||
|         'registry_addrs': regaddrs, | ||||
|         'pdb': pdb,  # debug mode flag | ||||
| 
 | ||||
|         # TODO: endpoint parsing, pinging and binding | ||||
|         # on no existing server. | ||||
|         # 'maddrs': maddr, | ||||
|     }) | ||||
| 
 | ||||
|     # allow enabling same loglevel in ``tractor`` machinery | ||||
|  | @ -314,52 +134,38 @@ def cli( | |||
| 
 | ||||
| @cli.command() | ||||
| @click.option('--tl', is_flag=True, help='Enable tractor logging') | ||||
| @click.argument('ports', nargs=-1, required=False) | ||||
| @click.argument('names', nargs=-1, required=False) | ||||
| @click.pass_obj | ||||
| def services(config, tl, ports): | ||||
| 
 | ||||
|     from ..service import ( | ||||
|         open_piker_runtime, | ||||
|         _default_registry_port, | ||||
|         _default_registry_host, | ||||
|     ) | ||||
| 
 | ||||
|     host = _default_registry_host | ||||
|     if not ports: | ||||
|         ports = [_default_registry_port] | ||||
| def services(config, tl, names): | ||||
| 
 | ||||
|     async def list_services(): | ||||
|         nonlocal host | ||||
|         async with ( | ||||
|             open_piker_runtime( | ||||
|                 name='service_query', | ||||
|                 loglevel=config['loglevel'] if tl else None, | ||||
|             ), | ||||
|             tractor.get_arbiter( | ||||
|                 host=host, | ||||
|                 port=ports[0] | ||||
|             ) as portal | ||||
|         ): | ||||
| 
 | ||||
|         async with tractor.get_arbiter( | ||||
|             *_tractor_kwargs['arbiter_addr'] | ||||
|         ) as portal: | ||||
|             registry = await portal.run_from_ns('self', 'get_registry') | ||||
|             json_d = {} | ||||
|             for key, socket in registry.items(): | ||||
|                 # name, uuid = uid | ||||
|                 host, port = socket | ||||
|                 json_d[key] = f'{host}:{port}' | ||||
|             click.echo(f"{colorize_json(json_d)}") | ||||
| 
 | ||||
|     trio.run(list_services) | ||||
|     tractor.run( | ||||
|         list_services, | ||||
|         name='service_query', | ||||
|         loglevel=config['loglevel'] if tl else None, | ||||
|         arbiter_addr=_tractor_kwargs['arbiter_addr'], | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def _load_clis() -> None: | ||||
|     # from ..service import elastic  # noqa | ||||
|     from ..data import marketstore  # noqa | ||||
|     from ..data import cli  # noqa | ||||
|     from ..brokers import cli  # noqa | ||||
|     from ..ui import cli  # noqa | ||||
|     from ..watchlists import cli  # noqa | ||||
| 
 | ||||
|     # typer implemented | ||||
|     from ..storage import cli  # noqa | ||||
|     from ..accounting import cli  # noqa | ||||
| 
 | ||||
| 
 | ||||
| # load downstream cli modules | ||||
| _load_clis() | ||||
|  |  | |||
250  piker/config.py

|  | @ -15,42 +15,28 @@ | |||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Platform configuration (files) mgmt. | ||||
| Broker configuration mgmt. | ||||
| 
 | ||||
| """ | ||||
| import platform | ||||
| import sys | ||||
| import os | ||||
| from os import path | ||||
| from os.path import dirname | ||||
| import shutil | ||||
| from typing import ( | ||||
|     Callable, | ||||
|     MutableMapping, | ||||
| ) | ||||
| from pathlib import Path | ||||
| from typing import Optional | ||||
| 
 | ||||
| from bidict import bidict | ||||
| import tomlkit | ||||
| try: | ||||
|     import tomllib | ||||
| except ModuleNotFoundError: | ||||
|     import tomli as tomllib | ||||
| 
 | ||||
| import toml | ||||
| 
 | ||||
| from .log import get_logger | ||||
| 
 | ||||
| log = get_logger('broker-config') | ||||
| 
 | ||||
| 
 | ||||
| # XXX NOTE: taken from ``click`` since apparently they have some | ||||
| # taken from ``click`` since apparently they have some | ||||
| # super weirdness with sigint and sudo..no clue | ||||
| # we're probably going to slowly just modify it to our own version over | ||||
| # time.. | ||||
| def get_app_dir( | ||||
|     app_name: str, | ||||
|     roaming: bool = True, | ||||
|     force_posix: bool = False, | ||||
| 
 | ||||
| ) -> str: | ||||
| def get_app_dir(app_name, roaming=True, force_posix=False): | ||||
|     r"""Returns the config folder for the application.  The default behavior | ||||
|     is to return whatever is most appropriate for the operating system. | ||||
| 
 | ||||
|  | @ -89,31 +75,7 @@ def get_app_dir( | |||
|     def _posixify(name): | ||||
|         return "-".join(name.split()).lower() | ||||
| 
 | ||||
|     # NOTE: for testing with `pytest` we leverage the `tmp_dir` | ||||
|     # fixture to generate (and clean up) a test-request-specific | ||||
|     # directory for isolated configuration files such that, | ||||
|     # - multiple tests can run (possibly in parallel) without data races | ||||
|     #   on the config state, | ||||
|     # - we don't need to ever worry about leaking configs into the | ||||
|     #   system, thus avoiding the need to manage config cleanup fixtures or | ||||
|     #   other bothers (since obviously `tmp_dir` cleans up after itself). | ||||
|     # | ||||
|     # In order to "pass down" the test dir path to all (sub-)actors in | ||||
|     # the actor tree we preload the root actor's runtime vars state (an | ||||
|     # internal mechanism for inheriting state down an actor tree in | ||||
|     # `tractor`) with the testing dir and check for it whenever we | ||||
|     # detect `pytest` is being used (which it isn't under normal | ||||
|     # operation). | ||||
|     # if "pytest" in sys.modules: | ||||
|     #     import tractor | ||||
|     #     actor = tractor.current_actor(err_on_no_runtime=False) | ||||
|     #     if actor:  # runtime is up | ||||
|     #         rvs = tractor._state._runtime_vars | ||||
|     #         import pdbp; pdbp.set_trace() | ||||
|     #         testdirpath = Path(rvs['piker_vars']['piker_test_dir']) | ||||
|     #         assert testdirpath.exists(), 'piker test harness might be borked!?' | ||||
|     #         app_name = str(testdirpath) | ||||
| 
 | ||||
|     # if WIN: | ||||
|     if platform.system() == 'Windows': | ||||
|         key = "APPDATA" if roaming else "LOCALAPPDATA" | ||||
|         folder = os.environ.get(key) | ||||
|  | @ -133,38 +95,29 @@ def get_app_dir( | |||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| _click_config_dir: Path = Path(get_app_dir('piker')) | ||||
| _config_dir: Path = _click_config_dir | ||||
| _config_dir = _click_config_dir = get_app_dir('piker') | ||||
| _parent_user = os.environ.get('SUDO_USER') | ||||
| 
 | ||||
| # NOTE: when using `sudo` we attempt to determine the non-root user | ||||
| # and still use their normal config dir. | ||||
| if ( | ||||
|     (_parent_user := os.environ.get('SUDO_USER')) | ||||
|     and | ||||
|     _parent_user != 'root' | ||||
| ): | ||||
|     non_root_user_dir = Path( | ||||
|         os.path.expanduser(f'~{_parent_user}') | ||||
| if _parent_user: | ||||
|     non_root_user_dir = os.path.expanduser( | ||||
|         f'~{_parent_user}' | ||||
|     ) | ||||
|     root: str = 'root' | ||||
|     _ccds: str = str(_click_config_dir)  # click config dir as string | ||||
|     i_tail: int = int(_ccds.rfind(root) + len(root)) | ||||
|     root = 'root' | ||||
|     _config_dir = ( | ||||
|         non_root_user_dir | ||||
|         / | ||||
|         Path(_ccds[i_tail+1:])  # +1 to capture trailing '/' | ||||
|         non_root_user_dir + | ||||
|         _click_config_dir[ | ||||
|             _click_config_dir.rfind(root) + len(root): | ||||
|         ] | ||||
|     ) | ||||
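The remap above is easier to see with concrete (hypothetical) paths; a minimal sketch assuming ``SUDO_USER=alice`` on a stock Linux box:

```python
from pathlib import Path

click_dir = Path('/root/.config/piker')  # what get_app_dir() reports under sudo
user_home = Path('/home/alice')          # os.path.expanduser(f'~{SUDO_USER}')

# re-root the path tail after 'root' onto the invoking user's home
ccds = str(click_dir)
i_tail = ccds.rfind('root') + len('root')
config_dir = user_home / Path(ccds[i_tail + 1:])

assert str(config_dir) == '/home/alice/.config/piker'
```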
| 
 | ||||
| 
 | ||||
| _conf_names: set[str] = { | ||||
|     'conf',  # god config | ||||
|     'brokers',  # sec backend deatz | ||||
|     'watchlists',  # (user defined) market lists | ||||
|     'brokers', | ||||
|     'pps', | ||||
|     'trades', | ||||
|     'watchlists', | ||||
| } | ||||
| 
 | ||||
| # TODO: probably drop all this super legacy, questrade specific, | ||||
| # config stuff XD ? | ||||
| _watchlists_data_path: Path = _config_dir / Path('watchlists.json') | ||||
| _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json') | ||||
| _context_defaults = dict( | ||||
|     default_map={ | ||||
|         # Questrade specific quote poll rates | ||||
|  | @ -178,14 +131,6 @@ _context_defaults = dict( | |||
| ) | ||||
| 
 | ||||
| 
 | ||||
| class ConfigurationError(Exception): | ||||
|     'Misconfigured settings, likely in a TOML file.' | ||||
| 
 | ||||
| 
 | ||||
| class NoSignature(ConfigurationError): | ||||
|     'No credentials setup for broker backend!' | ||||
| 
 | ||||
| 
 | ||||
| def _override_config_dir( | ||||
|     path: str | ||||
| ) -> None: | ||||
|  | @ -200,19 +145,10 @@ def _conf_fn_w_ext( | |||
|     return f'{name}.toml' | ||||
| 
 | ||||
| 
 | ||||
| def get_conf_dir() -> Path: | ||||
|     ''' | ||||
|     Return the user configuration directory ``Path`` | ||||
|     on the local filesystem. | ||||
| 
 | ||||
|     ''' | ||||
|     return _config_dir | ||||
| 
 | ||||
| 
 | ||||
| def get_conf_path( | ||||
|     conf_name: str = 'brokers', | ||||
| 
 | ||||
| ) -> Path: | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Return the top-level default config path normally under | ||||
|     ``~/.config/piker`` on linux for a given ``conf_name``, the config | ||||
|  | @ -220,6 +156,7 @@ def get_conf_path( | |||
| 
 | ||||
|     Contains files such as: | ||||
|     - brokers.toml | ||||
|     - pp.toml | ||||
|     - watchlists.toml | ||||
| 
 | ||||
|     # maybe coming soon ;) | ||||
|  | @ -227,102 +164,64 @@ def get_conf_path( | |||
|     - strats.toml | ||||
| 
 | ||||
|     ''' | ||||
|     if 'account.' not in conf_name: | ||||
|         assert str(conf_name) in _conf_names | ||||
| 
 | ||||
|     assert conf_name in _conf_names | ||||
|     fn = _conf_fn_w_ext(conf_name) | ||||
|     return _config_dir / Path(fn) | ||||
|     return os.path.join( | ||||
|         _config_dir, | ||||
|         fn, | ||||
|     ) | ||||
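As a usage sketch (assuming a stock XDG layout and no sudo), the newer ``Path``-returning version resolves like so; the account name below is hypothetical:

```python
from pathlib import Path

# standard named configs must be in `_conf_names`..
assert get_conf_path('brokers') == Path.home() / '.config/piker/brokers.toml'

# ..but per-account files skip that check in the newer code:
get_conf_path('account.ib.paper')  # -> ~/.config/piker/account.ib.paper.toml
```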
| 
 | ||||
| 
 | ||||
| def repodir() -> Path: | ||||
| def repodir(): | ||||
|     ''' | ||||
|     Return the abspath as ``Path`` to the git repo's root dir. | ||||
|     Return the abspath to the repo directory. | ||||
| 
 | ||||
|     ''' | ||||
|     repodir: Path = Path(__file__).absolute().parent.parent | ||||
|     confdir: Path = repodir / 'config' | ||||
| 
 | ||||
|     if not confdir.is_dir(): | ||||
|         # prolly inside stupid GH actions CI.. | ||||
|         repodir: Path = Path(os.environ.get('GITHUB_WORKSPACE')) | ||||
|         confdir: Path = repodir / 'config' | ||||
| 
 | ||||
|     assert confdir.is_dir(), f'{confdir} DNE, {repodir} is likely incorrect!' | ||||
|     return repodir | ||||
|     dirpath = path.abspath( | ||||
|         # this module file lives 2 levels below the repo root | ||||
|         dirname(dirname(os.path.realpath(__file__))) | ||||
|     ) | ||||
|     return dirpath | ||||
| 
 | ||||
| 
 | ||||
| def load( | ||||
|     # NOTE: always appended with .toml suffix | ||||
|     conf_name: str = 'conf', | ||||
|     path: Path | None = None, | ||||
| 
 | ||||
|     decode: Callable[ | ||||
|         [str | bytes,], | ||||
|         MutableMapping, | ||||
|     ] = tomllib.loads, | ||||
| 
 | ||||
|     touch_if_dne: bool = False, | ||||
|     conf_name: str = 'brokers', | ||||
|     path: str = None, | ||||
| 
 | ||||
|     **tomlkws, | ||||
| 
 | ||||
| ) -> tuple[dict, Path]: | ||||
| ) -> (dict, str): | ||||
|     ''' | ||||
|     Load config file by name. | ||||
| 
 | ||||
|     If the desired config is not under the top-level piker user config | ||||
|     path, pass ``path: Path`` explicitly. | ||||
| 
 | ||||
|     ''' | ||||
|     # create the $HOME/.config/piker dir if dne | ||||
|     if not _config_dir.is_dir(): | ||||
|         _config_dir.mkdir( | ||||
|             parents=True, | ||||
|             exist_ok=True, | ||||
|     path = path or get_conf_path(conf_name) | ||||
| 
 | ||||
|     if not os.path.isfile(path): | ||||
|         fn = _conf_fn_w_ext(conf_name) | ||||
| 
 | ||||
|         template = os.path.join( | ||||
|             repodir(), | ||||
|             'config', | ||||
|             fn | ||||
|         ) | ||||
| 
 | ||||
|     path_provided: bool = path is not None | ||||
|     path: Path = path or get_conf_path(conf_name) | ||||
| 
 | ||||
|     if ( | ||||
|         not path.is_file() | ||||
|         and touch_if_dne | ||||
|     ): | ||||
|         # when an explicit path was provided skip the template copy | ||||
|         # and just touch an empty file with the same name. | ||||
|         if path_provided: | ||||
|             with path.open(mode='x'): | ||||
|                 pass | ||||
| 
 | ||||
|         # try to copy in a template config to the user's dir if one | ||||
|         # exists. | ||||
|         # try to copy in a template config to the user's directory | ||||
|         # if one exists. | ||||
|         if os.path.isfile(template): | ||||
|             shutil.copyfile(template, path) | ||||
|         else: | ||||
|             fn: str = _conf_fn_w_ext(conf_name) | ||||
|             template: Path = repodir() / 'config' / fn | ||||
|             if template.is_file(): | ||||
|                 shutil.copyfile(template, path) | ||||
| 
 | ||||
|             elif fn and template: | ||||
|                 assert template.is_file(), f'{template} is not a file!?' | ||||
| 
 | ||||
|             assert path.is_file(), f'Config file {path} not created!?' | ||||
| 
 | ||||
|     with path.open(mode='r') as fp: | ||||
|         config: dict = decode( | ||||
|             fp.read(), | ||||
|             **tomlkws, | ||||
|         ) | ||||
|             with open(path, 'w'): | ||||
|                 pass  # touch | ||||
| 
 | ||||
|     config = toml.load(path, **tomlkws) | ||||
|     log.debug(f"Read config file {path}") | ||||
|     return config, path | ||||
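A usage sketch against the new signature; note the decoder is pluggable (default ``tomllib.loads``), so e.g. ``tomlkit.parse`` can be swapped in when a style-preserving round-trip is wanted:

```python
import tomlkit

# read (and create-if-missing) the broker config:
conf, path = load(
    conf_name='brokers',
    touch_if_dne=True,
)

# or parse with `tomlkit` to keep comments/ordering intact for a
# later `write()` round-trip:
conf, path = load('brokers', decode=tomlkit.parse)
```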
| 
 | ||||
| 
 | ||||
| def write( | ||||
|     config: dict,  # toml config as dict | ||||
| 
 | ||||
|     name: str | None = None, | ||||
|     path: Path | None = None, | ||||
|     fail_empty: bool = True, | ||||
| 
 | ||||
|     name: str = 'brokers', | ||||
|     path: str = None, | ||||
|     **toml_kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|  | @ -332,41 +231,34 @@ def write( | |||
|     Create a ``brokers.ini`` file if one does not exist. | ||||
| 
 | ||||
|     ''' | ||||
|     if name: | ||||
|         path: Path = path or get_conf_path(name) | ||||
|         dirname: Path = path.parent | ||||
|         if not dirname.is_dir(): | ||||
|             log.debug(f"Creating config dir {_config_dir}") | ||||
|             dirname.mkdir() | ||||
|     path = path or get_conf_path(name) | ||||
|     dirname = os.path.dirname(path) | ||||
|     if not os.path.isdir(dirname): | ||||
|         log.debug(f"Creating config dir {_config_dir}") | ||||
|         os.makedirs(dirname) | ||||
| 
 | ||||
|     if ( | ||||
|         not config | ||||
|         and fail_empty | ||||
|     ): | ||||
|     if not config: | ||||
|         raise ValueError( | ||||
|             "Watch out you're trying to write a blank config!" | ||||
|         ) | ||||
|             "Watch out you're trying to write a blank config!") | ||||
| 
 | ||||
|     log.debug( | ||||
|         f"Writing config `{name}` file to:\n" | ||||
|         f"{path}" | ||||
|     ) | ||||
|     with path.open(mode='w') as fp: | ||||
|         return tomlkit.dump(  # preserve style on write B) | ||||
|     with open(path, 'w') as cf: | ||||
|         return toml.dump( | ||||
|             config, | ||||
|             fp, | ||||
|             cf, | ||||
|             **toml_kwargs, | ||||
|         ) | ||||
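Paired with a ``tomlkit.parse``-decoded ``load()``, the style-preserving round-trip looks roughly like this (section and key names hypothetical):

```python
import tomlkit

conf, path = load('brokers', decode=tomlkit.parse)
conf.setdefault('kraken', tomlkit.table())['key_descr'] = 'api_0'
write(conf, name='brokers')  # comments and ordering survive the dump
```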
| 
 | ||||
| 
 | ||||
| def load_accounts( | ||||
|     providers: list[str] | None = None | ||||
|     providers: Optional[list[str]] = None | ||||
| 
 | ||||
| ) -> bidict[str, str | None]: | ||||
| ) -> bidict[str, Optional[str]]: | ||||
| 
 | ||||
|     conf, path = load( | ||||
|         conf_name='brokers', | ||||
|     ) | ||||
|     conf, path = load() | ||||
|     accounts = bidict() | ||||
|     for provider_name, section in conf.items(): | ||||
|         accounts_section = section.get('accounts') | ||||
|  |  | |||
|  | @ -22,7 +22,7 @@ and storing data from your brokers as well as | |||
| sharing live streams over a network. | ||||
| 
 | ||||
| """ | ||||
| from .ticktools import iterticks | ||||
| from ._normalize import iterticks | ||||
| from ._sharedmem import ( | ||||
|     maybe_open_shm_array, | ||||
|     attach_shm_array, | ||||
|  | @ -30,42 +30,19 @@ from ._sharedmem import ( | |||
|     get_shm_token, | ||||
|     ShmArray, | ||||
| ) | ||||
| from ._source import ( | ||||
|     def_iohlcv_fields, | ||||
|     def_ohlcv_fields, | ||||
| ) | ||||
| from .feed import ( | ||||
|     Feed, | ||||
|     open_feed, | ||||
|     _setup_persistent_brokerd, | ||||
| ) | ||||
| from .flows import Flume | ||||
| from ._symcache import ( | ||||
|     SymbologyCache, | ||||
|     open_symcache, | ||||
|     get_symcache, | ||||
|     match_from_pairs, | ||||
| ) | ||||
| from ._sampling import open_sample_stream | ||||
| from ..types import Struct | ||||
| 
 | ||||
| 
 | ||||
| __all__: list[str] = [ | ||||
|     'Flume', | ||||
|     'Feed', | ||||
| __all__ = [ | ||||
|     'open_feed', | ||||
|     'ShmArray', | ||||
|     'iterticks', | ||||
|     'maybe_open_shm_array', | ||||
|     'match_from_pairs', | ||||
|     'attach_shm_array', | ||||
|     'open_shm_array', | ||||
|     'get_shm_token', | ||||
|     'def_iohlcv_fields', | ||||
|     'def_ohlcv_fields', | ||||
|     'open_symcache', | ||||
|     'open_sample_stream', | ||||
|     'get_symcache', | ||||
|     'Struct', | ||||
|     'SymbologyCache', | ||||
|     'types', | ||||
|     '_setup_persistent_brokerd', | ||||
| ] | ||||
|  |  | |||
|  | @ -15,13 +15,9 @@ | |||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Supervisor for ``docker`` with async, structured-concurrency (SC) | ||||
| wrapping to ensure cancellable container lifetimes. | ||||
| Supervisor for docker with included specific-image service helpers. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from collections import ChainMap | ||||
| from functools import partial | ||||
| import os | ||||
| import time | ||||
| from typing import ( | ||||
|  | @ -43,19 +39,13 @@ from docker.errors import ( | |||
|     APIError, | ||||
|     # ContainerError, | ||||
| ) | ||||
| import requests | ||||
| from requests.exceptions import ( | ||||
|     ConnectionError, | ||||
|     ReadTimeout, | ||||
| ) | ||||
| from requests.exceptions import ConnectionError, ReadTimeout | ||||
| 
 | ||||
| from ._mngr import Services | ||||
| from ._util import ( | ||||
|     log,  # sub-sys logger | ||||
|     get_console_log, | ||||
| ) | ||||
| from ..log import get_logger, get_console_log | ||||
| from .. import config | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| class DockerNotStarted(Exception): | ||||
|     'Prolly you dint start da daemon bruh' | ||||
|  | @ -130,19 +120,8 @@ class Container: | |||
| 
 | ||||
|     async def process_logs_until( | ||||
|         self, | ||||
|         log_msg_key: str, | ||||
| 
 | ||||
|         # this is a predicate func for matching log msgs emitted by the | ||||
|         # underlying containerized app | ||||
|         patt_matcher: Callable[[str], bool], | ||||
| 
 | ||||
|         # XXX WARNING XXX: do not touch this sleep value unless | ||||
|         # you know what you are doing! the value is critical to | ||||
|         # making sure the caller code inside the startup context | ||||
|         # does not timeout BEFORE we receive a match on the | ||||
|         # ``patt_matcher()`` predicate above. | ||||
|         checkpoint_period: float = 0.001, | ||||
| 
 | ||||
|         patt: str, | ||||
|         bp_on_msg: bool = False, | ||||
|     ) -> bool: | ||||
|         ''' | ||||
|         Attempt to capture container log messages and relay through our | ||||
|  | @ -153,15 +132,6 @@ class Container: | |||
| 
 | ||||
|         while True: | ||||
|             logs = self.cntr.logs() | ||||
|             try: | ||||
|                 logs = self.cntr.logs() | ||||
|             except ( | ||||
|                 docker.errors.NotFound, | ||||
|                 docker.errors.APIError | ||||
|             ): | ||||
|                 log.exception('Failed to parse logs?') | ||||
|                 return False | ||||
| 
 | ||||
|             entries = logs.decode().split('\n') | ||||
|             for entry in entries: | ||||
| 
 | ||||
|  | @ -169,48 +139,34 @@ class Container: | |||
|                 if not entry: | ||||
|                     continue | ||||
| 
 | ||||
|                 entry = entry.strip() | ||||
|                 try: | ||||
|                     record = json.loads(entry) | ||||
|                     msg = record[log_msg_key] | ||||
|                     level = record['level'] | ||||
| 
 | ||||
|                     record = json.loads(entry.strip()) | ||||
|                 except json.JSONDecodeError: | ||||
|                     msg = entry | ||||
|                     level = 'error' | ||||
|                     if 'Error' in entry: | ||||
|                         raise RuntimeError(entry) | ||||
|                     raise | ||||
| 
 | ||||
|                 # TODO: do we need a more general mechanism | ||||
|                 # for these kinda of "log record entries"? | ||||
|                 # if 'Error' in entry: | ||||
|                 #     raise RuntimeError(entry) | ||||
| 
 | ||||
|                 if ( | ||||
|                     msg | ||||
|                     and entry not in seen_so_far | ||||
|                 ): | ||||
|                 msg = record['msg'] | ||||
|                 level = record['level'] | ||||
|                 if msg and entry not in seen_so_far: | ||||
|                     seen_so_far.add(entry) | ||||
|                     getattr( | ||||
|                         log, | ||||
|                         level.lower(), | ||||
|                         log.error | ||||
|                     )(f'{msg}') | ||||
|                     if bp_on_msg: | ||||
|                         await tractor.breakpoint() | ||||
| 
 | ||||
|                     if level == 'fatal': | ||||
|                     getattr(log, level, log.error)(f'{msg}') | ||||
| 
 | ||||
|                     # print(f'level: {level}') | ||||
|                     if level in ('error', 'fatal'): | ||||
|                         raise ApplicationLogError(msg) | ||||
| 
 | ||||
|                 if await patt_matcher(msg): | ||||
|                 if patt in msg: | ||||
|                     return True | ||||
| 
 | ||||
|                 # do a checkpoint so we don't block if cancelled B) | ||||
|                 await trio.sleep(checkpoint_period) | ||||
|                 await trio.sleep(0.01) | ||||
| 
 | ||||
|         return False | ||||
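The matcher is now an async predicate over the decoded log-msg body (vs. the old substring ``patt``); a hypothetical matcher pair, assuming the app emits JSON records carrying the body under a 'msg' field:

```python
async def is_started(msg: str) -> bool:
    # hypothetical readiness line from the containerized app's logs
    return 'ready for connections' in msg

# inside the supervising task:
found: bool = await cntr.process_logs_until(
    log_msg_key='msg',
    patt_matcher=is_started,
)
```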
| 
 | ||||
|     @property | ||||
|     def cuid(self) -> str: | ||||
|         fqcn: str = self.cntr.attrs['Config']['Image'] | ||||
|         return f'{fqcn}[{self.cntr.short_id}]' | ||||
| 
 | ||||
|     def try_signal( | ||||
|         self, | ||||
|         signal: str = 'SIGINT', | ||||
|  | @ -232,12 +188,13 @@ class Container: | |||
| 
 | ||||
|     def hard_kill(self, start: float) -> None: | ||||
|         delay = time.time() - start | ||||
|         log.error( | ||||
|             f'Failed to kill container {self.cntr.id} after {delay}s\n' | ||||
|             'sending SIGKILL..' | ||||
|         ) | ||||
|         # get out the big guns, bc apparently marketstore | ||||
|         # doesn't actually know how to terminate gracefully | ||||
|         # :eyeroll:... | ||||
|         log.error( | ||||
|             f'SIGKILL-ing: {self.cntr.id} after {delay}s\n' | ||||
|         ) | ||||
|         self.try_signal('SIGKILL') | ||||
|         self.cntr.wait( | ||||
|             timeout=3, | ||||
|  | @ -246,51 +203,35 @@ class Container: | |||
| 
 | ||||
|     async def cancel( | ||||
|         self, | ||||
|         log_msg_key: str, | ||||
|         stop_predicate: Callable[[str], bool], | ||||
| 
 | ||||
|         stop_msg: str, | ||||
|         hard_kill: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Attempt to cancel this container gracefully, fail over to | ||||
|         a hard kill on timeout. | ||||
| 
 | ||||
|         ''' | ||||
|         cid = self.cntr.id | ||||
| 
 | ||||
|         # first try a graceful cancel | ||||
|         log.cancel( | ||||
|             f'SIGINT cancelling container: {self.cuid}\n' | ||||
|             'waiting on stop predicate...' | ||||
|             f'SIGINT cancelling container: {cid}\n' | ||||
|             f'waiting on stop msg: "{stop_msg}"' | ||||
|         ) | ||||
|         self.try_signal('SIGINT') | ||||
| 
 | ||||
|         start = time.time() | ||||
|         for _ in range(6): | ||||
|         for _ in range(30): | ||||
| 
 | ||||
|             with trio.move_on_after(1) as cs: | ||||
|                 log.cancel( | ||||
|                     f'polling for CNTR logs for {stop_predicate}..' | ||||
|                 ) | ||||
|             with trio.move_on_after(0.5) as cs: | ||||
|                 cs.shield = True | ||||
|                 log.cancel('polling for CNTR logs...') | ||||
| 
 | ||||
|                 try: | ||||
|                     await self.process_logs_until( | ||||
|                         log_msg_key, | ||||
|                         stop_predicate, | ||||
|                     ) | ||||
|                     await self.process_logs_until(stop_msg) | ||||
|                 except ApplicationLogError: | ||||
|                     hard_kill = True | ||||
|                 else: | ||||
|                     # if we aren't cancelled on above checkpoint then we | ||||
|                     # assume we read the expected stop msg and | ||||
|                     # terminated. | ||||
|                     break | ||||
| 
 | ||||
|             if cs.cancelled_caught: | ||||
|                 # on timeout just try a hard kill after | ||||
|                 # a quick container sync-wait. | ||||
|                 hard_kill = True | ||||
|                 # if we aren't cancelled on above checkpoint then we | ||||
|                 # assume we read the expected stop msg and terminated. | ||||
|                 break | ||||
| 
 | ||||
|             try: | ||||
|                 log.info(f'Polling for container shutdown:\n{cid}') | ||||
|  | @ -313,16 +254,9 @@ class Container: | |||
|             except ( | ||||
|                 docker.errors.APIError, | ||||
|                 ConnectionError, | ||||
|                 requests.exceptions.ConnectionError, | ||||
|                 trio.Cancelled, | ||||
|             ): | ||||
|                 log.exception('Docker connection failure') | ||||
|                 self.hard_kill(start) | ||||
|                 raise | ||||
| 
 | ||||
|             except trio.Cancelled: | ||||
|                 log.exception('trio cancelled...') | ||||
|                 self.hard_kill(start) | ||||
|         else: | ||||
|             hard_kill = True | ||||
| 
 | ||||
|  | @ -336,13 +270,11 @@ class Container: | |||
| async def open_ahabd( | ||||
|     ctx: tractor.Context, | ||||
|     endpoint: str,  # ns-pointer str-msg-type | ||||
|     loglevel: str | None = None, | ||||
| 
 | ||||
|     **ep_kwargs, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     log = get_console_log(loglevel or 'cancel') | ||||
|     get_console_log('info', name=__name__) | ||||
| 
 | ||||
|     async with open_docker() as client: | ||||
| 
 | ||||
|  | @ -353,84 +285,26 @@ async def open_ahabd( | |||
|         ( | ||||
|             dcntr, | ||||
|             cntr_config, | ||||
|             start_pred, | ||||
|             stop_pred, | ||||
|         ) = ep_func(client, **ep_kwargs) | ||||
|             start_msg, | ||||
|             stop_msg, | ||||
|         ) = ep_func(client) | ||||
|         cntr = Container(dcntr) | ||||
| 
 | ||||
|         conf: ChainMap[str, Any] = ChainMap( | ||||
|         with trio.move_on_after(1): | ||||
|             found = await cntr.process_logs_until(start_msg) | ||||
| 
 | ||||
|             # container specific | ||||
|             if not found and cntr not in client.containers.list(): | ||||
|                 raise RuntimeError( | ||||
|                     'Failed to start `marketstore`, check logs deats' | ||||
|                 ) | ||||
| 
 | ||||
|         await ctx.started(( | ||||
|             cntr.cntr.id, | ||||
|             os.getpid(), | ||||
|             cntr_config, | ||||
| 
 | ||||
|             # defaults | ||||
|             { | ||||
|                 # startup time limit which is the max the supervisor | ||||
|                 # will wait for the container to be registered in | ||||
|                 # ``client.containers.list()`` | ||||
|                 'startup_timeout': 1.0, | ||||
| 
 | ||||
|                 # how fast to poll for the startup predicate by sleeping | ||||
|                 # this amount incrementally, thus yielding to the | ||||
|                 # ``trio`` scheduler during sync polling execution. | ||||
|                 'startup_query_period': 0.001, | ||||
| 
 | ||||
|                 # str-key value expected to contain log message body-contents | ||||
|                 # when read using: | ||||
|                 # ``json.loads(entry for entry in DockerContainer.logs())`` | ||||
|                 'log_msg_key': 'msg', | ||||
| 
 | ||||
| 
 | ||||
|                 # startup sync func, like `Nursery.started()` | ||||
|                 'started_afunc': None, | ||||
|             }, | ||||
|         ) | ||||
|         )) | ||||
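The ``ChainMap`` gives the per-endpoint config first-lookup priority over the supervisor defaults; a quick semantics check (override value hypothetical):

```python
from collections import ChainMap

defaults = {'startup_timeout': 1.0, 'log_msg_key': 'msg'}
ep_conf = {'startup_timeout': 5.0}  # hypothetical slow-to-boot container

conf = ChainMap(ep_conf, defaults)
assert conf['startup_timeout'] == 5.0  # first map wins
assert conf['log_msg_key'] == 'msg'    # falls through to defaults
```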
| 
 | ||||
|         try: | ||||
|             with trio.move_on_after(conf['startup_timeout']) as cs: | ||||
|                 async with trio.open_nursery() as tn: | ||||
|                     tn.start_soon( | ||||
|                         partial( | ||||
|                             cntr.process_logs_until, | ||||
|                             log_msg_key=conf['log_msg_key'], | ||||
|                             patt_matcher=start_pred, | ||||
|                             checkpoint_period=conf['startup_query_period'], | ||||
|                         ) | ||||
|                     ) | ||||
| 
 | ||||
|                     # optional blocking routine | ||||
|                     started = conf['started_afunc'] | ||||
|                     if started: | ||||
|                         await started() | ||||
| 
 | ||||
|                     # poll for container startup or timeout | ||||
|                     while not cs.cancel_called: | ||||
|                         if dcntr in client.containers.list(): | ||||
|                             break | ||||
| 
 | ||||
|                         await trio.sleep(conf['startup_query_period']) | ||||
| 
 | ||||
|                     # sync with remote caller actor-task but allow log | ||||
|                     # processing to continue running in bg. | ||||
|                     await ctx.started(( | ||||
|                         cntr.cntr.id, | ||||
|                         os.getpid(), | ||||
|                         cntr_config, | ||||
|                     )) | ||||
| 
 | ||||
|                 # XXX: if we timeout on finding the "startup msg" we | ||||
|                 # expect then we want to FOR SURE raise an error | ||||
|                 # upwards! | ||||
|                 if cs.cancelled_caught: | ||||
|                     # if dcntr not in client.containers.list(): | ||||
|                     for entry in cntr.seen_so_far: | ||||
|                         log.info(entry) | ||||
| 
 | ||||
|                     raise DockerNotStarted( | ||||
|                         f'Failed to start container: {cntr.cuid}\n' | ||||
|                         f'due to timeout={conf["startup_timeout"]}s\n\n' | ||||
|                         "check ur container's logs!" | ||||
|                     ) | ||||
| 
 | ||||
|             # TODO: we might eventually want a proxy-style msg-prot here | ||||
|             # to allow remote control of containers without needing | ||||
|  | @ -438,32 +312,14 @@ async def open_ahabd( | |||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|         finally: | ||||
|             # TODO: ensure loglevel can be set and teardown logs are | ||||
|             # reported if possible on error or cancel.. | ||||
|             # XXX WARNING: currently shielding here can result in hangs | ||||
|             # on ctl-c from user.. ideally we can avoid a cancel getting | ||||
|             # consumed and not propagating whilst still doing teardown | ||||
|             # logging.. | ||||
|             # needed? | ||||
|             with trio.CancelScope(shield=True): | ||||
|                 await cntr.cancel( | ||||
|                     log_msg_key=conf['log_msg_key'], | ||||
|                     stop_predicate=stop_pred, | ||||
|                 ) | ||||
|                 await cntr.cancel(stop_msg) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def start_ahab_service( | ||||
|     services: Services, | ||||
| async def start_ahab( | ||||
|     service_name: str, | ||||
| 
 | ||||
|     # endpoint config passed as **kwargs | ||||
|     endpoint: Callable[docker.DockerClient, DockerContainer], | ||||
|     ep_kwargs: dict, | ||||
|     loglevel: str | None = 'cancel', | ||||
| 
 | ||||
|     # supervisor config | ||||
|     drop_root_perms: bool = True, | ||||
| 
 | ||||
|     task_status: TaskStatus[ | ||||
|         tuple[ | ||||
|             trio.Event, | ||||
|  | @ -482,17 +338,15 @@ async def start_ahab_service( | |||
|     is started. | ||||
| 
 | ||||
|     ''' | ||||
|     # global log | ||||
|     log = get_console_log(loglevel or 'cancel') | ||||
| 
 | ||||
|     cn_ready = trio.Event() | ||||
|     try: | ||||
|         async with tractor.open_nursery() as an: | ||||
|         async with tractor.open_nursery( | ||||
|             loglevel='runtime', | ||||
|         ) as tn: | ||||
| 
 | ||||
|             portal = await an.start_actor( | ||||
|             portal = await tn.start_actor( | ||||
|                 service_name, | ||||
|                 enable_modules=[__name__], | ||||
|                 loglevel=loglevel, | ||||
|                 enable_modules=[__name__] | ||||
|             ) | ||||
| 
 | ||||
|             # TODO: we have issues with this on teardown | ||||
|  | @ -502,10 +356,7 @@ async def start_ahab_service( | |||
| 
 | ||||
|             # de-escalate root perms to the original user | ||||
|             # after the docker supervisor actor is spawned. | ||||
|             if ( | ||||
|                 drop_root_perms | ||||
|                 and config._parent_user | ||||
|             ): | ||||
|             if config._parent_user: | ||||
|                 import pwd | ||||
|                 os.setuid( | ||||
|                     pwd.getpwnam( | ||||
|  | @ -513,28 +364,20 @@ async def start_ahab_service( | |||
|                     )[2]  # named user's uid | ||||
|                 ) | ||||
| 
 | ||||
|             cs, first = await services.start_service_task( | ||||
|                 name=service_name, | ||||
|                 portal=portal, | ||||
| 
 | ||||
|                 # rest: endpoint inputs | ||||
|                 target=open_ahabd, | ||||
|             async with portal.open_context( | ||||
|                 open_ahabd, | ||||
|                 endpoint=str(NamespacePath.from_ref(endpoint)), | ||||
|                 loglevel='cancel', | ||||
|                 **ep_kwargs, | ||||
|             ) | ||||
|             ) as (ctx, first): | ||||
| 
 | ||||
|             cid, pid, cntr_config = first | ||||
|                 cid, pid, cntr_config = first | ||||
| 
 | ||||
|             try: | ||||
|                 yield ( | ||||
|                 task_status.started(( | ||||
|                     cn_ready, | ||||
|                     cntr_config, | ||||
|                     (cid, pid), | ||||
|                 ) | ||||
|             finally: | ||||
|                 log.info(f'Cancelling ahab service `{service_name}`') | ||||
|                 await services.cancel_service(service_name) | ||||
|                 )) | ||||
| 
 | ||||
|                 await trio.sleep_forever() | ||||
| 
 | ||||
|     # since we demoted root perms in this parent | ||||
|     # we'll get a perms error on proc cleanup in | ||||
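For reference, an endpoint passed to the new ``start_ahab_service()`` must be a callable taking the docker client (plus ``**ep_kwargs``) and returning the 4-tuple that ``open_ahabd()`` unpacks; a hypothetical sketch (image name, port and log strings are made up):

```python
import docker

def start_echo_cntr(
    client: docker.DockerClient,
    **conf,
):
    cntr = client.containers.run(
        'hashicorp/http-echo',  # hypothetical image
        detach=True,
        ports={'5678/tcp': 5678},
    )

    async def start_pred(msg: str) -> bool:
        return 'server is listening' in msg

    async def stop_pred(msg: str) -> bool:
        return 'shutting down' in msg

    return cntr, conf, start_pred, stop_pred
```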
|  | @ -1,838 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| ''' | ||||
| Pre-(path)-graphics formatted x/y nd/1d rendering subsystem. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from typing import ( | ||||
|     Optional, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import msgspec | ||||
| from msgspec import field | ||||
| import numpy as np | ||||
| from numpy.lib import recfunctions as rfn | ||||
| 
 | ||||
| from ._sharedmem import ( | ||||
|     ShmArray, | ||||
| ) | ||||
| from ._pathops import ( | ||||
|     path_arrays_from_ohlc, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._dataviz import ( | ||||
|         Viz, | ||||
|     ) | ||||
|     from piker.toolz import Profiler | ||||
| 
 | ||||
| # default gap between bars: "bar gap multiplier" | ||||
| # - 0.5 is no overlap between OC arms, | ||||
| # - 1.0 is full overlap on each neighbor sample | ||||
| BGM: float = 0.16 | ||||
| 
 | ||||
| 
 | ||||
| class IncrementalFormatter(msgspec.Struct): | ||||
|     ''' | ||||
|     Incrementally updating, pre-path-graphics tracking, formatter. | ||||
| 
 | ||||
|     Allows tracking source data state in an updateable pre-graphics | ||||
|     ``np.ndarray`` format (in local process memory) as well as | ||||
|     incrementally rendering from that format **to** 1d x/y for path | ||||
|     generation using ``pg.functions.arrayToQPath()``. | ||||
| 
 | ||||
|     ''' | ||||
|     shm: ShmArray | ||||
|     viz: Viz | ||||
| 
 | ||||
|     # the value to be multiplied by any index into the x/y_1d arrays | ||||
|     # given the input index is based on the original source data array. | ||||
|     flat_index_ratio: float = 1 | ||||
| 
 | ||||
|     @property | ||||
|     def index_field(self) -> 'str': | ||||
|         ''' | ||||
|         Value (``str``) used to look up the "index series" from the | ||||
|         underlying source ``numpy`` struct-array; delegate directly to | ||||
|         the managing ``Viz``. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.viz.index_field | ||||
| 
 | ||||
|     # Incrementally updated xy ndarray formatted data, a pre-1d | ||||
|     # format which is updated and cached independently of the final | ||||
|     # pre-graphics-path 1d format. | ||||
|     x_nd: Optional[np.ndarray] = None | ||||
|     y_nd: Optional[np.ndarray] = None | ||||
| 
 | ||||
|     @property | ||||
|     def xy_nd(self) -> tuple[np.ndarray, np.ndarray]: | ||||
|         return ( | ||||
|             self.x_nd[self.xy_slice], | ||||
|             self.y_nd[self.xy_slice], | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def xy_slice(self) -> slice: | ||||
|         return slice( | ||||
|             self.xy_nd_start, | ||||
|             self.xy_nd_stop, | ||||
|         ) | ||||
| 
 | ||||
|     # indexes which slice into the above arrays (which are allocated | ||||
|     # based on source data shm input size) and allow retrieving | ||||
|     # incrementally updated data. | ||||
|     xy_nd_start: int | None = None | ||||
|     xy_nd_stop: int | None = None | ||||
| 
 | ||||
|     # TODO: eventually incrementally update 1d-pre-graphics path data? | ||||
|     x_1d: np.ndarray | None = None | ||||
|     y_1d: np.ndarray | None = None | ||||
| 
 | ||||
|     # incremental view-change state(s) tracking | ||||
|     _last_vr: tuple[float, float] | None = None | ||||
|     _last_ivdr: tuple[float, float] | None = None | ||||
| 
 | ||||
|     @property | ||||
|     def index_step_size(self) -> float: | ||||
|         ''' | ||||
|         Readonly value computed on first ``.diff()`` call. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.viz.index_step() | ||||
| 
 | ||||
|     def diff( | ||||
|         self, | ||||
|         new_read: tuple[np.ndarray], | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         np.ndarray, | ||||
|         np.ndarray, | ||||
|     ]: | ||||
|         # TODO: | ||||
|         # - can the renderer just call ``Viz.read()`` directly? unpack | ||||
|         #   latest source data read | ||||
|         # - eventually maybe we can implement some kind of | ||||
|         #   transform on the ``QPainterPath`` that will more or less | ||||
|         #   detect the diff in "elements" terms? update diff state since | ||||
|         #   we've now rendered paths. | ||||
|         ( | ||||
|             xfirst, | ||||
|             xlast, | ||||
|             array, | ||||
|             ivl, | ||||
|             ivr, | ||||
|             in_view, | ||||
|         ) = new_read | ||||
| 
 | ||||
|         index = array['index'] | ||||
| 
 | ||||
|         # if the first index in the read array is 0 then | ||||
|         # it means the source buffer has been completely backfilled to | ||||
|         # available space. | ||||
|         src_start = index[0] | ||||
|         src_stop = index[-1] + 1 | ||||
| 
 | ||||
|         # these are the "formatted output data" indices | ||||
|         # for the pre-graphics arrays. | ||||
|         nd_start = self.xy_nd_start | ||||
|         nd_stop = self.xy_nd_stop | ||||
| 
 | ||||
|         if ( | ||||
|             nd_start is None | ||||
|         ): | ||||
|             assert nd_stop is None | ||||
| 
 | ||||
|             # setup to do a prepend of all existing src history | ||||
|             nd_start = self.xy_nd_start = src_stop | ||||
|             # set us in a zero-to-append state | ||||
|             nd_stop = self.xy_nd_stop = src_stop | ||||
| 
 | ||||
|         # compute the length diffs between the first/last index entry in | ||||
|         # the input data and the last indexes we have on record from the | ||||
|         # last time we updated the curve index. | ||||
|         prepend_length = int(nd_start - src_start) | ||||
|         append_length = int(src_stop - nd_stop) | ||||
| 
 | ||||
|         # do the diffing for the prepend, append and last entry | ||||
|         return ( | ||||
|             slice(src_start, nd_start), | ||||
|             prepend_length, | ||||
|             append_length, | ||||
|             slice(nd_stop, src_stop), | ||||
|         ) | ||||
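A worked example of the slice arithmetic, assuming the nd arrays were last synced over source indices ``[100, 110)`` and a new read spans ``[95, 115)`` (5 datums backfilled, 5 appended):

```python
nd_start, nd_stop = 100, 110   # last-synced nd-array bounds
src_start, src_stop = 95, 115  # bounds of the fresh source read

pre_slice = slice(src_start, nd_start)   # slice(95, 100)
prepend_length = nd_start - src_start    # 5
append_length = src_stop - nd_stop       # 5
post_slice = slice(nd_stop, src_stop)    # slice(110, 115)
```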
| 
 | ||||
|     def _track_inview_range( | ||||
|         self, | ||||
|         view_range: tuple[int, int], | ||||
| 
 | ||||
|     ) -> bool: | ||||
|         # if a view range is passed, plan to draw the | ||||
|         # source ouput that's "in view" of the chart. | ||||
|         vl, vr = view_range | ||||
|         zoom_or_append = False | ||||
|         last_vr = self._last_vr | ||||
| 
 | ||||
|         # incremental in-view data update. | ||||
|         if last_vr: | ||||
|             lvl, lvr = last_vr  # relative slice indices | ||||
| 
 | ||||
|             # TODO: detecting more specifically the interaction changes | ||||
|             # last_ivr = self._last_ivdr or (vl, vr) | ||||
|             # al, ar = last_ivr  # abs slice indices | ||||
|             # left_change = abs(x_iv[0] - al) >= 1 | ||||
|             # right_change = abs(x_iv[-1] - ar) >= 1 | ||||
| 
 | ||||
|             # likely a zoom/pan view change or data append update | ||||
|             if ( | ||||
|                 (vr - lvr) > 2 | ||||
|                 or vl < lvl | ||||
| 
 | ||||
|                 # append / prepend update | ||||
|                 # we had an append update where the view range | ||||
|                 # didn't change but the data-viewed (shifted) | ||||
|                 # underneath, so we need to redraw. | ||||
|                 # or left_change and right_change and last_vr == view_range | ||||
| 
 | ||||
|                 # not (left_change and right_change) and ivr | ||||
|                 # ( | ||||
|                 # or abs(x_iv[ivr] - livr) > 1 | ||||
|             ): | ||||
|                 zoom_or_append = True | ||||
| 
 | ||||
|         self._last_vr = view_range | ||||
| 
 | ||||
|         return zoom_or_append | ||||
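Some hypothetical view-range transitions and what the branch above decides:

```python
lvl, lvr = 0, 100   # last recorded view range

# zoom-out: both edges moved enough -> re-render
vl, vr = -5, 103
assert (vr - lvr) > 2 or vl < lvl

# single-datum append: too small a shift -> no full redraw
vl, vr = 0, 101
assert not ((vr - lvr) > 2 or vl < lvl)
```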
| 
 | ||||
|     def format_to_1d( | ||||
|         self, | ||||
|         new_read: tuple, | ||||
|         array_key: str, | ||||
|         profiler: Profiler, | ||||
| 
 | ||||
|         slice_to_inview: bool = True, | ||||
|         force_full_realloc: bool = False, | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         np.ndarray, | ||||
|         np.ndarray, | ||||
|     ]: | ||||
|         shm = self.shm | ||||
| 
 | ||||
|         ( | ||||
|             _, | ||||
|             _, | ||||
|             array, | ||||
|             ivl, | ||||
|             ivr, | ||||
|             in_view, | ||||
| 
 | ||||
|         ) = new_read | ||||
| 
 | ||||
|         ( | ||||
|             pre_slice, | ||||
|             prepend_len, | ||||
|             append_len, | ||||
|             post_slice, | ||||
|         ) = self.diff(new_read) | ||||
| 
 | ||||
|         # we first need to allocate xy data arrays | ||||
|         # from the source data. | ||||
|         if ( | ||||
|             self.y_nd is None | ||||
|             or force_full_realloc | ||||
|         ): | ||||
|             self.xy_nd_start = shm._first.value | ||||
|             self.xy_nd_stop = shm._last.value | ||||
|             self.x_nd, self.y_nd = self.allocate_xy_nd( | ||||
|                 shm, | ||||
|                 array_key, | ||||
|             ) | ||||
|             profiler('allocated xy history') | ||||
| 
 | ||||
|         # once allocated we do incremental pre/append | ||||
|         # updates from the diff with the source buffer. | ||||
|         else: | ||||
|             if prepend_len: | ||||
| 
 | ||||
|                 self.incr_update_xy_nd( | ||||
|                     shm, | ||||
|                     array_key, | ||||
| 
 | ||||
|                     # this is the pre-sliced, "normally expected" | ||||
|                     # new data that an updater would normally be | ||||
|                     # expected to process, however in some cases (like | ||||
|                     # step curves) the updater routine may want to do | ||||
|                     # the source history-data reading itself, so we pass | ||||
|                     # both here. | ||||
|                     shm._array[pre_slice], | ||||
|                     pre_slice, | ||||
|                     prepend_len, | ||||
| 
 | ||||
|                     self.xy_nd_start, | ||||
|                     self.xy_nd_stop, | ||||
|                     is_append=False, | ||||
|                 ) | ||||
| 
 | ||||
|                 self.xy_nd_start -= prepend_len | ||||
|                 profiler(f'prepended xy history: {prepend_len}') | ||||
| 
 | ||||
|             if append_len: | ||||
|                 self.incr_update_xy_nd( | ||||
|                     shm, | ||||
|                     array_key, | ||||
| 
 | ||||
|                     shm._array[post_slice], | ||||
|                     post_slice, | ||||
|                     append_len, | ||||
| 
 | ||||
|                     self.xy_nd_start, | ||||
|                     self.xy_nd_stop, | ||||
|                     is_append=True, | ||||
|                 ) | ||||
|                 self.xy_nd_stop += append_len | ||||
|                 profiler(f'appended xy history: {append_len}') | ||||
|                 # sanity | ||||
|                 # slice_ln = post_slice.stop - post_slice.start | ||||
|                 # assert append_len == slice_ln | ||||
| 
 | ||||
|         view_changed: bool = False | ||||
|         view_range: tuple[int, int] = (ivl, ivr) | ||||
|         if slice_to_inview: | ||||
|             view_changed = self._track_inview_range(view_range) | ||||
|             array = in_view | ||||
|             profiler(f'{self.viz.name} view range slice {view_range}') | ||||
| 
 | ||||
|         # TODO: we need to check if the last-datum-in-view is true | ||||
|         # and if so only slice to the 2nd last datum. | ||||
|         # hist = array[:slice_to_head] | ||||
| 
 | ||||
|         # XXX: WOA WTF TRACTOR DEBUGGING BUGGG | ||||
|         # assert 0 | ||||
| 
 | ||||
|         # xy-path data transform: convert source data to a format | ||||
|         # able to be passed to a `QPainterPath` rendering routine. | ||||
|         if not len(array): | ||||
|             # XXX: this might be why the profiler only has exits? | ||||
|             return | ||||
| 
 | ||||
|         # TODO: hist here should be the pre-sliced | ||||
|         # x/y_data in the case where allocate_xy is | ||||
|         # defined? | ||||
|         x_1d, y_1d, connect = self.format_xy_nd_to_1d( | ||||
|             array, | ||||
|             array_key, | ||||
|             view_range, | ||||
|         ) | ||||
|         # cache/save last 1d outputs for use by other | ||||
|         # readers (eg. `Viz.draw_last_datum()` in the | ||||
|         # only-draw-last-uppx case). | ||||
|         self.x_1d = x_1d | ||||
|         self.y_1d = y_1d | ||||
| 
 | ||||
|         # app_tres = None | ||||
|         # if append_len: | ||||
|         #     appended = array[-append_len-1:slice_to_head] | ||||
|         #     app_tres = self.format_xy_nd_to_1d( | ||||
|         #         appended, | ||||
|         #         array_key, | ||||
|         #         ( | ||||
|         #             view_range[1] - append_len + slice_to_head, | ||||
|         #             view_range[1] | ||||
|         #         ), | ||||
|         #     ) | ||||
|         #     # assert (len(appended) - 1) == append_len | ||||
|         #     # assert len(appended) == append_len | ||||
|         #     print( | ||||
|         #         f'{self.viz.name} APPEND LEN: {append_len}\n' | ||||
|         #         f'{self.viz.name} APPENDED: {appended}\n' | ||||
|         #         f'{self.viz.name} app_tres: {app_tres}\n' | ||||
|         #     ) | ||||
| 
 | ||||
|         # update the last "in view data range" | ||||
|         if len(x_1d): | ||||
|             self._last_ivdr = x_1d[0], x_1d[-1] | ||||
| 
 | ||||
|         profiler('.format_to_1d()') | ||||
| 
 | ||||
|         return ( | ||||
|             x_1d, | ||||
|             y_1d, | ||||
|             connect, | ||||
|             prepend_len, | ||||
|             append_len, | ||||
|             view_changed, | ||||
|             # app_tres, | ||||
|         ) | ||||
| 
 | ||||
|     ############################### | ||||
|     # Sub-type override interface # | ||||
|     ############################### | ||||
| 
 | ||||
|     x_offset: np.ndarray = np.array([0]) | ||||
| 
 | ||||
|     # optional pre-graphics xy formatted data which | ||||
|     # is incrementally updated in sync with the source data. | ||||
|     # XXX: was ``.allocate_xy()`` | ||||
|     def allocate_xy_nd( | ||||
|         self, | ||||
|         src_shm: ShmArray, | ||||
|         data_field: str, | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         np.ndarray,  # x | ||||
|         np.ndarray  # y | ||||
|     ]: | ||||
|         ''' | ||||
|         Convert the structured-array ``src_shm`` format to | ||||
|         an equivalently shaped (and field-less) ``np.ndarray``. | ||||
| 
 | ||||
|         Eg. a 4 field x N struct-array => (N, 4) | ||||
| 
 | ||||
|         ''' | ||||
|         y_nd = src_shm._array[data_field].copy() | ||||
|         x_nd = ( | ||||
|             src_shm._array[self.index_field].copy() | ||||
|             + | ||||
|             self.x_offset | ||||
|         ) | ||||
|         return x_nd, y_nd | ||||
| 
 | ||||
|     # XXX: was ``.update_xy()`` | ||||
|     def incr_update_xy_nd( | ||||
|         self, | ||||
| 
 | ||||
|         src_shm: ShmArray, | ||||
|         data_field: str, | ||||
| 
 | ||||
|         new_from_src: np.ndarray,  # portion of source that was updated | ||||
| 
 | ||||
|         read_slc: slice, | ||||
|         ln: int,  # len of updated | ||||
| 
 | ||||
|         nd_start: int, | ||||
|         nd_stop: int, | ||||
| 
 | ||||
|         is_append: bool, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # write pushed data to flattened copy | ||||
|         y_nd_new = new_from_src[data_field] | ||||
|         self.y_nd[read_slc] = y_nd_new | ||||
| 
 | ||||
|         x_nd_new = self.x_nd[read_slc] | ||||
|         x_nd_new[:] = ( | ||||
|             new_from_src[self.index_field] | ||||
|             + | ||||
|             self.x_offset | ||||
|         ) | ||||
| 
 | ||||
|         # x_nd = self.x_nd[self.xy_slice] | ||||
|         # y_nd = self.y_nd[self.xy_slice] | ||||
|         # name = self.viz.name | ||||
|         # if 'trade_rate' == name: | ||||
|         #     s = 4 | ||||
|         #     print( | ||||
|         #         f'{name.upper()}:\n' | ||||
|         #         'NEW_FROM_SRC:\n' | ||||
|         #         f'new_from_src: {new_from_src}\n\n' | ||||
| 
 | ||||
|         #         f'PRE self.x_nd:' | ||||
|         #         f'\n{list(x_nd[-s:])}\n' | ||||
| 
 | ||||
|         #         f'PRE self.y_nd:\n' | ||||
|         #         f'{list(y_nd[-s:])}\n\n' | ||||
| 
 | ||||
|         #         f'TO WRITE:\n' | ||||
| 
 | ||||
|         #         f'x_nd_new:\n' | ||||
|         #         f'{x_nd_new[0]}\n' | ||||
| 
 | ||||
|         #         f'y_nd_new:\n' | ||||
|         #         f'{y_nd_new}\n' | ||||
|         #     ) | ||||
| 
 | ||||
|     # XXX: was ``.format_xy()`` | ||||
|     def format_xy_nd_to_1d( | ||||
|         self, | ||||
| 
 | ||||
|         array: np.ndarray, | ||||
|         array_key: str, | ||||
|         vr: tuple[int, int], | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         np.ndarray,  # 1d x | ||||
|         np.ndarray,  # 1d y | ||||
|         np.ndarray | str,  # connection array/style | ||||
|     ]: | ||||
|         ''' | ||||
|         Default xy-nd array to 1d pre-graphics-path render routine. | ||||
| 
 | ||||
|         Return single field column data verbatim | ||||
| 
 | ||||
|         ''' | ||||
|         # NOTE: we don't include the very last datum which is filled in | ||||
|         # normally by another graphics object. | ||||
|         x_1d = array[self.index_field][:-1] | ||||
|         y_1d = array[array_key][:-1] | ||||
| 
 | ||||
|         # name = self.viz.name | ||||
|         # if 'trade_rate' == name: | ||||
|         #     s = 4 | ||||
|         #     x_nd = list(self.x_nd[self.xy_slice][-s:-1]) | ||||
|         #     y_nd = list(self.y_nd[self.xy_slice][-s:-1]) | ||||
|         #     print( | ||||
|         #         f'{name}:\n' | ||||
|         #         f'XY data:\n' | ||||
|         #         f'x: {x_nd}\n' | ||||
|         #         f'y: {y_nd}\n\n' | ||||
|         #         f'x_1d: {list(x_1d[-s:])}\n' | ||||
|         #         f'y_1d: {list(y_1d[-s:])}\n\n' | ||||
| 
 | ||||
|         #     ) | ||||
|         return ( | ||||
|             x_1d, | ||||
|             y_1d, | ||||
| 
 | ||||
|             # 1d connection array or style-key to | ||||
|             # ``pg.functions.arrayToQPath()`` | ||||
|             'all', | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| class OHLCBarsFmtr(IncrementalFormatter): | ||||
| 
 | ||||
|     x_offset: np.ndarray = np.array([ | ||||
|         -0.5, | ||||
|         0, | ||||
|         0, | ||||
|         0.5, | ||||
|     ]) | ||||
| 
 | ||||
|     fields: list[str] = field( | ||||
|         default_factory=lambda: ['open', 'high', 'low', 'close'] | ||||
|     ) | ||||
|     flat_index_ratio: float = 4 | ||||
| 
 | ||||
|     def allocate_xy_nd( | ||||
|         self, | ||||
| 
 | ||||
|         ohlc_shm: ShmArray, | ||||
|         data_field: str, | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         np.ndarray,  # x | ||||
|         np.ndarray  # y | ||||
|     ]: | ||||
|         ''' | ||||
|         Convert an input struct-array holding OHLC samples into a pair of | ||||
|         flattened x, y arrays with the same size (datums wise) as the source | ||||
|         data. | ||||
| 
 | ||||
|         ''' | ||||
|         y_nd = ohlc_shm.ustruct(self.fields) | ||||
| 
 | ||||
|         # generate a flat-interpolated x-domain | ||||
|         x_nd = ( | ||||
|             np.broadcast_to( | ||||
|                 ohlc_shm._array[self.index_field][:, None], | ||||
|                 ( | ||||
|                     ohlc_shm._array.size, | ||||
|                     # 4,  # only ohlc | ||||
|                     y_nd.shape[1], | ||||
|                 ), | ||||
|             ) | ||||
|             + | ||||
|             self.x_offset | ||||
|         ) | ||||
|         assert y_nd.any() | ||||
| 
 | ||||
|         # write pushed data to flattened copy | ||||
|         return ( | ||||
|             x_nd, | ||||
|             y_nd, | ||||
|         ) | ||||
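A tiny numeric check of the broadcast above (3 source rows, the 4 per-arm x-offsets from this formatter):

```python
import numpy as np

index = np.array([7, 8, 9])             # source 'index' column, N = 3
x_offset = np.array([-0.5, 0, 0, 0.5])  # OHLC arm offsets

x_nd = np.broadcast_to(index[:, None], (3, 4)) + x_offset
# x_nd[0] -> [6.5, 7., 7., 7.5]: left arm, open/close bodies, right arm
```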
| 
 | ||||
|     def incr_update_xy_nd( | ||||
|         self, | ||||
| 
 | ||||
|         src_shm: ShmArray, | ||||
|         data_field: str, | ||||
| 
 | ||||
|         new_from_src: np.ndarray,  # portion of source that was updated | ||||
| 
 | ||||
|         read_slc: slice, | ||||
|         ln: int,  # len of updated | ||||
| 
 | ||||
|         nd_start: int, | ||||
|         nd_stop: int, | ||||
| 
 | ||||
|         is_append: bool, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # write newly pushed data to flattened copy | ||||
|         # a struct-arr is always passed in. | ||||
|         new_y_nd = rfn.structured_to_unstructured( | ||||
|             new_from_src[self.fields] | ||||
|         ) | ||||
|         self.y_nd[read_slc] = new_y_nd | ||||
| 
 | ||||
|         # generate same-valued-per-row x support based on y shape | ||||
|         x_nd_new = self.x_nd[read_slc] | ||||
|         x_nd_new[:] = np.broadcast_to( | ||||
|             new_from_src[self.index_field][:, None], | ||||
|             new_y_nd.shape, | ||||
|         ) + self.x_offset | ||||
| 
 | ||||
|     # TODO: can we drop this frame and just use the above? | ||||
|     def format_xy_nd_to_1d( | ||||
|         self, | ||||
| 
 | ||||
|         array: np.ndarray, | ||||
|         array_key: str, | ||||
|         vr: tuple[int, int], | ||||
| 
 | ||||
|         start: int = 0,  # XXX: do we need this? | ||||
| 
 | ||||
|         # 0.5 is no overlap between arms, 1.0 is full overlap | ||||
|         gap: float = BGM, | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         np.ndarray, | ||||
|         np.ndarray, | ||||
|         np.ndarray, | ||||
|     ]: | ||||
|         ''' | ||||
|         More or less direct proxy to the ``numba``-fied | ||||
|         ``path_arrays_from_ohlc()`` (above) but with closed in kwargs | ||||
|         for line spacing. | ||||
| 
 | ||||
|         ''' | ||||
|         x, y, c = path_arrays_from_ohlc( | ||||
|             array[:-1], | ||||
|             start, | ||||
|             bar_w=self.index_step_size, | ||||
|             bar_gap=gap * self.index_step_size, | ||||
| 
 | ||||
|             # XXX: don't ask, due to a ``numba`` bug.. | ||||
|             use_time_index=(self.index_field == 'time'), | ||||
|         ) | ||||
|         return x, y, c | ||||
| 
 | ||||
| 
 | ||||
| class OHLCBarsAsCurveFmtr(OHLCBarsFmtr): | ||||
| 
 | ||||
|     def format_xy_nd_to_1d( | ||||
|         self, | ||||
| 
 | ||||
|         array: np.ndarray, | ||||
|         array_key: str, | ||||
|         vr: tuple[int, int], | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         np.ndarray, | ||||
|         np.ndarray, | ||||
|         str, | ||||
|     ]: | ||||
|         # TODO: in the case of an existing ``.update_xy()`` | ||||
|         # should we be passing in array as an xy arrays tuple? | ||||
| 
 | ||||
|         # 2 more datum-indexes to capture zero at end | ||||
|         x_flat = self.x_nd[self.xy_nd_start:self.xy_nd_stop-1] | ||||
|         y_flat = self.y_nd[self.xy_nd_start:self.xy_nd_stop-1] | ||||
| 
 | ||||
|         # slice to view | ||||
|         ivl, ivr = vr | ||||
|         x_iv_flat = x_flat[ivl:ivr] | ||||
|         y_iv_flat = y_flat[ivl:ivr] | ||||
| 
 | ||||
|         # reshape to 1d for graphics rendering | ||||
|         y_iv = y_iv_flat.reshape(-1) | ||||
|         x_iv = x_iv_flat.reshape(-1) | ||||
| 
 | ||||
|         return x_iv, y_iv, 'all' | ||||
| 
 | ||||
| 
 | ||||
| class StepCurveFmtr(IncrementalFormatter): | ||||
| 
 | ||||
|     x_offset: np.ndarray = np.array([ | ||||
|         0, | ||||
|         1, | ||||
|     ]) | ||||
| 
 | ||||
|     def allocate_xy_nd( | ||||
|         self, | ||||
| 
 | ||||
|         shm: ShmArray, | ||||
|         data_field: str, | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         np.ndarray,  # x | ||||
|         np.ndarray  # y | ||||
|     ]: | ||||
|         ''' | ||||
|         Convert an input 1d shm array to a "step array" format | ||||
|         for use by path graphics generation. | ||||
| 
 | ||||
|         ''' | ||||
|         i = shm._array[self.index_field].copy() | ||||
|         out = shm._array[data_field].copy() | ||||
| 
 | ||||
|         x_out = ( | ||||
|             np.broadcast_to( | ||||
|                 i[:, None], | ||||
|                 (i.size, 2), | ||||
|             ) | ||||
|             + | ||||
|             self.x_offset | ||||
|         ) | ||||
| 
 | ||||
|         # fill out Nx2 array to hold each step's left + right vertices. | ||||
|         y_out = np.empty( | ||||
|             x_out.shape, | ||||
|             dtype=out.dtype, | ||||
|         ) | ||||
|         # fill in (current) values from source shm buffer | ||||
|         y_out[:] = out[:, np.newaxis] | ||||
| 
 | ||||
|         # TODO: pretty sure we can drop this? | ||||
|         # start y at origin level | ||||
|         # y_out[0, 0] = 0 | ||||
|         # y_out[self.xy_nd_start] = 0 | ||||
|         return x_out, y_out | ||||
| 
 | ||||
|     def incr_update_xy_nd( | ||||
|         self, | ||||
| 
 | ||||
|         src_shm: ShmArray, | ||||
|         array_key: str, | ||||
| 
 | ||||
|         new_from_src: np.ndarray,  # portion of source that was updated | ||||
|         read_slc: slice, | ||||
|         ln: int,  # len of updated | ||||
| 
 | ||||
|         nd_start: int, | ||||
|         nd_stop: int, | ||||
| 
 | ||||
|         is_append: bool, | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         np.ndarray, | ||||
|         slice, | ||||
|     ]: | ||||
|         # NOTE: for a step curve we slice from one datum prior | ||||
|         # to the current "update slice" to get the previous | ||||
|         # "level". | ||||
|         # | ||||
|         # why this is needed: | ||||
|         # - the current new append slice will often have a zero | ||||
|         #   value in the latest datum-step (at least for zero-on-new | ||||
|         #   cases like vlm) as per configuration of the FSP engine. | ||||
|         # - we need to look back a datum to get the last level which | ||||
|         #   will be used to terminate/complete the last step x-width, | ||||
|         #   set to pair with the last x-index. | ||||
|         # | ||||
|         # XXX: this means WE CAN'T USE the append slice since we need to | ||||
|         # "look backward" one step to get the needed back-to-zero level | ||||
|         # and the update data in ``new_from_src`` will only contain the | ||||
|         # latest new data. | ||||
|         back_1 = slice( | ||||
|             read_slc.start - 1, | ||||
|             read_slc.stop, | ||||
|         ) | ||||
| 
 | ||||
|         to_write = src_shm._array[back_1] | ||||
|         y_nd_new = self.y_nd[back_1] | ||||
|         y_nd_new[:] = to_write[array_key][:, None] | ||||
| 
 | ||||
|         x_nd_new = self.x_nd[read_slc] | ||||
|         x_nd_new[:] = ( | ||||
|             new_from_src[self.index_field][:, None] | ||||
|             + | ||||
|             self.x_offset | ||||
|         ) | ||||
| 
 | ||||
|         # XXX: uncomment for debugging | ||||
|         # x_nd = self.x_nd[self.xy_slice] | ||||
|         # y_nd = self.y_nd[self.xy_slice] | ||||
|         # name = self.viz.name | ||||
|         # if 'dolla_vlm' in name: | ||||
|         #     s = 4 | ||||
|         #     print( | ||||
|         #         f'{name}:\n' | ||||
|         #         'NEW_FROM_SRC:\n' | ||||
|         #         f'new_from_src: {new_from_src}\n\n' | ||||
| 
 | ||||
|         #         f'PRE self.x_nd:' | ||||
|         #         f'\n{x_nd[-s:]}\n' | ||||
|         #         f'PRE self.y_nd:\n' | ||||
|         #         f'{y_nd[-s:]}\n\n' | ||||
| 
 | ||||
|         #         f'TO WRITE:\n' | ||||
|         #         f'x_nd_new:\n' | ||||
|         #         f'{x_nd_new}\n' | ||||
|         #         f'y_nd_new:\n' | ||||
|         #         f'{y_nd_new}\n' | ||||
|         #     ) | ||||
| 
 | ||||
|     def format_xy_nd_to_1d( | ||||
|         self, | ||||
| 
 | ||||
|         array: np.ndarray, | ||||
|         array_key: str, | ||||
|         vr: tuple[int, int], | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         np.ndarray, | ||||
|         np.ndarray, | ||||
|         str, | ||||
|     ]: | ||||
|         last_t, last = array[-1][[self.index_field, array_key]] | ||||
| 
 | ||||
|         start = self.xy_nd_start | ||||
|         stop = self.xy_nd_stop | ||||
| 
 | ||||
|         x_step = self.x_nd[start:stop] | ||||
|         y_step = self.y_nd[start:stop] | ||||
| 
 | ||||
|         # slice out in-view data | ||||
|         ivl, ivr = vr | ||||
| 
 | ||||
|         # NOTE: add an extra step to get the vertical-line-down-to-zero | ||||
|         # adjacent to the last-datum graphic (filled rect). | ||||
|         x_step_iv = x_step[ivl:ivr+1] | ||||
|         y_step_iv = y_step[ivl:ivr+1] | ||||
| 
 | ||||
|         # flatten to 1d | ||||
|         x_1d = x_step_iv.reshape(x_step_iv.size) | ||||
|         y_1d = y_step_iv.reshape(y_step_iv.size) | ||||
| 
 | ||||
|         # debugging | ||||
|         # if y_1d.any(): | ||||
|         #     s = 6 | ||||
|         #     print( | ||||
|         #         f'x_step_iv:\n{x_step_iv[-s:]}\n' | ||||
|         #         f'y_step_iv:\n{y_step_iv[-s:]}\n\n' | ||||
|         #         f'x_1d:\n{x_1d[-s:]}\n' | ||||
|         #         f'y_1d:\n{y_1d[-s:]}\n' | ||||
|         #     ) | ||||
| 
 | ||||
|         return x_1d, y_1d, 'all' | ||||
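
A minimal standalone sketch (toy stand-in data; the real code above reads from a ``ShmArray`` buffer) of the step-expansion ``StepCurveFmtr`` performs: each 1d sample becomes a left/right vertex pair so the rendered path draws flat "steps":

    import numpy as np

    i = np.arange(4)                    # toy index (x-domain) values
    vlm = np.array([10., 0., 5., 7.])   # toy 1d samples, eg. volume

    # each index becomes a (left, right) vertex pair: shape (N, 2)
    x_out = np.broadcast_to(i[:, None], (i.size, 2)) + np.array([0, 1])

    # duplicate each sample across both vertices of its step
    y_out = np.empty(x_out.shape, dtype=vlm.dtype)
    y_out[:] = vlm[:, np.newaxis]

    # flatten to the 1d arrays handed to the path generator
    x_1d = x_out.reshape(x_out.size)    # [0, 1, 1, 2, 2, 3, 3, 4]
    y_1d = y_out.reshape(y_out.size)    # [10, 10, 0, 0, 5, 5, 7, 7]
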
|  | @ -0,0 +1,82 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Stream format enforcement. | ||||
| 
 | ||||
| ''' | ||||
| from itertools import chain | ||||
| from typing import Iterator | ||||
| 
 | ||||
| 
 | ||||
| def iterticks( | ||||
|     quote: dict, | ||||
|     types: tuple[str] = ( | ||||
|         'trade', | ||||
|         'dark_trade', | ||||
|     ), | ||||
|     deduplicate_darks: bool = False, | ||||
| 
 | ||||
| ) -> Iterator[dict]: | ||||
|     ''' | ||||
|     Iterate through ticks delivered per quote cycle. | ||||
| 
 | ||||
|     ''' | ||||
|     if deduplicate_darks: | ||||
|         assert 'dark_trade' in types | ||||
| 
 | ||||
|     # print(f"{quote}\n\n") | ||||
|     ticks = quote.get('ticks', ()) | ||||
|     trades = {} | ||||
|     darks = {} | ||||
| 
 | ||||
|     if ticks: | ||||
| 
 | ||||
|         # do a first pass and attempt to remove duplicate dark | ||||
|         # trades with the same tick signature. | ||||
|         if deduplicate_darks: | ||||
|             for tick in ticks: | ||||
|                 ttype = tick.get('type') | ||||
| 
 | ||||
|                 time = tick.get('time', None) | ||||
|                 if time: | ||||
|                     sig = ( | ||||
|                         time, | ||||
|                         tick['price'], | ||||
|                         tick.get('size') | ||||
|                     ) | ||||
| 
 | ||||
|                     if ttype == 'dark_trade': | ||||
|                         darks[sig] = tick | ||||
| 
 | ||||
|                     elif ttype == 'trade': | ||||
|                         trades[sig] = tick | ||||
| 
 | ||||
|             # filter duplicates | ||||
|             for sig in trades: | ||||
|                 dark = darks.pop(sig, None) | ||||
|                 if dark: | ||||
|                     ticks.remove(dark) | ||||
|                     # print(f'DUPLICATE {dark}') | ||||
| 
 | ||||
|             # re-insert ticks | ||||
|             ticks.extend(list(chain(trades.values(), darks.values()))) | ||||
| 
 | ||||
|         for tick in ticks: | ||||
|             # print(f"{quote['symbol']}: {tick}") | ||||
|             ttype = tick.get('type') | ||||
|             if ttype in types: | ||||
|                 yield tick | ||||
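
A usage sketch of ``iterticks()`` above (the quote contents are made up): fold dark trades in with the trade stream while de-duplicating any dark tick carrying the same (time, price, size) signature:

    quote = {
        'symbol': 'xbtusd.kraken',  # hypothetical fqsn
        'ticks': [
            {'type': 'trade', 'time': 1666000000, 'price': 19000.5, 'size': 1},
            {'type': 'dark_trade', 'time': 1666000000, 'price': 19000.5, 'size': 1},
            {'type': 'bid', 'price': 19000.0, 'size': 4},
        ],
    }

    for tick in iterticks(quote, deduplicate_darks=True):
        # the duplicate dark_trade is dropped and the 'bid' tick is
        # filtered out since its type is not in ``types``.
        print(tick['type'], tick['price'])
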
|  | @ -1,281 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| """ | ||||
| Super fast ``QPainterPath`` generation related operator routines. | ||||
| 
 | ||||
| """ | ||||
| import numpy as np | ||||
| from numpy.lib import recfunctions as rfn | ||||
| from numba import ( | ||||
|     # types, | ||||
|     njit, | ||||
|     float64, | ||||
|     int64, | ||||
|     # optional, | ||||
| ) | ||||
| 
 | ||||
| # TODO: for ``numba`` typing.. | ||||
| # from ._source import numba_ohlc_dtype | ||||
| from ._m4 import ds_m4 | ||||
| 
 | ||||
| 
 | ||||
| def xy_downsample( | ||||
|     x, | ||||
|     y, | ||||
|     uppx, | ||||
| 
 | ||||
|     x_spacer: float = 0.5, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     np.ndarray, | ||||
|     np.ndarray, | ||||
|     float, | ||||
|     float, | ||||
| ]: | ||||
|     ''' | ||||
|     Downsample 1D (flat ``numpy.ndarray``) arrays using M4 given an input | ||||
|     ``uppx`` (units-per-pixel) and add space between discrete datums. | ||||
| 
 | ||||
|     ''' | ||||
|     # downsample whenever more than 1 pixel per datum can be shown. | ||||
|     # always refresh data bounds until we get diffing | ||||
|     # working properly, see above.. | ||||
|     m4_out = ds_m4( | ||||
|         x, | ||||
|         y, | ||||
|         uppx, | ||||
|     ) | ||||
| 
 | ||||
|     if m4_out is not None: | ||||
|         bins, x, y, ymn, ymx = m4_out | ||||
|         # flatten output to 1d arrays suitable for path-graphics generation. | ||||
|         x = np.broadcast_to(x[:, None], y.shape) | ||||
|         x = (x + np.array( | ||||
|             [-x_spacer, 0, 0, x_spacer] | ||||
|         )).flatten() | ||||
|         y = y.flatten() | ||||
| 
 | ||||
|         return x, y, ymn, ymx | ||||
| 
 | ||||
|     # XXX: we accept a None output for the case where the input range | ||||
|     # to ``ds_m4()`` is bad (-ve) and we want to catch and debug | ||||
|     # that (seemingly super rare) circumstance.. | ||||
|     return None | ||||
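
The x-spacing flatten above can be seen standalone (toy arrays standing in for real ``ds_m4()`` output, whose 4 columns are the M4 values per bin):

    import numpy as np

    x = np.array([0., 1., 2.])   # one x per downsampled bin
    y = np.random.rand(3, 4)     # stand-in for M4 output columns

    x4 = np.broadcast_to(x[:, None], y.shape)
    x_flat = (x4 + np.array([-0.5, 0, 0, 0.5])).flatten()
    y_flat = y.flatten()
    assert x_flat.size == y_flat.size == 12
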
| 
 | ||||
| 
 | ||||
| @njit( | ||||
|     # NOTE: need to construct this manually for readonly | ||||
|     # arrays, see https://github.com/numba/numba/issues/4511 | ||||
|     # ( | ||||
|     #     types.Array( | ||||
|     #         numba_ohlc_dtype, | ||||
|     #         1, | ||||
|     #         'C', | ||||
|     #         readonly=True, | ||||
|     #     ), | ||||
|     #     int64, | ||||
|     #     types.unicode_type, | ||||
|     #     optional(float64), | ||||
|     # ), | ||||
|     nogil=True | ||||
| ) | ||||
| def path_arrays_from_ohlc( | ||||
|     data: np.ndarray, | ||||
|     start: int64, | ||||
|     bar_w: float64, | ||||
|     bar_gap: float64 = 0.16, | ||||
|     use_time_index: bool = True, | ||||
| 
 | ||||
|     # XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622 | ||||
|     # index_field: str, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     np.ndarray, | ||||
|     np.ndarray, | ||||
|     np.ndarray, | ||||
| ]: | ||||
|     ''' | ||||
|     Generate an array of lines objects from input ohlc data. | ||||
| 
 | ||||
|     ''' | ||||
|     size = int(data.shape[0] * 6) | ||||
| 
 | ||||
|     # XXX: see this for why the dtype might have to be defined outside | ||||
|     # the routine. | ||||
|     # https://github.com/numba/numba/issues/4098#issuecomment-493914533 | ||||
|     x = np.zeros( | ||||
|         shape=size, | ||||
|         dtype=float64, | ||||
|     ) | ||||
|     y, c = x.copy(), x.copy() | ||||
| 
 | ||||
|     half_w: float = bar_w/2 | ||||
| 
 | ||||
|     # TODO: report bug for assert @ | ||||
|     # ../piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991 | ||||
|     for i, q in enumerate(data[start:], start): | ||||
| 
 | ||||
|         open = q['open'] | ||||
|         high = q['high'] | ||||
|         low = q['low'] | ||||
|         close = q['close'] | ||||
| 
 | ||||
|         if use_time_index: | ||||
|             index = float64(q['time']) | ||||
|         else: | ||||
|             index = float64(q['index']) | ||||
| 
 | ||||
|         # XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622 | ||||
|         # index = float64(q[index_field]) | ||||
|         # AND this (probably) | ||||
|         # open, high, low, close, index = q[ | ||||
|         #     ['open', 'high', 'low', 'close', 'index']] | ||||
| 
 | ||||
|         istart = i * 6 | ||||
|         istop = istart + 6 | ||||
| 
 | ||||
|         # x,y detail the 6 points which connect all vertices of an ohlc bar | ||||
|         mid: float = index + half_w | ||||
|         x[istart:istop] = ( | ||||
|             index + bar_gap, | ||||
|             mid, | ||||
|             mid, | ||||
|             mid, | ||||
|             mid, | ||||
|             index + bar_w - bar_gap, | ||||
|         ) | ||||
|         y[istart:istop] = ( | ||||
|             open, | ||||
|             open, | ||||
|             low, | ||||
|             high, | ||||
|             close, | ||||
|             close, | ||||
|         ) | ||||
| 
 | ||||
|         # specifies that the first edge is never connected to the | ||||
|         # prior bar's last edge thus providing a small "gap"/"space" | ||||
|         # between bars determined by ``bar_gap``. | ||||
|         c[istart:istop] = (1, 1, 1, 1, 1, 0) | ||||
| 
 | ||||
|     return x, y, c | ||||
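
For reference, the 6-vertex layout generated per bar above, sketched for a single toy bar without the ``numba`` machinery (all values illustrative):

    index, bar_w, bar_gap = 100.0, 1.0, 0.16
    open_, high, low, close = 10.0, 12.0, 9.0, 11.0

    mid = index + bar_w / 2
    x = (index + bar_gap, mid, mid, mid, mid, index + bar_w - bar_gap)
    y = (open_, open_, low, high, close, close)
    # the trailing 0 breaks the line to the next bar, leaving the
    # ``bar_gap`` spacing between bars.
    c = (1, 1, 1, 1, 1, 0)
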
| 
 | ||||
| 
 | ||||
| def hl2mxmn( | ||||
|     ohlc: np.ndarray, | ||||
|     index_field: str = 'index', | ||||
| 
 | ||||
| ) -> np.ndarray: | ||||
|     ''' | ||||
|     Convert an OHLC struct-array containing 'high'/'low' columns | ||||
|     to a "joined" max/min 1-d array. | ||||
| 
 | ||||
|     ''' | ||||
|     index = ohlc[index_field] | ||||
|     hls = ohlc[[ | ||||
|         'low', | ||||
|         'high', | ||||
|     ]] | ||||
| 
 | ||||
|     mxmn = np.empty(2*hls.size, dtype=np.float64) | ||||
|     x = np.empty(2*hls.size, dtype=np.float64) | ||||
|     trace_hl(hls, mxmn, x, index[0]) | ||||
|     x = x + index[0] | ||||
| 
 | ||||
|     return mxmn, x | ||||
| 
 | ||||
| 
 | ||||
| @njit( | ||||
|     # TODO: the type annots.. | ||||
|     # float64[:](float64[:],), | ||||
| ) | ||||
| def trace_hl( | ||||
|     hl: 'np.ndarray', | ||||
|     out: np.ndarray, | ||||
|     x: np.ndarray, | ||||
|     start: int, | ||||
| 
 | ||||
|     # the "offset" values in the x-domain which | ||||
|     # place the 2 output points around each ``int`` | ||||
|     # master index. | ||||
|     margin: float = 0.43, | ||||
| 
 | ||||
| ) -> np.ndarray: | ||||
|     ''' | ||||
|     "Trace" the outline of the high-low values of an ohlc sequence | ||||
|     as a line such that the maximum deviation (aka dispersion) between | ||||
|     bars is preserved. | ||||
| 
 | ||||
|     This routine is expected to modify input arrays in-place. | ||||
| 
 | ||||
|     ''' | ||||
|     last_l = hl['low'][0] | ||||
|     last_h = hl['high'][0] | ||||
| 
 | ||||
|     for i in range(hl.size): | ||||
|         row = hl[i] | ||||
|         lo, hi = row['low'], row['high'] | ||||
| 
 | ||||
|         up_diff = hi - last_l | ||||
|         down_diff = last_h - lo | ||||
| 
 | ||||
|         if up_diff > down_diff: | ||||
|             out[2*i + 1] = hi | ||||
|             out[2*i] = last_l | ||||
|         else: | ||||
|             out[2*i + 1] = lo | ||||
|             out[2*i] = last_h | ||||
| 
 | ||||
|         last_l = lo | ||||
|         last_h = hi | ||||
| 
 | ||||
|         x[2*i] = int(i) - margin | ||||
|         x[2*i + 1] = int(i) + margin | ||||
| 
 | ||||
|     return out | ||||
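
A toy invocation of ``hl2mxmn()``/``trace_hl()`` above (assuming ``numba`` accepts the multi-field 'low'/'high' view, as this codebase relies on):

    import numpy as np

    ohlc = np.array(
        [(0, 9., 11.), (1, 10., 14.), (2, 8., 12.)],
        dtype=[('index', int), ('low', float), ('high', float)],
    )
    mxmn, x = hl2mxmn(ohlc)
    # 2 output points per bar trace the largest high/low deviation
    # relative to the prior bar.
    assert mxmn.size == x.size == 2 * ohlc.size
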
| 
 | ||||
| 
 | ||||
| def ohlc_flatten( | ||||
|     ohlc: np.ndarray, | ||||
|     use_mxmn: bool = True, | ||||
|     index_field: str = 'index', | ||||
| 
 | ||||
| ) -> tuple[np.ndarray, np.ndarray]: | ||||
|     ''' | ||||
|     Convert an OHLCV struct-array into a flat ready-for-line-plotting | ||||
|     1-d array that is 4 times the size with x-domain values distributed | ||||
|     evenly (by 0.5 steps) over each index. | ||||
| 
 | ||||
|     ''' | ||||
|     index = ohlc[index_field] | ||||
| 
 | ||||
|     if use_mxmn: | ||||
|         # traces a line optimally over highs to lows | ||||
|         # using numba. NOTE: pretty sure this is faster | ||||
|         # and looks about the same as the below output. | ||||
|         flat, x = hl2mxmn(ohlc) | ||||
| 
 | ||||
|     else: | ||||
|         flat = rfn.structured_to_unstructured( | ||||
|             ohlc[['open', 'high', 'low', 'close']] | ||||
|         ).flatten() | ||||
| 
 | ||||
|         x = np.linspace( | ||||
|             start=index[0] - 0.5, | ||||
|             stop=index[-1] + 0.5, | ||||
|             num=len(flat), | ||||
|         ) | ||||
|     return x, flat | ||||
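
A quick shape check of the non-``use_mxmn`` branch above (zeroed toy data):

    import numpy as np

    ohlc = np.zeros(3, dtype=[
        ('index', int), ('open', float), ('high', float),
        ('low', float), ('close', float),
    ])
    ohlc['index'] = np.arange(3)

    x, flat = ohlc_flatten(ohlc, use_mxmn=False)
    assert flat.size == 4 * ohlc.size
    # x spans [index[0] - 0.5, index[-1] + 0.5] evenly
    assert x[0] == -0.5 and x[-1] == 2.5
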
										
											
(File diff suppressed because it is too large.)
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -32,17 +32,25 @@ import numpy as np | |||
| from numpy.lib import recfunctions as rfn | ||||
| import tractor | ||||
| 
 | ||||
| from ._util import log | ||||
| from ._source import def_iohlcv_fields | ||||
| from piker.types import Struct | ||||
| from ..log import get_logger | ||||
| from ._source import base_iohlc_dtype | ||||
| from .types import Struct | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| # how much is probably dependent on lifestyle | ||||
| _secs_in_day = int(60 * 60 * 24) | ||||
| # we try for a buncha times, but only on a run-every-other-day kinda week. | ||||
| _days_worth = 16 | ||||
| _default_size = _days_worth * _secs_in_day | ||||
| # where to start the new data append index | ||||
| _rt_buffer_start = int((_days_worth - 1) * _secs_in_day) | ||||
| 
 | ||||
| 
 | ||||
| def cuckoff_mantracker(): | ||||
|     ''' | ||||
|     Disable all ``multiprocessing`` "resource tracking" machinery since | ||||
|     it's an absolute multi-threaded mess of non-SC madness. | ||||
| 
 | ||||
|     ''' | ||||
|     from multiprocessing import resource_tracker as mantracker | ||||
| 
 | ||||
|     # Tell the "resource tracker" thing to fuck off. | ||||
|  | @ -61,6 +69,7 @@ def cuckoff_mantracker(): | |||
|     mantracker._resource_tracker = ManTracker() | ||||
|     mantracker.register = mantracker._resource_tracker.register | ||||
|     mantracker.ensure_running = mantracker._resource_tracker.ensure_running | ||||
|     # ensure_running = mantracker._resource_tracker.ensure_running | ||||
|     mantracker.unregister = mantracker._resource_tracker.unregister | ||||
|     mantracker.getfd = mantracker._resource_tracker.getfd | ||||
| 
 | ||||
|  | @ -109,7 +118,6 @@ class _Token(Struct, frozen=True): | |||
|     shm_first_index_name: str | ||||
|     shm_last_index_name: str | ||||
|     dtype_descr: tuple | ||||
|     size: int  # in struct-array index / row terms | ||||
| 
 | ||||
|     @property | ||||
|     def dtype(self) -> np.dtype: | ||||
|  | @ -150,7 +158,6 @@ def get_shm_token(key: str) -> _Token: | |||
| 
 | ||||
| def _make_token( | ||||
|     key: str, | ||||
|     size: int, | ||||
|     dtype: Optional[np.dtype] = None, | ||||
| ) -> _Token: | ||||
|     ''' | ||||
|  | @ -158,13 +165,12 @@ def _make_token( | |||
|     to access a shared array. | ||||
| 
 | ||||
|     ''' | ||||
|     dtype = def_iohlcv_fields if dtype is None else dtype | ||||
|     dtype = base_iohlc_dtype if dtype is None else dtype | ||||
|     return _Token( | ||||
|         shm_name=key, | ||||
|         shm_first_index_name=key + "_first", | ||||
|         shm_last_index_name=key + "_last", | ||||
|         dtype_descr=tuple(np.dtype(dtype).descr), | ||||
|         size=size, | ||||
|         dtype_descr=tuple(np.dtype(dtype).descr) | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -216,7 +222,6 @@ class ShmArray: | |||
|             shm_first_index_name=self._first._shm.name, | ||||
|             shm_last_index_name=self._last._shm.name, | ||||
|             dtype_descr=tuple(self._array.dtype.descr), | ||||
|             size=self._len, | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|  | @ -248,6 +253,7 @@ class ShmArray: | |||
|         # to load an empty array.. | ||||
|         if len(a) == 0 and self._post_init: | ||||
|             raise RuntimeError('Empty array race condition hit!?') | ||||
|             # breakpoint() | ||||
| 
 | ||||
|         return a | ||||
| 
 | ||||
|  | @ -257,7 +263,7 @@ class ShmArray: | |||
| 
 | ||||
|         # type that all field values will be cast to | ||||
|         # in the returned view. | ||||
|         common_dtype: np.dtype = float, | ||||
|         common_dtype: np.dtype = np.float, | ||||
| 
 | ||||
|     ) -> np.ndarray: | ||||
| 
 | ||||
|  | @ -312,7 +318,7 @@ class ShmArray: | |||
|         field_map: Optional[dict[str, str]] = None, | ||||
|         prepend: bool = False, | ||||
|         update_first: bool = True, | ||||
|         start: int | None = None, | ||||
|         start: Optional[int] = None, | ||||
| 
 | ||||
|     ) -> int: | ||||
|         ''' | ||||
|  | @ -354,11 +360,7 @@ class ShmArray: | |||
|             # tries to access ``.array`` (which due to the index | ||||
|             # overlap will be empty). Pretty sure we've fixed it now | ||||
|             # but leaving this here as a reminder. | ||||
|             if ( | ||||
|                 prepend | ||||
|                 and update_first | ||||
|                 and length | ||||
|             ): | ||||
|             if prepend and update_first and length: | ||||
|                 assert index < self._first.value | ||||
| 
 | ||||
|             if ( | ||||
|  | @ -432,10 +434,10 @@ class ShmArray: | |||
| 
 | ||||
| 
 | ||||
| def open_shm_array( | ||||
|     size: int, | ||||
|     key: str | None = None, | ||||
|     dtype: np.dtype | None = None, | ||||
|     append_start_index: int | None = None, | ||||
| 
 | ||||
|     key: Optional[str] = None, | ||||
|     size: int = _default_size, | ||||
|     dtype: Optional[np.dtype] = None, | ||||
|     readonly: bool = False, | ||||
| 
 | ||||
| ) -> ShmArray: | ||||
|  | @ -465,8 +467,7 @@ def open_shm_array( | |||
| 
 | ||||
|     token = _make_token( | ||||
|         key=key, | ||||
|         size=size, | ||||
|         dtype=dtype, | ||||
|         dtype=dtype | ||||
|     ) | ||||
| 
 | ||||
|     # create single entry arrays for storing an first and last indices | ||||
|  | @ -500,13 +501,10 @@ def open_shm_array( | |||
|     # ``ShmArray._start.value: int = 0`` and the yet-to-be written | ||||
|     # real-time section will start at ``ShmArray.index: int``. | ||||
| 
 | ||||
|     # this sets the index to nearly 2/3rds into the length of | ||||
|     # the buffer leaving at least a "days worth of second samples" | ||||
|     # for the real-time section. | ||||
|     if append_start_index is None: | ||||
|         append_start_index = round(size * 0.616) | ||||
| 
 | ||||
|     last.value = first.value = append_start_index | ||||
|     # this sets the index to 3/4 of the length of the buffer | ||||
|     # leaving a "days worth of second samples" for the real-time | ||||
|     # section. | ||||
|     last.value = first.value = _rt_buffer_start | ||||
| 
 | ||||
|     shmarr = ShmArray( | ||||
|         array, | ||||
|  | @ -520,15 +518,16 @@ def open_shm_array( | |||
| 
 | ||||
|     # "unlink" created shm on process teardown by | ||||
|     # pushing teardown calls onto actor context stack | ||||
|     stack = tractor.current_actor().lifetime_stack | ||||
|     stack.callback(shmarr.close) | ||||
|     stack.callback(shmarr.destroy) | ||||
| 
 | ||||
|     tractor._actor._lifetime_stack.callback(shmarr.close) | ||||
|     tractor._actor._lifetime_stack.callback(shmarr.destroy) | ||||
| 
 | ||||
|     return shmarr | ||||
| 
 | ||||
| 
 | ||||
| def attach_shm_array( | ||||
|     token: tuple[str, str, tuple[str, str]], | ||||
|     size: int = _default_size, | ||||
|     readonly: bool = True, | ||||
| 
 | ||||
| ) -> ShmArray: | ||||
|  | @ -567,7 +566,7 @@ def attach_shm_array( | |||
|             raise _err | ||||
| 
 | ||||
|     shmarr = np.ndarray( | ||||
|         (token.size,), | ||||
|         (size,), | ||||
|         dtype=token.dtype, | ||||
|         buffer=shm.buf | ||||
|     ) | ||||
|  | @ -606,18 +605,15 @@ def attach_shm_array( | |||
|     if key not in _known_tokens: | ||||
|         _known_tokens[key] = token | ||||
| 
 | ||||
|     # "close" attached shm on actor teardown | ||||
|     tractor.current_actor().lifetime_stack.callback(sha.close) | ||||
|     # "close" attached shm on process teardown | ||||
|     tractor._actor._lifetime_stack.callback(sha.close) | ||||
| 
 | ||||
|     return sha | ||||
| 
 | ||||
| 
 | ||||
| def maybe_open_shm_array( | ||||
|     key: str, | ||||
|     size: int, | ||||
|     dtype: np.dtype | None = None, | ||||
|     append_start_index: int | None = None, | ||||
|     readonly: bool = False, | ||||
|     dtype: Optional[np.dtype] = None, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> tuple[ShmArray, bool]: | ||||
|  | @ -641,41 +637,23 @@ def maybe_open_shm_array( | |||
|     try: | ||||
|         # see if we already know this key | ||||
|         token = _known_tokens[key] | ||||
|         return ( | ||||
|             attach_shm_array( | ||||
|                 token=token, | ||||
|                 readonly=readonly, | ||||
|             ), | ||||
|             False, | ||||
|         ) | ||||
|         return attach_shm_array(token=token, **kwargs), False | ||||
|     except KeyError: | ||||
|         log.debug(f"Could not find {key} in shms cache") | ||||
|         log.warning(f"Could not find {key} in shms cache") | ||||
|         if dtype: | ||||
|             token = _make_token( | ||||
|                 key, | ||||
|                 size=size, | ||||
|                 dtype=dtype, | ||||
|             ) | ||||
|             token = _make_token(key, dtype) | ||||
|             try: | ||||
|                 return attach_shm_array(token=token, **kwargs), False | ||||
|             except FileNotFoundError: | ||||
|                 log.debug(f"Could not attach to shm with token {token}") | ||||
|                 log.warning(f"Could not attach to shm with token {token}") | ||||
| 
 | ||||
|         # This actor does not know about memory | ||||
|         # associated with the provided "key". | ||||
|         # Attempt to open a block and expect | ||||
|         # to fail if a block has been allocated | ||||
|         # on the OS by someone else. | ||||
|         return ( | ||||
|             open_shm_array( | ||||
|                 key=key, | ||||
|                 size=size, | ||||
|                 dtype=dtype, | ||||
|                 append_start_index=append_start_index, | ||||
|                 readonly=readonly, | ||||
|             ), | ||||
|             True, | ||||
|         ) | ||||
|         return open_shm_array(key=key, dtype=dtype, **kwargs), True | ||||
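
A hypothetical usage sketch against the explicit-``size`` variant of the API shown above (the buffer key and sizing are made up):

    shm, opened = maybe_open_shm_array(
        key='xbtusd.kraken.hist',   # hypothetical buffer key
        size=int(60 * 60 * 24),     # eg. a day's worth of 1s samples
        dtype=def_iohlcv_fields,    # default index-ohlcv layout
        readonly=False,
    )
    if opened:
        ...  # we allocated the block: back-fill history here
    else:
        ...  # another process allocated it; we merely attached
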
| 
 | ||||
| 
 | ||||
| def try_read( | ||||
|     array: np.ndarray | ||||
|  |  | |||
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) 2018-present  Tyler Goodlet (in stewardship for pikers) | ||||
| # Copyright (C) 2018-present  Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -18,47 +18,34 @@ | |||
| numpy data source conversion helpers. | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from typing import Any | ||||
| import decimal | ||||
| 
 | ||||
| from bidict import bidict | ||||
| import numpy as np | ||||
| from msgspec import Struct | ||||
| # from numba import from_dtype | ||||
| 
 | ||||
| 
 | ||||
| def_iohlcv_fields: list[tuple[str, type]] = [ | ||||
| 
 | ||||
|     # YES WE KNOW, this isn't needed in polars but we use it for doing | ||||
|     # ring-buffer like pre/append ops on our `ShmArray` real-time | ||||
|     # numpy-array buffering system such that there is a master index | ||||
|     # that can be used for index-arithmetic when writing data to the | ||||
|     # "middle" of the array. See the ``tractor.ipc.shm`` pkg for more | ||||
|     # details. | ||||
|     ('index', int), | ||||
| 
 | ||||
|     # presume int for epoch stamps since it's most common | ||||
|     # and makes the most sense to avoid float rounding issues. | ||||
|     # TODO: if we want higher reso we should use the new | ||||
|     # ``time.time_ns()`` in python 3.10+ | ||||
|     ('time', int), | ||||
| ohlc_fields = [ | ||||
|     ('time', float), | ||||
|     ('open', float), | ||||
|     ('high', float), | ||||
|     ('low', float), | ||||
|     ('close', float), | ||||
|     ('volume', float), | ||||
| 
 | ||||
|     # TODO: can we elim this from default field set to save on mem? | ||||
|     # i think only kraken really uses this in terms of what we get from | ||||
|     # their ohlc history API? | ||||
|     # ('bar_wap', float),  # shouldn't be default right? | ||||
|     ('bar_wap', float), | ||||
| ] | ||||
| 
 | ||||
| # remove index field | ||||
| def_ohlcv_fields: list[tuple[str, type]] = def_iohlcv_fields.copy() | ||||
| def_ohlcv_fields.pop(0) | ||||
| assert (len(def_iohlcv_fields) - len(def_ohlcv_fields)) == 1 | ||||
| ohlc_with_index = ohlc_fields.copy() | ||||
| ohlc_with_index.insert(0, ('index', int)) | ||||
| 
 | ||||
| # our minimum structured array layout for ohlc data | ||||
| base_iohlc_dtype = np.dtype(ohlc_with_index) | ||||
| base_ohlc_dtype = np.dtype(ohlc_fields) | ||||
| 
 | ||||
| # TODO: for now need to construct this manually for readonly arrays, see | ||||
| # https://github.com/numba/numba/issues/4511 | ||||
| # from numba import from_dtype | ||||
| # base_ohlc_dtype = np.dtype(def_ohlc_fields) | ||||
| # numba_ohlc_dtype = from_dtype(base_ohlc_dtype) | ||||
| 
 | ||||
| # map time frame "keys" to seconds values | ||||
|  | @ -73,6 +60,28 @@ tf_in_1s = bidict({ | |||
| }) | ||||
| 
 | ||||
| 
 | ||||
| def mk_fqsn( | ||||
|     provider: str, | ||||
|     symbol: str, | ||||
| 
 | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Generate a "fully qualified symbol name" which is | ||||
|     a reverse-hierarchical cross broker/provider symbol | ||||
| 
 | ||||
|     ''' | ||||
|     return '.'.join([symbol, provider]).lower() | ||||
| 
 | ||||
| 
 | ||||
| def float_digits( | ||||
|     value: float, | ||||
| ) -> int: | ||||
|     if value == 0: | ||||
|         return 0 | ||||
| 
 | ||||
|     return int(-decimal.Decimal(str(value)).as_tuple().exponent) | ||||
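
For example, ``float_digits()`` maps a tick size to its decimal place count:

    assert float_digits(0.01) == 2
    assert float_digits(0.00025) == 5
    assert float_digits(0) == 0  # zero short-circuits
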
| 
 | ||||
| 
 | ||||
| def ohlc_zeros(length: int) -> np.ndarray: | ||||
|     """Construct an OHLC field formatted structarray. | ||||
| 
 | ||||
|  | @ -83,6 +92,164 @@ def ohlc_zeros(length: int) -> np.ndarray: | |||
|     return np.zeros(length, dtype=base_ohlc_dtype) | ||||
| 
 | ||||
| 
 | ||||
| def unpack_fqsn(fqsn: str) -> tuple[str, str, str]: | ||||
|     ''' | ||||
|     Unpack a fully-qualified-symbol-name to ``tuple``. | ||||
| 
 | ||||
|     ''' | ||||
|     venue = '' | ||||
|     suffix = '' | ||||
| 
 | ||||
|     # TODO: probably reverse the order of all this XD | ||||
|     tokens = fqsn.split('.') | ||||
|     if len(tokens) < 3: | ||||
|         # probably crypto | ||||
|         symbol, broker = tokens | ||||
|         return ( | ||||
|             broker, | ||||
|             symbol, | ||||
|             '', | ||||
|         ) | ||||
| 
 | ||||
|     elif len(tokens) > 3: | ||||
|         symbol, venue, suffix, broker = tokens | ||||
|     else: | ||||
|         symbol, venue, broker = tokens | ||||
|         suffix = '' | ||||
| 
 | ||||
|     # head, _, broker = fqsn.rpartition('.') | ||||
|     # symbol, _, suffix = head.rpartition('.') | ||||
|     return ( | ||||
|         broker, | ||||
|         '.'.join([symbol, venue]), | ||||
|         suffix, | ||||
|     ) | ||||
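
The token layouts handled above, by example (symbol names are illustrative):

    assert unpack_fqsn('xbtusd.kraken') == ('kraken', 'xbtusd', '')
    assert unpack_fqsn('mnq.globex.ib') == ('ib', 'mnq.globex', '')
    assert unpack_fqsn('spy.arca.opt.ib') == ('ib', 'spy.arca', 'opt')
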
| 
 | ||||
| 
 | ||||
| class Symbol(Struct): | ||||
|     ''' | ||||
|     I guess this is some kinda container thing for dealing with | ||||
|     all the different meta-data formats from brokers? | ||||
| 
 | ||||
|     ''' | ||||
|     key: str | ||||
|     tick_size: float = 0.01 | ||||
|     lot_tick_size: float = 0.0  # "volume" precision as min step value | ||||
|     tick_size_digits: int = 2 | ||||
|     lot_size_digits: int = 0 | ||||
|     suffix: str = '' | ||||
|     broker_info: dict[str, dict[str, Any]] = {} | ||||
| 
 | ||||
|     # specifies a "class" of financial instrument | ||||
|     # ex. stock, future, option, bond etc. | ||||
| 
 | ||||
|     # @validate_arguments | ||||
|     @classmethod | ||||
|     def from_broker_info( | ||||
|         cls, | ||||
|         broker: str, | ||||
|         symbol: str, | ||||
|         info: dict[str, Any], | ||||
|         suffix: str = '', | ||||
| 
 | ||||
|     ) -> Symbol: | ||||
| 
 | ||||
|         tick_size = info.get('price_tick_size', 0.01) | ||||
|         lot_tick_size = info.get('lot_tick_size', 0.0) | ||||
| 
 | ||||
|         return Symbol( | ||||
|             key=symbol, | ||||
|             tick_size=tick_size, | ||||
|             lot_tick_size=lot_tick_size, | ||||
|             tick_size_digits=float_digits(tick_size), | ||||
|             lot_size_digits=float_digits(lot_tick_size), | ||||
|             suffix=suffix, | ||||
|             broker_info={broker: info}, | ||||
|         ) | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_fqsn( | ||||
|         cls, | ||||
|         fqsn: str, | ||||
|         info: dict[str, Any], | ||||
| 
 | ||||
|     ) -> Symbol: | ||||
|         broker, key, suffix = unpack_fqsn(fqsn) | ||||
|         return cls.from_broker_info( | ||||
|             broker, | ||||
|             key, | ||||
|             info=info, | ||||
|             suffix=suffix, | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def type_key(self) -> str: | ||||
|         return list(self.broker_info.values())[0]['asset_type'] | ||||
| 
 | ||||
|     @property | ||||
|     def brokers(self) -> list[str]: | ||||
|         return list(self.broker_info.keys()) | ||||
| 
 | ||||
|     def nearest_tick(self, value: float) -> float: | ||||
|         ''' | ||||
|         Return the nearest tick value based on minimum increment. | ||||
| 
 | ||||
|         ''' | ||||
|         mult = 1 / self.tick_size | ||||
|         return round(value * mult) / mult | ||||
| 
 | ||||
|     def front_feed(self) -> tuple[str, str]: | ||||
|         ''' | ||||
|         Return the "current" feed key for this symbol. | ||||
| 
 | ||||
|         (i.e. the broker + symbol key in a tuple). | ||||
| 
 | ||||
|         ''' | ||||
|         return ( | ||||
|             list(self.broker_info.keys())[0], | ||||
|             self.key, | ||||
|         ) | ||||
| 
 | ||||
|     def tokens(self) -> tuple[str, ...]: | ||||
|         broker, key = self.front_feed() | ||||
|         if self.suffix: | ||||
|             return (key, self.suffix, broker) | ||||
|         else: | ||||
|             return (key, broker) | ||||
| 
 | ||||
|     def front_fqsn(self) -> str: | ||||
|         ''' | ||||
|         fqsn = "fully qualified symbol name" | ||||
| 
 | ||||
|         Basically the idea here is that all client-ish code (aka programs/actors | ||||
|         that ask the provider-agnostic layers in the stack for data) should be | ||||
|         able to tell which backend / venue / derivative each data feed/flow is | ||||
|         from by an explicit string key of the current form: | ||||
| 
 | ||||
|         <instrumentname>.<venue>.<suffixwithmetadata>.<brokerbackendname> | ||||
| 
 | ||||
|         TODO: I have thoughts that we should actually change this to be | ||||
|         more like an "attr lookup" (like how the web should have done | ||||
|         urls, but marketing peeps ruined it etc. etc.): | ||||
| 
 | ||||
|         <broker>.<venue>.<instrumentname>.<suffixwithmetadata> | ||||
| 
 | ||||
|         ''' | ||||
|         tokens = self.tokens() | ||||
|         fqsn = '.'.join(map(str.lower, tokens)) | ||||
|         return fqsn | ||||
| 
 | ||||
|     def iterfqsns(self) -> list[str]: | ||||
|         keys = [] | ||||
|         for broker in self.broker_info.keys(): | ||||
|             fqsn = mk_fqsn(self.key, broker) | ||||
|             if self.suffix: | ||||
|                 fqsn += f'.{self.suffix}' | ||||
|             keys.append(fqsn) | ||||
| 
 | ||||
|         return keys | ||||
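
A hypothetical round-trip: build a ``Symbol`` from an fqsn plus broker-provided info (the info keys follow the defaults read in ``from_broker_info()``; 'asset_type' is what ``.type_key`` expects):

    sym = Symbol.from_fqsn(
        'mnq.globex.ib',
        info={
            'price_tick_size': 0.25,
            'lot_tick_size': 1.0,
            'asset_type': 'future',
        },
    )
    assert sym.front_fqsn() == 'mnq.globex.ib'
    assert sym.nearest_tick(16001.13) == 16001.25
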
| 
 | ||||
| 
 | ||||
| def _nan_to_closest_num(array: np.ndarray): | ||||
|     """Return interpolated values instead of NaN. | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,510 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Mega-simple symbology cache via TOML files. | ||||
| 
 | ||||
| Allow backend data providers and/or brokers to stash their | ||||
| symbology sets (aka the meta data we normalize into our | ||||
| `.accounting.MktPair` type) to the filesystem for faster lookup and | ||||
| offline usage. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from pathlib import Path | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Sequence, | ||||
|     Hashable, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| from types import ModuleType | ||||
| 
 | ||||
| from rapidfuzz import process as fuzzy | ||||
| import tomli_w  # for fast symbol cache writing | ||||
| import tractor | ||||
| import trio | ||||
| try: | ||||
|     import tomllib | ||||
| except ModuleNotFoundError: | ||||
|     import tomli as tomllib | ||||
| from msgspec import field | ||||
| 
 | ||||
| from piker.log import get_logger | ||||
| from piker import config | ||||
| from piker.types import Struct | ||||
| from piker.brokers import ( | ||||
|     open_cached_client, | ||||
|     get_brokermod, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ..accounting import ( | ||||
|         Asset, | ||||
|         MktPair, | ||||
|     ) | ||||
| 
 | ||||
| log = get_logger('data.cache') | ||||
| 
 | ||||
| 
 | ||||
| class SymbologyCache(Struct): | ||||
|     ''' | ||||
|     Asset meta-data cache which holds lookup tables for 3 sets of | ||||
|     market-symbology related struct-types required by the | ||||
|     `.accounting` and `.data` subsystems. | ||||
| 
 | ||||
|     ''' | ||||
|     mod: ModuleType | ||||
|     fp: Path | ||||
| 
 | ||||
|     # all asset-money-systems descriptions as minimally defined by | ||||
|     # in `.accounting.Asset` | ||||
|     assets: dict[str, Asset] = field(default_factory=dict) | ||||
| 
 | ||||
|     # backend-system pairs loaded in provider (schema) specific | ||||
|     # structs. | ||||
|     pairs: dict[str, Struct] = field(default_factory=dict) | ||||
|     # serialized namespace path to the backend's pair-info-`Struct` | ||||
|     # defn B) | ||||
|     pair_ns_path: tractor.msg.NamespacePath | None = None | ||||
| 
 | ||||
|     # TODO: piker-normalized `.accounting.MktPair` table? | ||||
|     # loaded from the `.pairs` and a normalizer | ||||
|     # provided by the backend pkg. | ||||
|     mktmaps: dict[str, MktPair] = field(default_factory=dict) | ||||
| 
 | ||||
|     def write_config(self) -> None: | ||||
| 
 | ||||
|         # put the backend's pair-struct type ref at the top | ||||
|         # of file if possible. | ||||
|         cachedict: dict[str, Any] = { | ||||
|             'pair_ns_path': str(self.pair_ns_path or ''), | ||||
|         } | ||||
| 
 | ||||
|         # serialize all tables as dicts for TOML. | ||||
|         for key, table in { | ||||
|             'assets': self.assets, | ||||
|             'pairs': self.pairs, | ||||
|             'mktmaps': self.mktmaps, | ||||
|         }.items(): | ||||
|             if not table: | ||||
|                 log.warning( | ||||
|                     f'Asset cache table for `{key}` is empty?' | ||||
|                 ) | ||||
|                 continue | ||||
| 
 | ||||
|             dct = cachedict[key] = {} | ||||
|             for skey, struct in table.items(): | ||||
|                 dct[skey] = struct.to_dict(include_non_members=False) | ||||
| 
 | ||||
|         try: | ||||
|             with self.fp.open(mode='wb') as fp: | ||||
|                 tomli_w.dump(cachedict, fp) | ||||
|         except TypeError: | ||||
|             self.fp.unlink() | ||||
|             raise | ||||
| 
 | ||||
|     async def load(self) -> None: | ||||
|         ''' | ||||
|         Explicitly load the "symbology set" for this provider by using | ||||
|         2 required `Client` methods: | ||||
| 
 | ||||
|           - `.get_assets()`: returning a table of `Asset`s | ||||
|           - `.get_mkt_pairs()`: returning a table of pair-`Struct` | ||||
|             types, custom defined by the particular backend. | ||||
| 
 | ||||
|         AND, the required `.get_mkt_info()` module-level endpoint | ||||
|         which maps `fqme: str` -> `MktPair`s. | ||||
| 
 | ||||
|         These tables are then used to fill out the `.assets`, `.pairs` and | ||||
|         `.mktmaps` tables on this cache instance, respectively. | ||||
| 
 | ||||
|         ''' | ||||
|         async with open_cached_client(self.mod.name) as client: | ||||
| 
 | ||||
|             if get_assets := getattr(client, 'get_assets', None): | ||||
|                 assets: dict[str, Asset] = await get_assets() | ||||
|                 for bs_mktid, asset in assets.items(): | ||||
|                     self.assets[bs_mktid] = asset | ||||
|             else: | ||||
|                 log.warning( | ||||
|                     f'No symbology cache `Asset` support for `{self.mod.name}`..\n' | ||||
|                     'Implement `Client.get_assets()`!' | ||||
|                 ) | ||||
| 
 | ||||
|             if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None): | ||||
| 
 | ||||
|                 pairs: dict[str, Struct] = await get_mkt_pairs() | ||||
|                 for bs_fqme, pair in pairs.items(): | ||||
| 
 | ||||
|                     # NOTE: every backend defined pair should | ||||
|                     # declare it's ns path for roundtrip | ||||
|                     # serialization lookup. | ||||
|                     if not getattr(pair, 'ns_path', None): | ||||
|                         raise TypeError( | ||||
|                             f'Pair-struct for {self.mod.name} MUST define a ' | ||||
|                             '`.ns_path: str`!\n' | ||||
|                             f'{pair}' | ||||
|                         ) | ||||
| 
 | ||||
|                     entry = await self.mod.get_mkt_info(pair.bs_fqme) | ||||
|                     if not entry: | ||||
|                         continue | ||||
| 
 | ||||
|                     mkt: MktPair | ||||
|                     pair: Struct | ||||
|                     mkt, _pair = entry | ||||
|                     assert _pair is pair, ( | ||||
|                         f'`{self.mod.name}` backend probably has a ' | ||||
|                         'keying-symmetry problem between the pair-`Struct` ' | ||||
|                         'returned from `Client.get_mkt_pairs()`and the ' | ||||
|                         'module level endpoint: `.get_mkt_info()`\n\n' | ||||
|                         "Here's the struct diff:\n" | ||||
|                         f'{_pair - pair}' | ||||
|                     ) | ||||
|                     # NOTE XXX: this means backends MUST implement | ||||
|                     # a `Struct.bs_mktid: str` field to provide | ||||
|                     # a native-keyed map to their own symbol | ||||
|                     # set(s). | ||||
|                     self.pairs[pair.bs_mktid] = pair | ||||
| 
 | ||||
|                     # NOTE: `MktPair`s are keyed here using piker's | ||||
|                     # internal FQME schema so that search, | ||||
|                     # accounting and feed init can be accomplished | ||||
|                     # a sane, uniform, normalized basis. | ||||
|                     self.mktmaps[mkt.fqme] = mkt | ||||
| 
 | ||||
|                 self.pair_ns_path: str = tractor.msg.NamespacePath.from_ref( | ||||
|                     pair, | ||||
|                 ) | ||||
| 
 | ||||
|             else: | ||||
|                 log.warning( | ||||
|                     f'No symbology cache `Pair` support for `{self.mod.name}`..\n' | ||||
|                     'Implement `Client.get_mkt_pairs()`!' | ||||
|                 ) | ||||
| 
 | ||||
|         return self | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_dict( | ||||
|         cls: type, | ||||
|         data: dict, | ||||
|         **kwargs, | ||||
|     ) -> SymbologyCache: | ||||
| 
 | ||||
|         # normal init inputs | ||||
|         cache = cls(**kwargs) | ||||
| 
 | ||||
|         # XXX WARNING: this may break if backend namespacing | ||||
|         # changes (eg. `Pair` class def is moved to another | ||||
|         # module) in which case you can manually update the | ||||
|         # `pair_ns_path` in the symcache file and try again. | ||||
|         # TODO: probably a verbose error about this? | ||||
|         Pair: type = tractor.msg.NamespacePath( | ||||
|             str(data['pair_ns_path']) | ||||
|         ).load_ref() | ||||
| 
 | ||||
|         pairtable = data.pop('pairs') | ||||
|         for key, pairdict in pairtable.items(): | ||||
| 
 | ||||
|             # allow each serialized pair-dict-table to declare its | ||||
|             # specific struct type's path in cases where a backend | ||||
|             # supports multiples (normally with different | ||||
|             # schemas..) and we are storing them in a flat `.pairs` | ||||
|             # table. | ||||
|             ThisPair = Pair | ||||
|             if this_pair_type := pairdict.get('ns_path'): | ||||
|                 ThisPair: type = tractor.msg.NamespacePath( | ||||
|                     str(this_pair_type) | ||||
|                 ).load_ref() | ||||
| 
 | ||||
|             pair: Struct = ThisPair(**pairdict) | ||||
|             cache.pairs[key] = pair | ||||
| 
 | ||||
|         from ..accounting import ( | ||||
|             Asset, | ||||
|             MktPair, | ||||
|         ) | ||||
| 
 | ||||
|         # load `dict` -> `Asset` | ||||
|         assettable = data.pop('assets') | ||||
|         for name, asdict in assettable.items(): | ||||
|             cache.assets[name] = Asset.from_msg(asdict) | ||||
| 
 | ||||
|         # load `dict` -> `MktPair` | ||||
|         dne: list[str] = [] | ||||
|         mkttable = data.pop('mktmaps') | ||||
|         for fqme, mktdict in mkttable.items(): | ||||
| 
 | ||||
|             mkt = MktPair.from_msg(mktdict) | ||||
|             assert mkt.fqme == fqme | ||||
| 
 | ||||
|             # sanity check asset refs from those (presumably) | ||||
|             # loaded asset set above. | ||||
|             src: Asset = cache.assets[mkt.src.name] | ||||
|             assert src == mkt.src | ||||
|             dst: Asset | ||||
|             if not (dst := cache.assets.get(mkt.dst.name)): | ||||
|                 dne.append(mkt.dst.name) | ||||
|                 continue | ||||
|             else: | ||||
|                 assert dst.name == mkt.dst.name | ||||
| 
 | ||||
|             cache.mktmaps[fqme] = mkt | ||||
| 
 | ||||
|         if dne: | ||||
|             log.warning( | ||||
|                 f'These `MktPair.dst: Asset`s DNE says `{cache.mod.name}`?\n' | ||||
|                 f'{pformat(dne)}' | ||||
|             ) | ||||
|         return cache | ||||
| 
 | ||||
|     @staticmethod | ||||
|     async def from_scratch( | ||||
|         mod: ModuleType, | ||||
|         fp: Path, | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> SymbologyCache: | ||||
|         ''' | ||||
|         Generate (a) new symcache (contents) entirely from scratch | ||||
|         including all (TOML) serialized data and file. | ||||
| 
 | ||||
|         ''' | ||||
|         log.info(f'GENERATING symbology cache for `{mod.name}`') | ||||
|         cache = SymbologyCache( | ||||
|             mod=mod, | ||||
|             fp=fp, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         await cache.load() | ||||
|         cache.write_config() | ||||
|         return cache | ||||
| 
 | ||||
|     def search( | ||||
|         self, | ||||
|         pattern: str, | ||||
|         table: str = 'mktmaps' | ||||
| 
 | ||||
|     ) -> dict[str, Struct]: | ||||
|         ''' | ||||
|         (Fuzzy) search this cache's `.mktmaps` table, which is | ||||
|         keyed by FQMEs, for `pattern: str` and return the best | ||||
|         matches in a `dict` including the `MktPair` values. | ||||
| 
 | ||||
|         ''' | ||||
|         matches = fuzzy.extract( | ||||
|             pattern, | ||||
|             getattr(self, table), | ||||
|             score_cutoff=50, | ||||
|         ) | ||||
| 
 | ||||
|         # repack in dict[fqme, MktPair] form | ||||
|         return { | ||||
|             item[0].fqme: item[0] | ||||
|             for item in matches | ||||
|         } | ||||
| 
 | ||||
| 
 | ||||
| # actor-process-local in-mem-cache of symcaches (by backend). | ||||
| _caches: dict[str, SymbologyCache] = {} | ||||
| 
 | ||||
| 
 | ||||
| def mk_cachefile( | ||||
|     provider: str, | ||||
| ) -> Path: | ||||
|     cachedir: Path = config.get_conf_dir() / '_cache' | ||||
|     if not cachedir.is_dir(): | ||||
|         log.info(f'Creating `nativedb` directory: {cachedir}') | ||||
|         cachedir.mkdir() | ||||
| 
 | ||||
|     cachefile: Path = cachedir / f'{str(provider)}.symcache.toml' | ||||
|     cachefile.touch() | ||||
|     return cachefile | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_symcache( | ||||
|     mod_or_name: ModuleType | str, | ||||
| 
 | ||||
|     reload: bool = False, | ||||
|     only_from_memcache: bool = False,  # no API req | ||||
|     _no_symcache: bool = False,  # no backend support | ||||
| 
 | ||||
| ) -> SymbologyCache: | ||||
| 
 | ||||
|     if isinstance(mod_or_name, str): | ||||
|         mod = get_brokermod(mod_or_name) | ||||
|     else: | ||||
|         mod: ModuleType = mod_or_name | ||||
| 
 | ||||
|     provider: str = mod.name | ||||
|     cachefile: Path = mk_cachefile(provider) | ||||
| 
 | ||||
|     # NOTE: certain backends might not support a symbology cache | ||||
|     # (easily) and thus we allow for an empty instance to be loaded | ||||
|     # and manually filled in at the whim of the caller presuming | ||||
|     # the backend pkg-module is annotated appropriately. | ||||
|     if ( | ||||
|         getattr(mod, '_no_symcache', False) | ||||
|         or _no_symcache | ||||
|     ): | ||||
|         yield SymbologyCache( | ||||
|             mod=mod, | ||||
|             fp=cachefile, | ||||
|         ) | ||||
|         # don't do nuttin | ||||
|         return | ||||
| 
 | ||||
|     # actor-level cache-cache XD | ||||
|     global _caches | ||||
|     if not reload: | ||||
|         try: | ||||
|             yield _caches[provider] | ||||
|         except KeyError: | ||||
|             msg: str = ( | ||||
|                 f'No asset info cache exists yet for `{provider}`' | ||||
|             ) | ||||
|             if only_from_memcache: | ||||
|                 raise RuntimeError(msg) | ||||
|             else: | ||||
|                 log.warning(msg) | ||||
| 
 | ||||
|     # if no cache exists or an explicit reload is requested, load | ||||
|     # the provider API and call appropriate endpoints to populate | ||||
|     # the mkt and asset tables. | ||||
|     if ( | ||||
|         reload | ||||
|         or not cachefile.is_file() | ||||
|     ): | ||||
|         cache = await SymbologyCache.from_scratch( | ||||
|             mod=mod, | ||||
|             fp=cachefile, | ||||
|         ) | ||||
| 
 | ||||
|     else: | ||||
|         log.info( | ||||
|             f'Loading EXISTING `{mod.name}` symbology cache:\n' | ||||
|             f'> {cachefile}' | ||||
|         ) | ||||
|         import time | ||||
|         now = time.time() | ||||
|         with cachefile.open('rb') as existing_fp: | ||||
|             data: dict[str, dict] = tomllib.load(existing_fp) | ||||
|             log.runtime(f'SYMCACHE TOML LOAD TIME: {time.time() - now}') | ||||
| 
 | ||||
|             # if there's an empty file for some reason we need | ||||
|             # to do a full reload as well! | ||||
|             if not data: | ||||
|                 cache = await SymbologyCache.from_scratch( | ||||
|                     mod=mod, | ||||
|                     fp=cachefile, | ||||
|                 ) | ||||
|             else: | ||||
|                 cache = SymbologyCache.from_dict( | ||||
|                     data, | ||||
|                     mod=mod, | ||||
|                     fp=cachefile, | ||||
|                 ) | ||||
| 
 | ||||
| 
 | ||||
|         # TODO: use a real profiling sys.. | ||||
|         # https://github.com/pikers/piker/issues/337 | ||||
|         log.info(f'SYMCACHE LOAD TIME: {time.time() - now}') | ||||
| 
 | ||||
|     yield cache | ||||
| 
 | ||||
|     # TODO: write only when changes detected? but that should | ||||
|     # never happen right except on reload? | ||||
|     # cache.write_config() | ||||
| 
 | ||||
| 
 | ||||
| def get_symcache( | ||||
|     provider: str, | ||||
|     force_reload: bool = False, | ||||
| 
 | ||||
| ) -> SymbologyCache: | ||||
|     ''' | ||||
|     Get any available symbology/assets cache from sync code by | ||||
|     (maybe) manually running `trio` to do the work. | ||||
| 
 | ||||
|     ''' | ||||
|     # spawn tractor runtime and generate cache | ||||
|     # if not existing. | ||||
|     async def sched_gen_symcache(): | ||||
|         async with ( | ||||
|             # only for runtime's debug mode | ||||
|             tractor.open_nursery(debug_mode=True), | ||||
| 
 | ||||
|             open_symcache( | ||||
|                 get_brokermod(provider), | ||||
|                 reload=force_reload, | ||||
|             ) as symcache, | ||||
|         ): | ||||
|             return symcache | ||||
| 
 | ||||
|     try: | ||||
|         symcache: SymbologyCache = trio.run(sched_gen_symcache) | ||||
|         assert symcache | ||||
|     except BaseException: | ||||
|         import pdbp | ||||
|         pdbp.xpm() | ||||
| 
 | ||||
|     return symcache | ||||
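For example, from synchronous code (the provider name here is hypothetical):

    # blocks while `trio` runs the async cache load under the hood
    symcache = get_symcache('binance')
    print(list(symcache.mktmaps)[:5])  # peek at a few FQME keys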
| 
 | ||||
| 
 | ||||
| def match_from_pairs( | ||||
|     pairs: dict[str, Struct], | ||||
|     query: str, | ||||
|     score_cutoff: int = 50, | ||||
|     **extract_kwargs, | ||||
| 
 | ||||
| ) -> dict[str, Struct]: | ||||
|     ''' | ||||
|     Fuzzy search over a "pairs table" maintained by most backends | ||||
|     as part of their symbology-info caching internals. | ||||
| 
 | ||||
|     Scan the native symbol key set and return best ranked | ||||
|     matches back in a new `dict`. | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
|     # TODO: somehow cache this list (per call) like we were in | ||||
|     # `open_symbol_search()`? | ||||
|     keys: list[str] = list(pairs) | ||||
|     matches: list[tuple[ | ||||
|         Sequence[Hashable],  # matching input key | ||||
|         Any,  # scores | ||||
|         Any, | ||||
|     ]] = fuzzy.extract( | ||||
|         # NOTE: most backends provide keys uppercased | ||||
|         query=query, | ||||
|         choices=keys, | ||||
|         score_cutoff=score_cutoff, | ||||
|         **extract_kwargs, | ||||
|     ) | ||||
| 
 | ||||
|     # pop and repack pairs in output dict | ||||
|     matched_pairs: dict[str, Struct] = {} | ||||
|     for item in matches: | ||||
|         pair_key: str = item[0] | ||||
|         matched_pairs[pair_key] = pairs[pair_key] | ||||
| 
 | ||||
|     return matched_pairs | ||||
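A toy invocation (the pairs table below is a stand-in; real backends map native keys to their own pair structs):

    pairs = {
        'BTCUSDT': btc_usdt_pair,  # hypothetical backend pair structs
        'BTCUSDC': btc_usdc_pair,
        'ETHUSDT': eth_usdt_pair,
    }
    matched = match_from_pairs(pairs, 'btcusdt')
    # -> a subset of `pairs` keyed by the best fuzzy-ranked native keys,
    #    likely including 'BTCUSDT' (and possibly 'BTCUSDC')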
|  | @ -1,34 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Data layer module commons. | ||||
| 
 | ||||
| ''' | ||||
| from functools import partial | ||||
| 
 | ||||
| from ..log import ( | ||||
|     get_logger, | ||||
|     get_console_log, | ||||
| ) | ||||
| subsys: str = 'piker.data' | ||||
| 
 | ||||
| log = get_logger(subsys) | ||||
| 
 | ||||
| get_console_log = partial( | ||||
|     get_console_log, | ||||
|     name=subsys, | ||||
| ) | ||||
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -18,30 +18,13 @@ | |||
| ToOlS fOr CoPInG wITh "tHE wEB" protocols. | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| from itertools import count | ||||
| from functools import partial | ||||
| from contextlib import asynccontextmanager, AsyncExitStack | ||||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Callable, | ||||
|     AsyncContextManager, | ||||
|     AsyncGenerator, | ||||
|     Iterable, | ||||
| ) | ||||
| from typing import Any, Callable, AsyncGenerator | ||||
| import json | ||||
| 
 | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| from trio_websocket import ( | ||||
|     WebSocketConnection, | ||||
|     open_websocket_url, | ||||
| ) | ||||
| from wsproto.utilities import LocalProtocolError | ||||
| import trio_websocket | ||||
| from trio_websocket._impl import ( | ||||
|     ConnectionClosed, | ||||
|     DisconnectionTimeout, | ||||
|  | @ -50,71 +33,74 @@ from trio_websocket._impl import ( | |||
|     ConnectionTimeout, | ||||
| ) | ||||
| 
 | ||||
| from piker.types import Struct | ||||
| from ._util import log | ||||
| from ..log import get_logger | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| class NoBsWs: | ||||
|     ''' | ||||
|     Make ``trio_websocket`` sockets stay up no matter the bs. | ||||
|     """Make ``trio_websocket`` sockets stay up no matter the bs. | ||||
| 
 | ||||
|     A shim interface that allows client code to stream from some | ||||
|     ``WebSocketConnection`` but where any connectivity bs is handled | ||||
|     automatically and entirely in the background. | ||||
| 
 | ||||
|     NOTE: this type should never be created directly but instead is | ||||
|     provided via the ``open_autorecon_ws()`` factory below. | ||||
| 
 | ||||
|     ''' | ||||
|     # apparently we can QoS for all sorts of reasons..so catch em. | ||||
|     """ | ||||
|     recon_errors = ( | ||||
|         ConnectionClosed, | ||||
|         DisconnectionTimeout, | ||||
|         ConnectionRejected, | ||||
|         HandshakeError, | ||||
|         ConnectionTimeout, | ||||
|         LocalProtocolError, | ||||
|     ) | ||||
| 
 | ||||
|     def __init__( | ||||
|         self, | ||||
|         url: str, | ||||
|         rxchan: trio.MemoryReceiveChannel, | ||||
|         msg_recv_timeout: float, | ||||
| 
 | ||||
|         serializer: ModuleType = json | ||||
|         stack: AsyncExitStack, | ||||
|         fixture: Callable, | ||||
|         serializer: ModuleType = json, | ||||
|     ): | ||||
|         self.url = url | ||||
|         self._rx = rxchan | ||||
|         self._timeout = msg_recv_timeout | ||||
|         self.fixture = fixture | ||||
|         self._stack = stack | ||||
|         self._ws: 'WebSocketConnection' = None  # noqa | ||||
| 
 | ||||
|         # signaling between caller and relay task which determines when | ||||
|         # socket is connected (and subscribed). | ||||
|         self._connected: trio.Event = trio.Event() | ||||
|     async def _connect( | ||||
|         self, | ||||
|         tries: int = 1000, | ||||
|     ) -> None: | ||||
|         while True: | ||||
|             try: | ||||
|                 await self._stack.aclose() | ||||
|             except (DisconnectionTimeout, RuntimeError): | ||||
|                 await trio.sleep(0.5) | ||||
|             else: | ||||
|                 break | ||||
| 
 | ||||
|         # dynamically reset by the bg relay task | ||||
|         self._ws: WebSocketConnection | None = None | ||||
|         self._cs: trio.CancelScope | None = None | ||||
|         last_err = None | ||||
|         for i in range(tries): | ||||
|             try: | ||||
|                 self._ws = await self._stack.enter_async_context( | ||||
|                     trio_websocket.open_websocket_url(self.url) | ||||
|                 ) | ||||
|                 # rerun user code fixture | ||||
|                 ret = await self._stack.enter_async_context( | ||||
|                     self.fixture(self) | ||||
|                 ) | ||||
| 
 | ||||
|         # interchange codec methods | ||||
|         # TODO: obviously the method API here may be different | ||||
|         # for another interchange format.. | ||||
|         self._dumps: Callable = serializer.dumps | ||||
|         self._loads: Callable = serializer.loads | ||||
|                 assert ret is None | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         return self._connected.is_set() | ||||
|                 log.info(f'Connection success: {self.url}') | ||||
|                 return self._ws | ||||
| 
 | ||||
|     async def reset(self) -> None: | ||||
|         ''' | ||||
|         Reset the underlying ws connection by cancelling | ||||
|         the bg relay task and waiting for it to signal | ||||
|         a new connection. | ||||
| 
 | ||||
|         ''' | ||||
|         self._connected = trio.Event() | ||||
|         self._cs.cancel() | ||||
|         await self._connected.wait() | ||||
|             except self.recon_errors as err: | ||||
|                 last_err = err | ||||
|                 log.error( | ||||
|                     f'{self} connection bail with ' | ||||
|                     f'{type(err)}...retry attempt {i}' | ||||
|                 ) | ||||
|                 await trio.sleep(0.5) | ||||
|                 continue | ||||
|         else: | ||||
|             log.exception('ws connection fail...') | ||||
|             raise last_err | ||||
| 
 | ||||
|     async def send_msg( | ||||
|         self, | ||||
|  | @ -122,397 +108,37 @@ class NoBsWs: | |||
|     ) -> None: | ||||
|         while True: | ||||
|             try: | ||||
|                 msg: Any = self._dumps(data) | ||||
|                 return await self._ws.send_message(msg) | ||||
|                 return await self._ws.send_message(json.dumps(data)) | ||||
|             except self.recon_errors: | ||||
|                 await self.reset() | ||||
|                 await self._connect() | ||||
| 
 | ||||
|     async def recv_msg(self) -> Any: | ||||
|         msg: Any = await self._rx.receive() | ||||
|         data = self._loads(msg) | ||||
|         return data | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self | ||||
| 
 | ||||
|     async def __anext__(self): | ||||
|         return await self.recv_msg() | ||||
| 
 | ||||
|     def set_recv_timeout( | ||||
|     async def recv_msg( | ||||
|         self, | ||||
|         timeout: float, | ||||
|     ) -> None: | ||||
|         self._timeout = timeout | ||||
| 
 | ||||
| 
 | ||||
| async def _reconnect_forever( | ||||
|     url: str, | ||||
|     snd: trio.MemorySendChannel, | ||||
|     nobsws: NoBsWs, | ||||
|     reset_after: int,  # msg recv timeout before reset attempt | ||||
| 
 | ||||
|     fixture: AsyncContextManager | None = None, | ||||
|     task_status: TaskStatus = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     # TODO: can we just report "where" in the call stack | ||||
|     # the client code is using the ws stream? | ||||
|     # Maybe we can just drop this since it's already in the log msg | ||||
|     # prefix? | ||||
|     if fixture is not None: | ||||
|         src_mod: str = fixture.__module__ | ||||
|     else: | ||||
|         src_mod: str = 'unknown' | ||||
| 
 | ||||
|     async def proxy_msgs( | ||||
|         ws: WebSocketConnection, | ||||
|         pcs: trio.CancelScope,  # parent cancel scope | ||||
|     ): | ||||
|         ''' | ||||
|         Receive (under the `timeout` deadline) all msgs from the underlying | ||||
|         websocket and relay them to (calling) parent task via ``trio`` | ||||
|         mem chan. | ||||
| 
 | ||||
|         ''' | ||||
|         # after so many msg recv timeouts, reset the connection | ||||
|         timeouts: int = 0 | ||||
| 
 | ||||
|     ) -> Any: | ||||
|         while True: | ||||
|             with trio.move_on_after( | ||||
|                 # can be dynamically changed by user code | ||||
|                 nobsws._timeout, | ||||
|             ) as cs: | ||||
|                 try: | ||||
|                     msg: Any = await ws.get_message() | ||||
|                     await snd.send(msg) | ||||
|                 except nobsws.recon_errors: | ||||
|                     log.exception( | ||||
|                         f'{src_mod}\n' | ||||
|                         f'{url} connection bail with:' | ||||
|                     ) | ||||
|                     await trio.sleep(0.5) | ||||
|                     pcs.cancel() | ||||
| 
 | ||||
|                     # go back to reconnect loop in parent task | ||||
|                     return | ||||
| 
 | ||||
|             if cs.cancelled_caught: | ||||
|                 timeouts += 1 | ||||
|                 if timeouts > reset_after: | ||||
|                     log.error( | ||||
|                         f'{src_mod}\n' | ||||
|                         'WS feed seems down and slow af.. reconnecting\n' | ||||
|                     ) | ||||
|                     pcs.cancel() | ||||
| 
 | ||||
|                     # go back to reconnect loop in parent task | ||||
|                     return | ||||
| 
 | ||||
|     async def open_fixture( | ||||
|         fixture: AsyncContextManager, | ||||
|         nobsws: NoBsWs, | ||||
|         task_status: TaskStatus = trio.TASK_STATUS_IGNORED, | ||||
|     ): | ||||
|         ''' | ||||
|         Open user provided `@acm` and sleep until any connection | ||||
|         reset occurs. | ||||
| 
 | ||||
|         ''' | ||||
|         async with fixture(nobsws) as ret: | ||||
|             assert ret is None | ||||
|             task_status.started() | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|     # last_err = None | ||||
|     nobsws._connected = trio.Event() | ||||
|     task_status.started() | ||||
| 
 | ||||
|     while not snd._closed: | ||||
|         log.info( | ||||
|             f'{src_mod}\n' | ||||
|             f'{url} trying (RE)CONNECT' | ||||
|         ) | ||||
| 
 | ||||
|         ws: WebSocketConnection | ||||
|         try: | ||||
|             async with ( | ||||
|                 trio.open_nursery() as n, | ||||
|                 open_websocket_url(url) as ws, | ||||
|             ): | ||||
|                 cs = nobsws._cs = n.cancel_scope | ||||
|                 nobsws._ws = ws | ||||
|                 log.info( | ||||
|                     f'{src_mod}\n' | ||||
|                     f'Connection success: {url}' | ||||
|                 ) | ||||
| 
 | ||||
|                 # begin relay loop to forward msgs | ||||
|                 n.start_soon( | ||||
|                     proxy_msgs, | ||||
|                     ws, | ||||
|                     cs, | ||||
|                 ) | ||||
| 
 | ||||
|                 if fixture is not None: | ||||
|                     log.info( | ||||
|                         f'{src_mod}\n' | ||||
|                         f'Entering fixture: {fixture}' | ||||
|                     ) | ||||
| 
 | ||||
|                     # TODO: should we return an explicit sub-cs | ||||
|                     # from this fixture task? | ||||
|                     await n.start( | ||||
|                         open_fixture, | ||||
|                         fixture, | ||||
|                         nobsws, | ||||
|                     ) | ||||
| 
 | ||||
|                 # indicate to wrapper / opener that we are up and block | ||||
|                 # to let tasks run **inside** the ws open block above. | ||||
|                 nobsws._connected.set() | ||||
|                 await trio.sleep_forever() | ||||
|         except HandshakeError: | ||||
|             log.exception('Retrying connection') | ||||
| 
 | ||||
|         # ws & nursery block ends | ||||
| 
 | ||||
|         nobsws._connected = trio.Event() | ||||
|         if cs.cancelled_caught: | ||||
|             log.cancel( | ||||
|                 f'{url} connection cancelled!' | ||||
|             ) | ||||
|             # if wrapper cancelled us, we expect it to also | ||||
|             # have re-assigned a new event | ||||
|             assert ( | ||||
|                 nobsws._connected | ||||
|                 and not nobsws._connected.is_set() | ||||
|             ) | ||||
| 
 | ||||
|         # -> from here, move to next reconnect attempt iteration | ||||
|         # in the while loop above Bp | ||||
| 
 | ||||
|     else: | ||||
|         log.exception( | ||||
|             f'{src_mod}\n' | ||||
|             'ws connection closed by client...' | ||||
|         ) | ||||
|             try: | ||||
|                 return json.loads(await self._ws.get_message()) | ||||
|             except self.recon_errors: | ||||
|                 await self._connect() | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| @asynccontextmanager | ||||
| async def open_autorecon_ws( | ||||
|     url: str, | ||||
| 
 | ||||
|     fixture: AsyncContextManager | None = None, | ||||
| 
 | ||||
|     # time in sec between msgs received before | ||||
|     # we presume connection might need a reset. | ||||
|     msg_recv_timeout: float = 16, | ||||
| 
 | ||||
|     # count of the number of above timeouts before connection reset | ||||
|     reset_after: int = 3, | ||||
|     # TODO: proper type annot smh | ||||
|     fixture: Callable, | ||||
| 
 | ||||
| ) -> AsyncGenerator[NoBsWs, None]: | ||||
|     ''' | ||||
|     An auto-reconnect websocket (wrapper API) around | ||||
|     ``trio_websocket.open_websocket_url()`` providing automatic | ||||
|     re-connection on network errors or msg-latency timeouts (and thus | ||||
|     supports connection roaming). | ||||
|     """Apparently we can QoS for all sorts of reasons..so catch em. | ||||
| 
 | ||||
|     Here we implement a re-connect websocket interface where a bg | ||||
|     nursery runs ``WebSocketConnection.receive_message()``s in a loop | ||||
|     and restarts the full http(s) handshake on catches of certain | ||||
|     connectivity errors, or some user defined recv timeout. | ||||
| 
 | ||||
|     You can provide a ``fixture`` async-context-manager which will be | ||||
|     entered/exited around each connection reset; eg. for (re)requesting | ||||
|     subscriptions without requiring streaming setup code to rerun. | ||||
| 
 | ||||
|     ''' | ||||
|     snd: trio.MemorySendChannel | ||||
|     rcv: trio.MemoryReceiveChannel | ||||
|     snd, rcv = trio.open_memory_channel(616) | ||||
| 
 | ||||
|     async with trio.open_nursery() as n: | ||||
|         nobsws = NoBsWs( | ||||
|             url, | ||||
|             rcv, | ||||
|             msg_recv_timeout=msg_recv_timeout, | ||||
|         ) | ||||
|         await n.start( | ||||
|             partial( | ||||
|                 _reconnect_forever, | ||||
|                 url, | ||||
|                 snd, | ||||
|                 nobsws, | ||||
|                 fixture=fixture, | ||||
|                 reset_after=reset_after, | ||||
|             ) | ||||
|         ) | ||||
|         await nobsws._connected.wait() | ||||
|         assert nobsws._cs | ||||
|         assert nobsws.connected() | ||||
|     """ | ||||
|     async with AsyncExitStack() as stack: | ||||
|         ws = NoBsWs(url, stack, fixture=fixture) | ||||
|         await ws._connect() | ||||
| 
 | ||||
|         try: | ||||
|             yield nobsws | ||||
|             yield ws | ||||
| 
 | ||||
|         finally: | ||||
|             n.cancel_scope.cancel() | ||||
| 
 | ||||
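A minimal consumer sketch for the auto-reconnect API above; the endpoint url and subscription msg are hypothetical:

    from contextlib import asynccontextmanager as acm

    @acm
    async def resub(ws: NoBsWs):
        # (re)send the subscription on every (re)connect
        await ws.send_msg({'method': 'subscribe', 'params': ['BTC-USD']})
        yield  # fixture contract: must yield `None`

    async def consume_feed():
        async with open_autorecon_ws(
            'wss://example.com/stream',  # hypothetical endpoint
            fixture=resub,
        ) as ws:
            async for msg in ws:  # msgs relayed by the bg reconnect task
                print(msg)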
| 
 | ||||
| ''' | ||||
| JSONRPC request-response style machinery for transparent multiplexing | ||||
| of msgs over a `NoBsWs`. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| 
 | ||||
| class JSONRPCResult(Struct): | ||||
|     id: int | ||||
|     jsonrpc: str = '2.0' | ||||
|     result: Optional[dict] = None | ||||
|     error: Optional[dict] = None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_jsonrpc_session( | ||||
|     url: str, | ||||
|     start_id: int = 0, | ||||
|     response_type: type = JSONRPCResult, | ||||
|     msg_recv_timeout: float = float('inf'), | ||||
|     # ^NOTE, since only `deribit` is using this jsonrpc stuff atm | ||||
|     # and options mkts are generally "slow moving".. | ||||
|     # | ||||
|     # FURTHER if we break the underlying ws connection then since we | ||||
|     # don't pass a `fixture` to the task that manages `NoBsWs`, i.e. | ||||
|     # `_reconnect_forever()`, the jsonrpc "transport pipe" gets | ||||
|     # broken and never restored with whatever init sequence is required to | ||||
|     # re-establish a working req-resp session. | ||||
| 
 | ||||
| ) -> Callable[[str, dict], dict]: | ||||
|     ''' | ||||
|     Init a json-RPC-over-websocket connection to the provided `url`. | ||||
| 
 | ||||
|     A `json_rpc: Callable[[str, dict], dict]` is delivered to the | ||||
|     caller for sending requests and a bg-`trio.Task` handles | ||||
|     processing of response msgs including error reporting/raising in | ||||
|     the parent/caller task. | ||||
| 
 | ||||
|     ''' | ||||
|     # NOTE, store all request msgs so we can raise errors on the | ||||
|     # caller side! | ||||
|     req_msgs: dict[int, dict] = {} | ||||
| 
 | ||||
|     async with ( | ||||
|         trio.open_nursery() as tn, | ||||
|         open_autorecon_ws( | ||||
|             url=url, | ||||
|             msg_recv_timeout=msg_recv_timeout, | ||||
|         ) as ws | ||||
|     ): | ||||
|         rpc_id: Iterable[int] = count(start_id) | ||||
|         rpc_results: dict[int, dict] = {} | ||||
| 
 | ||||
|         async def json_rpc( | ||||
|             method: str, | ||||
|             params: dict, | ||||
|         ) -> dict: | ||||
|             ''' | ||||
|             Perform a JSON-RPC call and wait for the result; raise an | ||||
|             exception if an error field is present in the response. | ||||
|             ''' | ||||
|             nonlocal req_msgs | ||||
| 
 | ||||
|             req_id: int = next(rpc_id) | ||||
|             msg = { | ||||
|                 'jsonrpc': '2.0', | ||||
|                 'id': req_id, | ||||
|                 'method': method, | ||||
|                 'params': params | ||||
|             } | ||||
|             _id = msg['id'] | ||||
| 
 | ||||
|             result = rpc_results[_id] = { | ||||
|                 'result': None, | ||||
|                 'error': None, | ||||
|                 'event': trio.Event(),  # signal caller resp arrived | ||||
|             } | ||||
|             req_msgs[_id] = msg | ||||
| 
 | ||||
|             await ws.send_msg(msg) | ||||
| 
 | ||||
|             # wait for response before unblocking requester code | ||||
|             await rpc_results[_id]['event'].wait() | ||||
| 
 | ||||
|             if (maybe_result := result['result']): | ||||
|                 ret = maybe_result | ||||
|                 del rpc_results[_id] | ||||
| 
 | ||||
|             else: | ||||
|                 err = result['error'] | ||||
|                 raise Exception( | ||||
|                     f'JSONRPC request failed\n' | ||||
|                     f'req: {msg}\n' | ||||
|                     f'resp: {err}\n' | ||||
|                 ) | ||||
| 
 | ||||
|             if ret.error is not None: | ||||
|                 raise Exception(json.dumps(ret.error, indent=4)) | ||||
| 
 | ||||
|             return ret | ||||
| 
 | ||||
|         async def recv_task(): | ||||
|             ''' | ||||
|             Receive every ws message and store it in its corresponding | ||||
|             result field, then set the event to wake up the original sender | ||||
|             task. Also receives responses to requests originating from | ||||
|             the server side. | ||||
| 
 | ||||
|             ''' | ||||
|             nonlocal req_msgs | ||||
|             async for msg in ws: | ||||
|                 match msg: | ||||
|                     case { | ||||
|                         'result': _, | ||||
|                         'id': mid, | ||||
|                     } if res_entry := rpc_results.get(mid): | ||||
| 
 | ||||
|                         res_entry['result'] = response_type(**msg) | ||||
|                         res_entry['event'].set() | ||||
| 
 | ||||
|                     case { | ||||
|                         'result': _, | ||||
|                         'id': mid, | ||||
|                     } if not rpc_results.get(mid): | ||||
|                         log.warning( | ||||
|                             f'Unexpected ws msg: {json.dumps(msg, indent=4)}' | ||||
|                         ) | ||||
| 
 | ||||
|                     case { | ||||
|                         'method': _, | ||||
|                         'params': _, | ||||
|                     }: | ||||
|                         log.debug(f'Received\n{msg}') | ||||
| 
 | ||||
|                     case { | ||||
|                         'error': error | ||||
|                     }: | ||||
|                         # retrieve orig request msg, set error | ||||
|                         # response in original "result" msg, | ||||
|                         # THEN FINALLY set the event to signal caller | ||||
|                         # to raise the error in the parent task. | ||||
|                         req_id: int = error['id'] | ||||
|                         req_msg: dict = req_msgs[req_id] | ||||
|                         result: dict = rpc_results[req_id] | ||||
|                         result['error'] = error | ||||
|                         result['event'].set() | ||||
|                         log.error( | ||||
|                             f'JSONRPC request failed\n' | ||||
|                             f'req: {req_msg}\n' | ||||
|                             f'resp: {error}\n' | ||||
|                         ) | ||||
| 
 | ||||
|                     case _: | ||||
|                         log.warning(f'Unhandled JSON-RPC msg!?\n{msg}') | ||||
| 
 | ||||
|         tn.start_soon(recv_task) | ||||
|         yield json_rpc | ||||
|         tn.cancel_scope.cancel() | ||||
|             await stack.aclose() | ||||
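And a request-side sketch for the JSON-RPC layer above (the url, method and params are hypothetical, deribit-style):

    async def get_instruments() -> dict:
        async with open_jsonrpc_session(
            'wss://example.com/api/v2',  # hypothetical endpoint
        ) as json_rpc:
            resp: JSONRPCResult = await json_rpc(
                'public/get_instruments',
                {'currency': 'BTC'},
            )
            return resp.result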
|  |  | |||
|  | @ -0,0 +1,196 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| marketstore cli. | ||||
| 
 | ||||
| """ | ||||
| from functools import partial | ||||
| from pprint import pformat | ||||
| 
 | ||||
| from anyio_marketstore import open_marketstore_client | ||||
| import trio | ||||
| import tractor | ||||
| import click | ||||
| import numpy as np | ||||
| 
 | ||||
| from .marketstore import ( | ||||
|     get_client, | ||||
|     # stream_quotes, | ||||
|     ingest_quote_stream, | ||||
|     # _url, | ||||
|     _tick_tbk_ids, | ||||
|     mk_tbk, | ||||
| ) | ||||
| from ..cli import cli | ||||
| from .. import watchlists as wl | ||||
| from ..log import get_logger | ||||
| from ._sharedmem import ( | ||||
|     maybe_open_shm_array, | ||||
| ) | ||||
| from ._source import ( | ||||
|     base_iohlc_dtype, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
| @click.option( | ||||
|     '--url', | ||||
|     default='ws://localhost:5993/ws', | ||||
|     help='WebSocket URL of marketstore instance' | ||||
| ) | ||||
| @click.argument('names', nargs=-1) | ||||
| @click.pass_obj | ||||
| def ms_stream( | ||||
|     config: dict, | ||||
|     names: list[str], | ||||
|     url: str, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Connect to a marketstore time bucket stream for (a set of) symbol(s) | ||||
|     and print to console. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         # async for quote in stream_quotes(symbols=names): | ||||
|         #    log.info(f"Received quote:\n{quote}") | ||||
|         ... | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| # @cli.command() | ||||
| # @click.option( | ||||
| #     '--url', | ||||
| #     default=_url, | ||||
| #     help='HTTP URL of marketstore instance' | ||||
| # ) | ||||
| # @click.argument('names', nargs=-1) | ||||
| # @click.pass_obj | ||||
| # def ms_destroy(config: dict, names: list[str], url: str) -> None: | ||||
| #     """Destroy symbol entries in the local marketstore instance. | ||||
| #     """ | ||||
| #     async def main(): | ||||
| #         nonlocal names | ||||
| #         async with get_client(url) as client: | ||||
| #  | ||||
| #             if not names: | ||||
| #                 names = await client.list_symbols() | ||||
| #  | ||||
| #             # default is to wipe db entirely. | ||||
| #             answer = input( | ||||
| #                 "This will entirely wipe you local marketstore db @ " | ||||
| #                 f"{url} of the following symbols:\n {pformat(names)}" | ||||
| #                 "\n\nDelete [N/y]?\n") | ||||
| #  | ||||
| #             if answer == 'y': | ||||
| #                 for sym in names: | ||||
| #                     # tbk = _tick_tbk.format(sym) | ||||
| #                     tbk = tuple(sym, *_tick_tbk_ids) | ||||
| #                     print(f"Destroying {tbk}..") | ||||
| #                     await client.destroy(mk_tbk(tbk)) | ||||
| #             else: | ||||
| #                 print("Nothing deleted.") | ||||
| #  | ||||
| #     tractor.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
| @click.option( | ||||
|     '--tl', | ||||
|     is_flag=True, | ||||
|     help='Enable tractor logging') | ||||
| @click.option( | ||||
|     '--host', | ||||
|     default='localhost' | ||||
| ) | ||||
| @click.option( | ||||
|     '--port', | ||||
|     default=5993 | ||||
| ) | ||||
| @click.argument('symbols', nargs=-1) | ||||
| @click.pass_obj | ||||
| def storesh( | ||||
|     config, | ||||
|     tl, | ||||
|     host, | ||||
|     port, | ||||
|     symbols: list[str], | ||||
| ): | ||||
|     ''' | ||||
|     Start an IPython shell ready to query the local marketstore db. | ||||
| 
 | ||||
|     ''' | ||||
|     from piker.data.marketstore import tsdb_history_update | ||||
|     from piker._daemon import open_piker_runtime | ||||
| 
 | ||||
|     async def main(): | ||||
|         nonlocal symbols | ||||
| 
 | ||||
|         async with open_piker_runtime( | ||||
|             'storesh', | ||||
|             enable_modules=['piker.data._ahab'], | ||||
|         ): | ||||
|             symbol = symbols[0] | ||||
|             await tsdb_history_update(symbol) | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
| @click.option('--test-file', '-t', help='Test quote stream file') | ||||
| @click.option('--tl', is_flag=True, help='Enable tractor logging') | ||||
| @click.argument('name', nargs=1, required=True) | ||||
| @click.pass_obj | ||||
| def ingest(config, name, test_file, tl): | ||||
|     ''' | ||||
|     Ingest real-time broker quotes and ticks to a marketstore instance. | ||||
| 
 | ||||
|     ''' | ||||
|     # global opts | ||||
|     loglevel = config['loglevel'] | ||||
|     tractorloglevel = config['tractorloglevel'] | ||||
|     # log = config['log'] | ||||
| 
 | ||||
|     watchlist_from_file = wl.ensure_watchlists(config['wl_path']) | ||||
|     watchlists = wl.merge_watchlist(watchlist_from_file, wl._builtins) | ||||
|     symbols = watchlists[name] | ||||
| 
 | ||||
|     grouped_syms = {} | ||||
|     for sym in symbols: | ||||
|         symbol, _, provider = sym.rpartition('.') | ||||
|         if provider not in grouped_syms: | ||||
|             grouped_syms[provider] = [] | ||||
| 
 | ||||
|         grouped_syms[provider].append(symbol) | ||||
| 
 | ||||
|     async def entry_point(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|             for provider, symbols in grouped_syms.items():  | ||||
|                 await n.run_in_actor( | ||||
|                     ingest_quote_stream, | ||||
|                     name='ingest_marketstore', | ||||
|                     symbols=symbols, | ||||
|                     brokername=provider, | ||||
|                     tries=1, | ||||
|                     actorloglevel=loglevel, | ||||
|                     loglevel=tractorloglevel | ||||
|                 ) | ||||
| 
 | ||||
|     tractor.run(entry_point) | ||||
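The provider grouping in `ingest` above splits each dotted key on its last '.'; a toy illustration with hypothetical watchlist entries:

    syms = ['xbtusd.kraken', 'ethusd.kraken', 'btcusdt.binance']
    grouped: dict[str, list[str]] = {}
    for sym in syms:
        symbol, _, provider = sym.rpartition('.')
        grouped.setdefault(provider, []).append(symbol)
    # -> {'kraken': ['xbtusd', 'ethusd'], 'binance': ['btcusdt']}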
piker/data/feed.py | 1779 lines (file diff suppressed because it is too large)
							|  | @ -1,221 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Public abstractions for organizing, managing and generally operating-on | ||||
| real-time data processing data-structures. | ||||
| 
 | ||||
| "Streams, flumes, cascades and flows.." | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import tractor | ||||
| import pendulum | ||||
| import numpy as np | ||||
| 
 | ||||
| from piker.types import Struct | ||||
| from ._sharedmem import ( | ||||
|     attach_shm_array, | ||||
|     ShmArray, | ||||
|     _Token, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ..accounting import MktPair | ||||
|     from .feed import Feed | ||||
| 
 | ||||
| 
 | ||||
| class Flume(Struct): | ||||
|     ''' | ||||
|     Composite reference type which points to all the addressing | ||||
|     handles and other meta-data necessary for the read, measure and | ||||
|     management of a set of real-time updated data flows. | ||||
| 
 | ||||
|     Can be thought of as a "flow descriptor" or "flow frame" which | ||||
|     describes the high level properties of a set of data flows that | ||||
|     can be used seamlessly across process-memory boundaries. | ||||
| 
 | ||||
|     Each instance's sub-components normally include: | ||||
|      - a msg oriented quote stream provided via an IPC transport | ||||
|      - history and real-time shm buffers which are both real-time | ||||
|        updated and backfilled. | ||||
|      - associated startup indexing information related to both buffer | ||||
|        real-time-append and historical prepend addresses. | ||||
|      - low level APIs to read and measure the updated data and manage | ||||
|        queuing properties. | ||||
| 
 | ||||
|     ''' | ||||
|     mkt: MktPair | ||||
|     first_quote: dict | ||||
|     _rt_shm_token: _Token | ||||
| 
 | ||||
|     # optional since some data flows won't have a "downsampled" history | ||||
|     # buffer/stream (eg. FSPs). | ||||
|     _hist_shm_token: _Token | None = None | ||||
| 
 | ||||
|     # private shm refs loaded dynamically from tokens | ||||
|     _hist_shm: ShmArray | None = None | ||||
|     _rt_shm: ShmArray | None = None | ||||
|     _readonly: bool = True | ||||
| 
 | ||||
|     stream: tractor.MsgStream | None = None | ||||
|     izero_hist: int = 0 | ||||
|     izero_rt: int = 0 | ||||
|     throttle_rate: int | None = None | ||||
| 
 | ||||
|     # TODO: do we need this really if we can pull the `Portal` from | ||||
|     # ``tractor``'s internals? | ||||
|     feed: Feed | None = None | ||||
| 
 | ||||
|     @property | ||||
|     def rt_shm(self) -> ShmArray: | ||||
| 
 | ||||
|         if self._rt_shm is None: | ||||
|             self._rt_shm = attach_shm_array( | ||||
|                 token=self._rt_shm_token, | ||||
|                 readonly=self._readonly, | ||||
|             ) | ||||
| 
 | ||||
|         return self._rt_shm | ||||
| 
 | ||||
|     @property | ||||
|     def hist_shm(self) -> ShmArray: | ||||
| 
 | ||||
|         if self._hist_shm_token is None: | ||||
|             raise RuntimeError( | ||||
|                 'No shm token has been set for the history buffer?' | ||||
|             ) | ||||
| 
 | ||||
|         if self._hist_shm is None: | ||||
|             self._hist_shm = attach_shm_array( | ||||
|                 token=self._hist_shm_token, | ||||
|                 readonly=self._readonly, | ||||
|             ) | ||||
| 
 | ||||
|         return self._hist_shm | ||||
| 
 | ||||
|     async def receive(self) -> dict: | ||||
|         return await self.stream.receive() | ||||
| 
 | ||||
|     def get_ds_info( | ||||
|         self, | ||||
|     ) -> tuple[float, float, float]: | ||||
|         ''' | ||||
|         Compute the "downsampling" ratio info between the historical shm | ||||
|         buffer and the real-time (HFT) one. | ||||
| 
 | ||||
|         Return a tuple of the fast sample period, historical sample | ||||
|         period and ratio between them. | ||||
| 
 | ||||
|         ''' | ||||
|         times: np.ndarray = self.hist_shm.array['time'] | ||||
|         end: pendulum.DateTime = pendulum.from_timestamp(times[-1]) | ||||
|         start: pendulum.DateTime = pendulum.from_timestamp( | ||||
|             times[times != times[-1]][-1] | ||||
|         ) | ||||
|         hist_step_size_s: float = (end - start).seconds | ||||
| 
 | ||||
|         times = self.rt_shm.array['time'] | ||||
|         end = pendulum.from_timestamp(times[-1]) | ||||
|         start = pendulum.from_timestamp(times[times != times[-1]][-1]) | ||||
|         rt_step_size_s = (end - start).seconds | ||||
| 
 | ||||
|         ratio = hist_step_size_s / rt_step_size_s | ||||
|         return ( | ||||
|             rt_step_size_s, | ||||
|             hist_step_size_s, | ||||
|             ratio, | ||||
|         ) | ||||
| 
 | ||||
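A worked example of the ratio calc above (the buffer step sizes are hypothetical):

    # with 60s history bars and 1s real-time bars:
    rt_step, hist_step, ratio = flume.get_ds_info()
    # -> (1.0, 60.0, 60.0)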
|     # TODO: get native msgspec decoding working for these | ||||
|     def to_msg(self) -> dict: | ||||
| 
 | ||||
|         msg = self.to_dict() | ||||
|         msg['mkt'] = self.mkt.to_dict() | ||||
| 
 | ||||
|         # NOTE: pop all un-msg-serializable fields: | ||||
|         # - `tractor.MsgStream` | ||||
|         # - `Feed` | ||||
|         # - `ShmArray` | ||||
|         # it's expected the `.from_msg()` on the other side | ||||
|         # will get instead some kind of msg-compat version | ||||
|         # that it can load. | ||||
|         msg.pop('stream') | ||||
|         msg.pop('feed') | ||||
|         msg.pop('_rt_shm') | ||||
|         msg.pop('_hist_shm') | ||||
| 
 | ||||
|         return msg | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_msg( | ||||
|         cls, | ||||
|         msg: dict, | ||||
|         readonly: bool = True, | ||||
| 
 | ||||
|     ) -> Flume: | ||||
|         ''' | ||||
|         Load from an IPC msg presumably in either `dict` or | ||||
|         `msgspec.Struct` form. | ||||
| 
 | ||||
|         ''' | ||||
|         mkt_msg = msg.pop('mkt') | ||||
|         from ..accounting import MktPair  # cycle otherwise.. | ||||
|         mkt = MktPair.from_msg(mkt_msg) | ||||
|         msg |= {'_readonly': readonly} | ||||
|         return cls( | ||||
|             mkt=mkt, | ||||
|             **msg, | ||||
|         ) | ||||
| 
 | ||||
|     def get_index( | ||||
|         self, | ||||
|         time_s: float, | ||||
|         array: np.ndarray, | ||||
| 
 | ||||
|     ) -> int | float: | ||||
|         ''' | ||||
|         Return the array shm-buffer index for the given epoch time. | ||||
| 
 | ||||
|         ''' | ||||
|         times = array['time'] | ||||
|         first = np.searchsorted( | ||||
|             times, | ||||
|             time_s, | ||||
|             side='left', | ||||
|         ) | ||||
|         imx = times.shape[0] - 1 | ||||
|         return min(first, imx) | ||||
| 
 | ||||
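A usage sketch for the index lookup above (assumes an attached real-time shm buffer):

    arr = flume.rt_shm.array
    i = flume.get_index(time_s=float(arr['time'][-1]), array=arr)
    last_row = arr[i]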
|     # only set by external msg or creator, never | ||||
|     # manually! | ||||
|     _has_vlm: bool = True | ||||
| 
 | ||||
|     def has_vlm(self) -> bool: | ||||
| 
 | ||||
|         if not self._has_vlm: | ||||
|             return False | ||||
| 
 | ||||
|         # make sure that the instrument supports volume history | ||||
|         # (sometimes this is not the case for some commodities and | ||||
|         # derivatives) | ||||
|         vlm: np.ndarray = self.rt_shm.array['volume'] | ||||
|         return not bool( | ||||
|             np.all(np.isin(vlm, -1)) | ||||
|             or np.all(np.isnan(vlm)) | ||||
|         ) | ||||
|  | @ -23,7 +23,7 @@ Api layer likely in here... | |||
| from types import ModuleType | ||||
| from importlib import import_module | ||||
| 
 | ||||
| from ._util import get_logger | ||||
| from ..log import get_logger | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -25,55 +25,50 @@ | |||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from datetime import datetime | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Union, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| import time | ||||
| from math import isnan | ||||
| from pathlib import Path | ||||
| 
 | ||||
| from bidict import bidict | ||||
| from msgspec.msgpack import ( | ||||
|     encode, | ||||
|     decode, | ||||
| ) | ||||
| # import pyqtgraph as pg | ||||
| import msgpack | ||||
| import pyqtgraph as pg | ||||
| import numpy as np | ||||
| import tractor | ||||
| from trio_websocket import open_websocket_url | ||||
| from anyio_marketstore import (  # noqa | ||||
| from anyio_marketstore import ( | ||||
|     open_marketstore_client, | ||||
|     MarketstoreClient, | ||||
|     Params, | ||||
| ) | ||||
| import pendulum | ||||
| # TODO: import this for specific error set expected by mkts client | ||||
| # import purerpc | ||||
| 
 | ||||
| from ..data.feed import maybe_open_feed | ||||
| from . import Services | ||||
| from ._util import ( | ||||
|     log,  # sub-sys logger | ||||
|     get_console_log, | ||||
| ) | ||||
| import purerpc | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     import docker | ||||
|     from ._ahab import DockerContainer | ||||
| 
 | ||||
| from .feed import maybe_open_feed | ||||
| from ..log import get_logger, get_console_log | ||||
| 
 | ||||
| 
 | ||||
| # ahabd-supervisor and container level config | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| # container level config | ||||
| _config = { | ||||
|     'grpc_listen_port': 5995, | ||||
|     'ws_listen_port': 5993, | ||||
|     'log_level': 'debug', | ||||
|     'startup_timeout': 2, | ||||
| } | ||||
| 
 | ||||
| _yaml_config_str: str = ''' | ||||
| _yaml_config = ''' | ||||
| # piker's ``marketstore`` config. | ||||
| 
 | ||||
| # mount this config using: | ||||
|  | @ -92,12 +87,6 @@ stale_threshold: 5 | |||
| enable_add: true | ||||
| enable_remove: false | ||||
| 
 | ||||
| # SUPER DUPER CRITICAL to address a super weird issue: | ||||
| # https://github.com/pikers/piker/issues/443 | ||||
| # seems like "variable compression" is possibly borked | ||||
| # or snappy compression somehow breaks easily? | ||||
| disable_variable_compression: true | ||||
| 
 | ||||
| triggers: | ||||
|   - module: ondiskagg.so | ||||
|     on: "*/1Sec/OHLCV" | ||||
|  | @ -115,18 +104,18 @@ triggers: | |||
|     # config: | ||||
|     #     filter: "nasdaq" | ||||
| 
 | ||||
| ''' | ||||
| '''.format(**_config) | ||||
| 
 | ||||
| 
 | ||||
| def start_marketstore( | ||||
|     client: docker.DockerClient, | ||||
|     user_config: dict, | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> tuple[DockerContainer, dict[str, Any]]: | ||||
|     ''' | ||||
|     Start and supervise a marketstore instance with its config | ||||
|     bind-mounted in from the piker config directory on the system. | ||||
|     Start and supervise a marketstore instance with its config bind-mounted | ||||
|     in from the piker config directory on the system. | ||||
| 
 | ||||
|     The equivalent cli cmd to this code is: | ||||
| 
 | ||||
|  | @ -142,24 +131,19 @@ def start_marketstore( | |||
| 
 | ||||
|     mktsdir = os.path.join(config._config_dir, 'marketstore') | ||||
| 
 | ||||
|     # create dirs when dne | ||||
|     if not os.path.isdir(config._config_dir): | ||||
|         Path(config._config_dir).mkdir(parents=True, exist_ok=True) | ||||
| 
 | ||||
|     # create when dne | ||||
|     if not os.path.isdir(mktsdir): | ||||
|         os.mkdir(mktsdir) | ||||
| 
 | ||||
|     yml_file = os.path.join(mktsdir, 'mkts.yml') | ||||
|     yaml_config = _yaml_config_str.format(**user_config) | ||||
| 
 | ||||
|     if not os.path.isfile(yml_file): | ||||
|         log.warning( | ||||
|             f'No `marketstore` config exists?: {yml_file}\n' | ||||
|             'Generating new file from template:\n' | ||||
|             f'{yaml_config}\n' | ||||
|             f'{_yaml_config}\n' | ||||
|         ) | ||||
|         with open(yml_file, 'w') as yf: | ||||
|             yf.write(yaml_config) | ||||
|             yf.write(_yaml_config) | ||||
| 
 | ||||
|     # create a mount from user's local piker config dir into container | ||||
|     config_dir_mnt = docker.types.Mount( | ||||
|  | @ -182,9 +166,6 @@ def start_marketstore( | |||
|         type='bind', | ||||
|     ) | ||||
| 
 | ||||
|     grpc_listen_port = int(user_config['grpc_listen_port']) | ||||
|     ws_listen_port = int(user_config['ws_listen_port']) | ||||
| 
 | ||||
|     dcntr: DockerContainer = client.containers.run( | ||||
|         'alpacamarkets/marketstore:latest', | ||||
|         # do we need this for cmds? | ||||
|  | @ -192,88 +173,28 @@ def start_marketstore( | |||
| 
 | ||||
|         # '-p 5993:5993', | ||||
|         ports={ | ||||
|             f'{ws_listen_port}/tcp': ws_listen_port, | ||||
|             f'{grpc_listen_port}/tcp': grpc_listen_port, | ||||
|             '5993/tcp': 5993,  # jsonrpc / ws? | ||||
|             '5995/tcp': 5995,  # grpc | ||||
|         }, | ||||
|         mounts=[ | ||||
|             config_dir_mnt, | ||||
|             data_dir_mnt, | ||||
|         ], | ||||
| 
 | ||||
|         # XXX: this must be set to allow backgrounding/non-blocking | ||||
|         # usage interaction with the container's process. | ||||
|         detach=True, | ||||
| 
 | ||||
|         # stop_signal='SIGINT', | ||||
|         init=True, | ||||
|         # remove=True, | ||||
|     ) | ||||
| 
 | ||||
|     async def start_matcher(msg: str): | ||||
|         return "launching tcp listener for all services..." in msg | ||||
| 
 | ||||
|     async def stop_matcher(msg: str): | ||||
|         return ( | ||||
|             # not sure when this happens, some kinda stop condition | ||||
|             "exiting..." in msg | ||||
| 
 | ||||
|             # after we send SIGINT.. | ||||
|             or "initiating graceful shutdown due to 'interrupt' request" in msg | ||||
|         ) | ||||
| 
 | ||||
|     return ( | ||||
|         dcntr, | ||||
|         _config, | ||||
| 
 | ||||
|         # expected startup and stop msgs | ||||
|         start_matcher, | ||||
|         stop_matcher, | ||||
|         "launching tcp listener for all services...", | ||||
|         "exiting...", | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def start_ahab_daemon( | ||||
|     service_mngr: Services, | ||||
|     user_config: dict | None = None, | ||||
|     loglevel: str | None = None, | ||||
| 
 | ||||
| ) -> tuple[str, dict]: | ||||
|     ''' | ||||
|     Task entrypoint to start the marketstore docker container using the | ||||
|     service manager. | ||||
| 
 | ||||
|     ''' | ||||
|     from ._ahab import start_ahab_service | ||||
| 
 | ||||
|     # dict-merge any user settings | ||||
|     conf: dict = _config.copy() | ||||
|     if user_config: | ||||
|         conf: dict = conf | user_config | ||||
| 
 | ||||
|     dname: str = 'marketstored' | ||||
|     log.info(f'Spawning `{dname}` supervisor') | ||||
|     async with start_ahab_service( | ||||
|         service_mngr, | ||||
|         dname, | ||||
| 
 | ||||
|         # NOTE: docker-py client is passed at runtime | ||||
|         start_marketstore, | ||||
|         ep_kwargs={'user_config': conf}, | ||||
|         loglevel=loglevel, | ||||
|     ) as ( | ||||
|         _, | ||||
|         config, | ||||
|         (cid, pid), | ||||
|     ): | ||||
|         log.info( | ||||
|             f'`{dname}` up!\n' | ||||
|             f'pid: {pid}\n' | ||||
|             f'container id: {cid[:12]}\n' | ||||
|             f'config: {pformat(config)}' | ||||
|         ) | ||||
|         yield dname, conf | ||||
| 
 | ||||
| 
 | ||||
| _tick_tbk_ids: tuple[str, str] = ('1Sec', 'TICK') | ||||
| _tick_tbk: str = '{}/' + '/'.join(_tick_tbk_ids) | ||||
| 
 | ||||
|  | @ -327,6 +248,16 @@ _ohlcv_dt = [ | |||
| ] | ||||
| 
 | ||||
| 
 | ||||
| ohlc_key_map = bidict({ | ||||
|     'Epoch': 'time', | ||||
|     'Open': 'open', | ||||
|     'High': 'high', | ||||
|     'Low': 'low', | ||||
|     'Close': 'close', | ||||
|     'Volume': 'volume', | ||||
| }) | ||||
| 
 | ||||
| 
 | ||||
| def mk_tbk(keys: tuple[str, str, str]) -> str: | ||||
|     ''' | ||||
|     Generate a marketstore table key from a tuple. | ||||
|  | @ -339,7 +270,7 @@ def mk_tbk(keys: tuple[str, str, str]) -> str: | |||
| 
 | ||||
| def quote_to_marketstore_structarray( | ||||
|     quote: dict[str, Any], | ||||
|     last_fill: float | None, | ||||
|     last_fill: Optional[float] | ||||
| 
 | ||||
| ) -> np.array: | ||||
|     ''' | ||||
|  | @ -378,6 +309,24 @@ def quote_to_marketstore_structarray( | |||
|     return np.array([tuple(array_input)], dtype=_quote_dt) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_client( | ||||
|     host: str = 'localhost', | ||||
|     port: int = 5995 | ||||
| 
 | ||||
| ) -> MarketstoreClient: | ||||
|     ''' | ||||
|     Load an ``anyio_marketstore`` grpc client connected | ||||
|     to an existing ``marketstore`` server. | ||||
| 
 | ||||
|     ''' | ||||
|     async with open_marketstore_client( | ||||
|         host, | ||||
|         port | ||||
|     ) as client: | ||||
|         yield client | ||||
| 
 | ||||
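For instance (assuming a locally running `marketstored` on the default grpc port):

    async with get_client() as client:
        syms: list[str] = await client.list_symbols()
        print(syms)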
| 
 | ||||
| class MarketStoreError(Exception): | ||||
|     "Generic marketstore client error" | ||||
| 
 | ||||
|  | @ -405,6 +354,340 @@ tf_in_1s = bidict({ | |||
| }) | ||||
| 
 | ||||
| 
 | ||||
| class Storage: | ||||
|     ''' | ||||
|     High level storage api for both real-time and historical ingest. | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
|         self, | ||||
|         client: MarketstoreClient, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # TODO: eventually this should be an api/interface type that | ||||
|         # ensures we can support multiple tsdb backends. | ||||
|         self.client = client | ||||
| 
 | ||||
|         # series' cache from tsdb reads | ||||
|         self._arrays: dict[str, np.ndarray] = {} | ||||
| 
 | ||||
|     async def list_keys(self) -> list[str]: | ||||
|         return await self.client.list_symbols() | ||||
| 
 | ||||
|     async def search_keys(self, pattern: str) -> list[str]: | ||||
|         ''' | ||||
|         Search for time series keys in the storage backend. | ||||
| 
 | ||||
|         ''' | ||||
|         ... | ||||
| 
 | ||||
|     async def write_ticks(self, ticks: list) -> None: | ||||
|         ... | ||||
| 
 | ||||
|     async def load( | ||||
|         self, | ||||
|         fqsn: str, | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         dict[int, np.ndarray],  # timeframe (in secs) to series | ||||
|         Optional[datetime],  # first dt | ||||
|         Optional[datetime],  # last dt | ||||
|     ]: | ||||
| 
 | ||||
|         first_tsdb_dt, last_tsdb_dt = None, None | ||||
|         tsdb_arrays = await self.read_ohlcv( | ||||
|             fqsn, | ||||
|             # on first load we don't need to pull the max | ||||
|             # history per request size worth. | ||||
|             limit=3000, | ||||
|         ) | ||||
|         log.info(f'Loaded tsdb history {tsdb_arrays}') | ||||
| 
 | ||||
|         if tsdb_arrays: | ||||
|             fastest = list(tsdb_arrays.values())[0] | ||||
|             times = fastest['Epoch'] | ||||
|             first, last = times[0], times[-1] | ||||
|             first_tsdb_dt, last_tsdb_dt = map( | ||||
|                 pendulum.from_timestamp, [first, last] | ||||
|             ) | ||||
| 
 | ||||
|         return tsdb_arrays, first_tsdb_dt, last_tsdb_dt | ||||
| 
 | ||||
|     async def read_ohlcv( | ||||
|         self, | ||||
|         fqsn: str, | ||||
|         timeframe: Optional[Union[int, str]] = None, | ||||
|         end: Optional[int] = None, | ||||
|         limit: int = int(800e3), | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         MarketstoreClient, | ||||
|         Union[dict, np.ndarray] | ||||
|     ]: | ||||
|         client = self.client | ||||
|         syms = await client.list_symbols() | ||||
| 
 | ||||
|         if fqsn not in syms: | ||||
|             return {} | ||||
| 
 | ||||
|         tfstr = tf_in_1s[1] | ||||
| 
 | ||||
|         params = Params( | ||||
|             symbols=fqsn, | ||||
|             timeframe=tfstr, | ||||
|             attrgroup='OHLCV', | ||||
|             end=end, | ||||
|             # limit_from_start=True, | ||||
| 
 | ||||
|             # TODO: figure the max limit here given the | ||||
|             # ``purerpc`` msg size limit: 33554432 | ||||
|             limit=limit, | ||||
|         ) | ||||
| 
 | ||||
|         if timeframe is None: | ||||
|             log.info(f'starting {fqsn} tsdb granularity scan..') | ||||
|             # loop through and try to find highest granularity | ||||
|             for tfstr in tf_in_1s.values(): | ||||
|                 try: | ||||
|                     log.info(f'querying for {tfstr}@{fqsn}') | ||||
|                     params.set('timeframe', tfstr) | ||||
|                     result = await client.query(params) | ||||
|                     break | ||||
| 
 | ||||
|                 except purerpc.grpclib.exceptions.UnknownError: | ||||
|                     # XXX: this is already logged by the container and | ||||
|                     # thus shows up through `marketstored` logs relay. | ||||
|                     # log.warning(f'{tfstr}@{fqsn} not found') | ||||
|                     continue | ||||
|             else: | ||||
|                 return {} | ||||
| 
 | ||||
|         else: | ||||
|             result = await client.query(params) | ||||
| 
 | ||||
|         # TODO: it turns out column access on recarrays is actually slower: | ||||
|         # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist | ||||
|         # it might make sense to make these structured arrays? | ||||
|         # Fill out a `numpy` array-results map | ||||
|         arrays = {} | ||||
|         for fqsn, data_set in result.by_symbols().items(): | ||||
|             arrays.setdefault(fqsn, {})[ | ||||
|                 tf_in_1s.inverse[data_set.timeframe] | ||||
|             ] = data_set.array | ||||
| 
 | ||||
|         return arrays[fqsn][timeframe] if timeframe else arrays[fqsn] | ||||
| 
 | ||||
|     async def delete_ts( | ||||
|         self, | ||||
|         key: str, | ||||
|         timeframe: Optional[Union[int, str]] = None, | ||||
| 
 | ||||
|     ) -> bool: | ||||
| 
 | ||||
|         client = self.client | ||||
|         syms = await client.list_symbols() | ||||
|         print(syms) | ||||
|         # if key not in syms: | ||||
|         #     raise KeyError(f'`{fqsn}` table key not found?') | ||||
| 
 | ||||
|         return await client.destroy(tbk=key) | ||||
| 
 | ||||
|     async def write_ohlcv( | ||||
|         self, | ||||
|         fqsn: str, | ||||
|         ohlcv: np.ndarray, | ||||
|         append_and_duplicate: bool = True, | ||||
|         limit: int = int(800e3), | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # build mkts schema compat array for writing | ||||
|         mkts_dt = np.dtype(_ohlcv_dt) | ||||
|         mkts_array = np.zeros( | ||||
|             len(ohlcv), | ||||
|             dtype=mkts_dt, | ||||
|         ) | ||||
|         # copy from shm array (yes it's this easy): | ||||
|         # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays | ||||
|         mkts_array[:] = ohlcv[[ | ||||
|             'time', | ||||
|             'open', | ||||
|             'high', | ||||
|             'low', | ||||
|             'close', | ||||
|             'volume', | ||||
|         ]] | ||||
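|         # (ie. `dst[:] = src[[f0, .., fn]]` copies the named fields | ||||
|         # between structured arrays in one vectorized assignment, | ||||
|         # no per-column loop required.) | ||||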
| 
 | ||||
|         m, r = divmod(len(mkts_array), limit) | ||||
| 
 | ||||
|         for i in range(1, m + 1): | ||||
|             to_push = mkts_array[(i - 1) * limit:i * limit] | ||||
| 
 | ||||
|             # write to db | ||||
|             resp = await self.client.write( | ||||
|                 to_push, | ||||
|                 tbk=f'{fqsn}/1Sec/OHLCV', | ||||
| 
 | ||||
|                 # NOTE: this will append duplicates | ||||
|                 # for the same timestamp-index. | ||||
|                 # TODO: pre deduplicate? | ||||
|                 isvariablelength=append_and_duplicate, | ||||
|             ) | ||||
| 
 | ||||
|             log.info( | ||||
|                 f'Wrote {to_push.size} datums to tsdb\n' | ||||
|             ) | ||||
| 
 | ||||
|             for resp in resp.responses: | ||||
|                 err = resp.error | ||||
|                 if err: | ||||
|                     raise MarketStoreError(err) | ||||
| 
 | ||||
|         if r: | ||||
|             to_push = mkts_array[m*limit:] | ||||
| 
 | ||||
|             # write to db | ||||
|             resp = await self.client.write( | ||||
|                 to_push, | ||||
|                 tbk=f'{fqsn}/1Sec/OHLCV', | ||||
| 
 | ||||
|                 # NOTE: this will append duplicates | ||||
|                 # for the same timestamp-index. | ||||
|                 # TODO: pre deduplicate? | ||||
|                 isvariablelength=append_and_duplicate, | ||||
|             ) | ||||
| 
 | ||||
|             log.info( | ||||
|                 f'Wrote {to_push.size} datums to tsdb\n' | ||||
|             ) | ||||
| 
 | ||||
|             for resp in resp.responses: | ||||
|                 err = resp.error | ||||
|                 if err: | ||||
|                     raise MarketStoreError(err) | ||||
| 
 | ||||
|     # XXX: currently the only way to do this is through the CLI: | ||||
| 
 | ||||
|     # sudo ./marketstore connect --dir ~/.config/piker/data | ||||
|     # >> \show mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15 | ||||
|     # and this seems to block and use up mem.. | ||||
|     # >> \trim mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15 | ||||
| 
 | ||||
|     # relevant source code for this is here: | ||||
|     # https://github.com/alpacahq/marketstore/blob/master/cmd/connect/session/trim.go#L14 | ||||
|     # def delete_range(self, start_dt, end_dt) -> None: | ||||
|     #     ... | ||||
| 
 | ||||
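| # a minimal, self-contained sketch (not part of the original | ||||
| # module) of the chunking math used in `write_ohlcv()` above: | ||||
| # with `limit=3` and 7 rows we expect two full slices, [0:3] and | ||||
| # [3:6], plus the remainder [6:7]. | ||||
| def _chunking_demo() -> None: | ||||
|     rows = list(range(7)) | ||||
|     limit = 3 | ||||
|     m, r = divmod(len(rows), limit) | ||||
|     chunks = [ | ||||
|         rows[(i - 1) * limit:i * limit] | ||||
|         for i in range(1, m + 1) | ||||
|     ] | ||||
|     if r:  # trailing partial chunk | ||||
|         chunks.append(rows[m * limit:]) | ||||
|     assert chunks == [[0, 1, 2], [3, 4, 5], [6]] | ||||
| 
 | ||||
| 
 | ||||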
| @acm | ||||
| async def open_storage_client( | ||||
|     fqsn: str, | ||||
|     period: Optional[Union[int, str]] = None,  # in seconds | ||||
| 
 | ||||
| ) -> tuple[Storage, dict[str, np.ndarray]]: | ||||
|     ''' | ||||
|     Load a series by key and deliver in ``numpy`` struct array format. | ||||
| 
 | ||||
|     ''' | ||||
|     async with ( | ||||
|         # eventually a storage backend endpoint | ||||
|         get_client() as client, | ||||
|     ): | ||||
|         # slap on our wrapper api | ||||
|         yield Storage(client) | ||||
| 
 | ||||
| 
 | ||||
| async def tsdb_history_update( | ||||
|     fqsn: Optional[str] = None, | ||||
| 
 | ||||
| ) -> list[str]: | ||||
| 
 | ||||
|     # TODO: real-time dedicated task for ensuring | ||||
|     # history consistency between the tsdb, shm and real-time feed.. | ||||
| 
 | ||||
|     # update sequence design notes: | ||||
| 
 | ||||
|     # - load existing highest frequency data from mkts | ||||
|     #   * how do we want to offer this to the UI? | ||||
|     #    - lazy loading? | ||||
|     #    - try to load it all and expect graphics caching/diffing | ||||
|     #      to  hide extra bits that aren't in view? | ||||
| 
 | ||||
|     # - compute the diff between latest data from broker and shm | ||||
|     #   * use sql api in mkts to determine where the backend should | ||||
|     #     start querying for data? | ||||
|     #   * append any diff with new shm length | ||||
|     #   * determine missing (gapped) history by scanning | ||||
|     #   * how far back do we look? | ||||
| 
 | ||||
|     # - begin rt update ingest and aggregation | ||||
|     #   * could start by always writing ticks to mkts instead of | ||||
|     #     worrying about a shm queue for now. | ||||
|     #   * we have a short list of shm queues worth grokking: | ||||
|     #     - https://github.com/pikers/piker/issues/107 | ||||
|     #   * the original data feed arch blurb: | ||||
|     #     - https://github.com/pikers/piker/issues/98 | ||||
|     # | ||||
|     profiler = pg.debug.Profiler( | ||||
|         disabled=False,  # not pg_profile_enabled(), | ||||
|         delayed=False, | ||||
|     ) | ||||
| 
 | ||||
|     async with ( | ||||
|         open_storage_client(fqsn) as storage, | ||||
| 
 | ||||
|         maybe_open_feed( | ||||
|             [fqsn], | ||||
|             start_stream=False, | ||||
| 
 | ||||
|         ) as (feed, stream), | ||||
|     ): | ||||
|         profiler(f'opened feed for {fqsn}') | ||||
| 
 | ||||
|         to_append = feed.shm.array | ||||
|         to_prepend = None | ||||
| 
 | ||||
|         if fqsn: | ||||
|             symbol = feed.symbols.get(fqsn) | ||||
|             if symbol: | ||||
|                 fqsn = symbol.front_fqsn() | ||||
| 
 | ||||
|             # diff db history with shm and only write the missing portions | ||||
|             ohlcv = feed.shm.array | ||||
| 
 | ||||
|             # TODO: use pg profiler | ||||
|             tsdb_arrays = await storage.read_ohlcv(fqsn) | ||||
|             # hist diffing | ||||
|             if tsdb_arrays: | ||||
|                 for secs in (1, 60): | ||||
|                     ts = tsdb_arrays.get(secs) | ||||
|                     if ts is not None and len(ts): | ||||
|                         # these aren't currently used but can be referenced from | ||||
|                         # within the embedded ipython shell below. | ||||
|                         to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]] | ||||
|                         to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]] | ||||
| 
 | ||||
|             profiler('Finished db arrays diffs') | ||||
| 
 | ||||
|         syms = await storage.client.list_symbols() | ||||
|         log.info(f'Existing tsdb symbol set:\n{pformat(syms)}') | ||||
|         profiler(f'listed symbols {syms}') | ||||
| 
 | ||||
|         # TODO: ask if user wants to write history for detected | ||||
|         # available shm buffers? | ||||
|         from tractor.trionics import ipython_embed | ||||
|         await ipython_embed() | ||||
| 
 | ||||
|         # for array in [to_append, to_prepend]: | ||||
|         #     if array is None: | ||||
|         #         continue | ||||
| 
 | ||||
|         #     log.info( | ||||
|         #         f'Writing datums {array.size} -> to tsdb from shm\n' | ||||
|         #     ) | ||||
|         #     await storage.write_ohlcv(fqsn, array) | ||||
| 
 | ||||
|         # profiler('Finished db writes') | ||||
| 
 | ||||
| 
 | ||||
| async def ingest_quote_stream( | ||||
|     symbols: list[str], | ||||
|     brokername: str, | ||||
|  | @ -416,7 +699,6 @@ async def ingest_quote_stream( | |||
|     Ingest a broker quote stream into a ``marketstore`` tsdb. | ||||
| 
 | ||||
|     ''' | ||||
|     from piker.storage.marketstore import get_client | ||||
|     async with ( | ||||
|         maybe_open_feed(brokername, symbols, loglevel=loglevel) as feed, | ||||
|         get_client() as ms_client, | ||||
|  | @ -492,13 +774,12 @@ async def stream_quotes( | |||
|     async with open_websocket_url(f'ws://{host}:{port}/ws') as ws: | ||||
|         # send subs topics to server | ||||
|         resp = await ws.send_message( | ||||
| 
 | ||||
|             encode({'streams': list(tbks.values())}) | ||||
|             msgpack.dumps({'streams': list(tbks.values())}) | ||||
|         ) | ||||
|         log.info(resp) | ||||
| 
 | ||||
|         async def recv() -> dict[str, Any]: | ||||
|             return decode((await ws.get_message()), encoding='utf-8') | ||||
|             return msgpack.loads((await ws.get_message()), encoding='utf-8') | ||||
| 
 | ||||
|         streams = (await recv())['streams'] | ||||
|         log.info(f"Subscribed to {streams}") | ||||
|  | @ -1,173 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Tick event stream processing, filter-by-types, format-normalization. | ||||
| 
 | ||||
| ''' | ||||
| from itertools import chain | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AsyncIterator, | ||||
| ) | ||||
| 
 | ||||
| # tick-type-classes template for all possible "lowest level" events | ||||
| # that can be emitted by the "top of book" L1 queues and | ||||
| # price-matching (with eventual clearing) in a double auction | ||||
| # market (queuing) system. | ||||
| _tick_groups: dict[str, set[str]] = { | ||||
|     'clears': {'trade', 'dark_trade', 'last'}, | ||||
|     'bids': {'bid', 'bsize'}, | ||||
|     'asks': {'ask', 'asize'}, | ||||
| } | ||||
| 
 | ||||
| # XXX: also define the flattened set of all such "fundamental ticks" | ||||
| # so that it can be used as a filter, eg. in the graphics display | ||||
| # loop to compute running windowed y-ranges B) | ||||
| _auction_ticks: set[str] = set.union(*_tick_groups.values()) | ||||
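| # eg. `[t for t in ticks if t['type'] in _auction_ticks]` filters | ||||
| # a raw tick list down to just these fundamental events. | ||||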
| 
 | ||||
| 
 | ||||
| def frame_ticks( | ||||
|     quote: dict[str, Any], | ||||
| 
 | ||||
|     ticks_by_type: dict | None = None, | ||||
|     ticks_in_order: list[dict[str, Any]] | None = None | ||||
| 
 | ||||
| ) -> dict[ | ||||
|     str, | ||||
|     list[dict[str, Any]] | ||||
| ]: | ||||
|     ''' | ||||
|     XXX: build a tick-by-type table of lists | ||||
|     of tick messages. This allows for less | ||||
|     iteration on the receiver side since the | ||||
|     "latest tick event" of each type can be | ||||
|     looked up by indexing the last entry in | ||||
|     its sub-list. | ||||
| 
 | ||||
|     tbt = { | ||||
|         'types': ['bid', 'asize', 'last', .. '<type_n>'], | ||||
| 
 | ||||
|         'bid': [tick0, tick1, tick2, .., tickn], | ||||
|         'asize': [tick0, tick1, tick2, .., tickn], | ||||
|         'last': [tick0, tick1, tick2, .., tickn], | ||||
|         ... | ||||
|         '<type_n>': [tick0, tick1, tick2, .., tickn], | ||||
|     } | ||||
| 
 | ||||
|     If `ticks_in_order` is provided, append any retrieved ticks | ||||
|     since last iteration into this array/buffer/list. | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: once we decide to really get fancy we should | ||||
|     # have a shared mem tick buffer that is just | ||||
|     # continually filled and the UI just reads from it | ||||
|     # at its display rate. | ||||
| 
 | ||||
|     tbt = ticks_by_type if ticks_by_type is not None else {} | ||||
|     if not (ticks := quote.get('ticks')): | ||||
|         return tbt | ||||
| 
 | ||||
|     # append in reverse FIFO order for in-order iteration on | ||||
|     # receiver side. | ||||
|     tick: dict[str, Any] | ||||
|     for tick in ticks: | ||||
|         tbt.setdefault( | ||||
|             tick['type'], | ||||
|             [], | ||||
|         ).append(tick) | ||||
| 
 | ||||
|     # TODO: do we need this any more or can we just | ||||
|     # expect the receiver to unwind the below | ||||
|     # `ticks_by_type: dict`? | ||||
|     # => unwinding would potentially require a | ||||
|     # `dict[str, set | list]` instead with an | ||||
|     # included `'types'` field which is an (ordered) | ||||
|     # set of tick type fields in the order in which | ||||
|     # they arrived? | ||||
|     if ticks_in_order: | ||||
|         ticks_in_order.extend(ticks) | ||||
| 
 | ||||
|     return tbt | ||||
| 
 | ||||
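| # usage sketch (not part of the original module, quote contents | ||||
| # illustrative only): build the tick-by-type table then read the | ||||
| # latest tick of a given type by indexing its sub-list's tail. | ||||
| def _frame_ticks_demo() -> None: | ||||
|     quote = { | ||||
|         'ticks': [ | ||||
|             {'type': 'bid', 'price': 100.0, 'size': 1}, | ||||
|             {'type': 'trade', 'price': 100.25, 'size': 2}, | ||||
|             {'type': 'bid', 'price': 100.25, 'size': 3}, | ||||
|         ], | ||||
|     } | ||||
|     tbt = frame_ticks(quote) | ||||
|     assert tbt['bid'][-1]['price'] == 100.25  # latest bid event | ||||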
| 
 | ||||
| def iterticks( | ||||
|     quote: dict, | ||||
|     types: tuple[str] = ( | ||||
|         'trade', | ||||
|         'dark_trade', | ||||
|     ), | ||||
|     deduplicate_darks: bool = False, | ||||
|     reverse: bool = False, | ||||
| 
 | ||||
|     # TODO: should we offer delegating to `frame_ticks()` above | ||||
|     # with this? | ||||
|     frame_by_type: bool = False, | ||||
| 
 | ||||
| ) -> AsyncIterator: | ||||
|     ''' | ||||
|     Iterate through ticks delivered per quote cycle, filter and | ||||
|     yield any declared in `types`. | ||||
| 
 | ||||
|     ''' | ||||
|     if deduplicate_darks: | ||||
|         assert 'dark_trade' in types | ||||
| 
 | ||||
|     # print(f"{quote}\n\n") | ||||
|     ticks = quote.get('ticks', ()) | ||||
|     trades = {} | ||||
|     darks = {} | ||||
| 
 | ||||
|     if ticks: | ||||
| 
 | ||||
|         # do a first pass and attempt to remove duplicate dark | ||||
|         # trades with the same tick signature. | ||||
|         if deduplicate_darks: | ||||
|             for tick in ticks: | ||||
|                 ttype = tick.get('type') | ||||
| 
 | ||||
|                 time = tick.get('time', None) | ||||
|                 if time: | ||||
|                     sig = ( | ||||
|                         time, | ||||
|                         tick['price'], | ||||
|                         tick.get('size') | ||||
|                     ) | ||||
| 
 | ||||
|                     if ttype == 'dark_trade': | ||||
|                         darks[sig] = tick | ||||
| 
 | ||||
|                     elif ttype == 'trade': | ||||
|                         trades[sig] = tick | ||||
| 
 | ||||
|             # filter duplicates | ||||
|             for sig, tick in trades.items(): | ||||
|                 tick = darks.pop(sig, None) | ||||
|                 if tick: | ||||
|                     ticks.remove(tick) | ||||
|                     # print(f'DUPLICATE {tick}') | ||||
| 
 | ||||
|             # re-insert ticks | ||||
|             ticks.extend(list(chain(trades.values(), darks.values()))) | ||||
| 
 | ||||
|         # most-recent-first | ||||
|         if reverse: | ||||
|             ticks = reversed(ticks) | ||||
| 
 | ||||
|         for tick in ticks: | ||||
|             # print(f"{quote['symbol']}: {tick}") | ||||
|             ttype = tick.get('type') | ||||
|             if ttype in types: | ||||
|                 yield tick | ||||
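| 
 | ||||
| 
 | ||||
| # usage sketch (not part of the original module, quote dict | ||||
| # illustrative only): pull just the clearing events out of a | ||||
| # single quote cycle using the default `types` filter. | ||||
| def _iterticks_demo() -> None: | ||||
|     quote = { | ||||
|         'ticks': [ | ||||
|             {'type': 'bid', 'price': 10.0, 'size': 1}, | ||||
|             {'type': 'trade', 'price': 10.1, 'size': 2, 'time': 1}, | ||||
|         ], | ||||
|     } | ||||
|     clears = list(iterticks(quote)) | ||||
|     assert [t['type'] for t in clears] == ['trade'] | ||||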
|  | @ -0,0 +1,68 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Built-in (extension) types. | ||||
| 
 | ||||
| """ | ||||
| from typing import Optional | ||||
| from pprint import pformat | ||||
| 
 | ||||
| import msgspec | ||||
| 
 | ||||
| 
 | ||||
| class Struct( | ||||
|     msgspec.Struct, | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     # tag='pikerstruct', | ||||
|     # tag=True, | ||||
| ): | ||||
|     ''' | ||||
|     A "human friendlier" (aka repl buddy) struct subtype. | ||||
| 
 | ||||
|     ''' | ||||
|     def to_dict(self) -> dict: | ||||
|         return { | ||||
|             f: getattr(self, f) | ||||
|             for f in self.__struct_fields__ | ||||
|         } | ||||
| 
 | ||||
|     def __repr__(self): | ||||
|         return f'Struct({pformat(self.to_dict())})' | ||||
| 
 | ||||
|     def copy( | ||||
|         self, | ||||
|         update: Optional[dict] = None, | ||||
| 
 | ||||
|     ) -> msgspec.Struct: | ||||
|         ''' | ||||
|         Validate-typecast all self-defined fields and return a copy of us | ||||
|         with all such fields. | ||||
| 
 | ||||
|         This is kinda like the default behaviour in `pydantic.BaseModel`. | ||||
| 
 | ||||
|         ''' | ||||
|         if update: | ||||
|             for k, v in update.items(): | ||||
|                 setattr(self, k, v) | ||||
| 
 | ||||
|         # roundtrip serialize to validate | ||||
|         return msgspec.msgpack.Decoder( | ||||
|             type=type(self) | ||||
|         ).decode( | ||||
|             msgspec.msgpack.Encoder().encode(self) | ||||
|         ) | ||||
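| 
 | ||||
| 
 | ||||
| # usage sketch (not part of the original module, field names | ||||
| # illustrative only): subclasses are regular `msgspec` structs | ||||
| # and `.copy(update=...)` re-validates via the msgpack | ||||
| # round-trip, so type errors surface eagerly. | ||||
| def _struct_demo() -> None: | ||||
|     class Point(Struct): | ||||
|         x: float | ||||
|         y: float | ||||
| 
 | ||||
|     p = Point(x=1.0, y=2.0) | ||||
|     p2 = p.copy(update={'y': 3.0}) | ||||
|     assert (p2.x, p2.y) == (1.0, 3.0) | ||||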
|  | @ -1,265 +0,0 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| ''' | ||||
| Data feed synchronization protocols, init msgs, and general | ||||
| data-provider-backend-agnostic schema definitions. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from decimal import Decimal | ||||
| from pprint import pformat | ||||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import field | ||||
| 
 | ||||
| from piker.types import Struct | ||||
| from piker.accounting import ( | ||||
|     Asset, | ||||
|     MktPair, | ||||
| ) | ||||
| from ._util import log | ||||
| 
 | ||||
| 
 | ||||
| class FeedInitializationError(ValueError): | ||||
|     ''' | ||||
|     Live data feed setup failed due to API / msg incompatibility! | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
| 
 | ||||
| class FeedInit(Struct, frozen=True): | ||||
|     ''' | ||||
|     A stringent data provider startup msg schema validator. | ||||
| 
 | ||||
|     The fields defined here are matched with those absolutely required | ||||
|     from each backend broker/data provider. | ||||
| 
 | ||||
|     ''' | ||||
|     mkt_info: MktPair | ||||
| 
 | ||||
|     # NOTE: only field we use rn in ``.data.feed`` | ||||
|     # TODO: maybe make a SamplerConfig(Struct)? | ||||
|     shm_write_opts: dict[str, Any] = field( | ||||
|         default_factory=lambda: { | ||||
|             'has_vlm': True, | ||||
|             'sum_tick_vlm': True, | ||||
|         } | ||||
|     ) | ||||
| 
 | ||||
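| # usage sketch (not part of the original module; mkt values are | ||||
| # illustrative only): a backend's `stream_quotes()` should | ||||
| # deliver one of these per requested fqme via `.started()`, here | ||||
| # built with the same `MktPair.from_fqme()` kwargs used in the | ||||
| # legacy-compat path below. | ||||
| def _feed_init_sketch() -> FeedInit: | ||||
|     return FeedInit( | ||||
|         mkt_info=MktPair.from_fqme( | ||||
|             fqme='btcusdt.binance', | ||||
|             price_tick=Decimal('0.01'), | ||||
|             size_tick=Decimal('0.001'), | ||||
|             bs_mktid='BTCUSDT', | ||||
|         ), | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||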
| # XXX: we group backend endpoints into 3 | ||||
| # groups to determine "degrees" of functionality. | ||||
| _eps: dict[str, list[str]] = { | ||||
| 
 | ||||
|     # basic API `Client` layer | ||||
|     'middleware': [ | ||||
|         'get_client', | ||||
|     ], | ||||
| 
 | ||||
|     # (live) data streaming / loading / search | ||||
|     'datad': [ | ||||
|         'get_mkt_info', | ||||
|         'open_history_client', | ||||
|         'open_symbol_search', | ||||
|         'stream_quotes', | ||||
|     ], | ||||
| 
 | ||||
|     # live order control and trading | ||||
|     'brokerd': [ | ||||
|         'trades_dialogue', | ||||
|         'open_trade_dialog',  # live order ctl | ||||
|         'norm_trade', # ledger normalizer for txns | ||||
|     ], | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| def validate_backend( | ||||
|     mod: ModuleType, | ||||
|     syms: list[str], | ||||
|     init_msgs: list[FeedInit] | dict[str, dict[str, Any]], | ||||
| 
 | ||||
|     # TODO: do a module method scan and report mismatches. | ||||
|     check_eps: bool = False, | ||||
| 
 | ||||
|     api_log_msg_level: str = 'critical' | ||||
| 
 | ||||
| ) -> FeedInit: | ||||
|     ''' | ||||
|     Fail on malformed live quotes feed config/init or warn on changes | ||||
|     that haven't been implemented by this backend yet. | ||||
| 
 | ||||
|     ''' | ||||
|     for daemon_name, eps in _eps.items(): | ||||
|         for name in eps: | ||||
|             ep: Callable = getattr( | ||||
|                 mod, | ||||
|                 name, | ||||
|                 None, | ||||
|             ) | ||||
|             if ep is None: | ||||
|                 log.warning( | ||||
|                     f'Provider backend {mod.name} is missing ' | ||||
|                     f'{daemon_name} support :(\n' | ||||
|                     f'The following endpoint is missing: {name}' | ||||
|                 ) | ||||
| 
 | ||||
|     inits: list[ | ||||
|         FeedInit | dict[str, Any] | ||||
|     ] = init_msgs | ||||
| 
 | ||||
|     # convert to list if from old dict-style | ||||
|     if isinstance(init_msgs, dict): | ||||
|         inits = list(init_msgs.values()) | ||||
| 
 | ||||
|     init: FeedInit | dict[str, Any] | ||||
|     for i, init in enumerate(inits): | ||||
| 
 | ||||
|         # XXX: eventually this WILL NOT necessarily be true. | ||||
|         if i > 0: | ||||
|             assert not len(init_msgs) == 1 | ||||
|             if isinstance(init_msgs, dict): | ||||
|                 keys: set = set(init_msgs.keys()) - set(syms) | ||||
|                 raise FeedInitializationError( | ||||
|                     'TOO MANY INIT MSGS!\n' | ||||
|                     f'Unexpected keys: {keys}\n' | ||||
|                     'ALL MSGS:\n' | ||||
|                     f'{pformat(init_msgs)}\n' | ||||
|                 ) | ||||
|             else: | ||||
|                 raise FeedInitializationError( | ||||
|                     'TOO MANY INIT MSGS!\n' | ||||
|                     f'{pformat(init_msgs)}\n' | ||||
|                 ) | ||||
| 
 | ||||
|     # TODO: once all backends are updated we can remove this branching. | ||||
|     rx_msg: bool = False | ||||
|     warn_msg: str = '' | ||||
|     if not isinstance(init, FeedInit): | ||||
|         warn_msg += ( | ||||
|             '\n' | ||||
|             '--------------------------\n' | ||||
|             ':::DEPRECATED API STYLE:::\n' | ||||
|             '--------------------------\n' | ||||
|             f'`{mod.name}.stream_quotes()` should deliver ' | ||||
|             '`.started(FeedInit)`\n' | ||||
|             f'|-> CURRENTLY it is using DEPRECATED `.started(dict)` style!\n' | ||||
|             f'|-> SEE `FeedInit` in `piker.data.validate`\n' | ||||
|             '--------------------------------------------\n' | ||||
|         ) | ||||
|     else: | ||||
|         rx_msg = True | ||||
| 
 | ||||
|     # verify feed init state / schema | ||||
|     bs_fqme: str  # backend specific fqme | ||||
|     mkt: MktPair | ||||
| 
 | ||||
|     match init: | ||||
| 
 | ||||
|         # backend is using old dict msg delivery | ||||
|         case { | ||||
|             'symbol_info': dict(symbol_info), | ||||
|             'fqsn': bs_fqme, | ||||
|         } | { | ||||
|             'mkt_info': dict(symbol_info), | ||||
|             'fqsn': bs_fqme, | ||||
|         }: | ||||
|             symbol_info: dict | ||||
|             warn_msg += ( | ||||
|                 'It may also still be using the legacy `Symbol` style API\n' | ||||
|                 'IT SHOULD BE PORTED TO THE NEW ' | ||||
|                 '`.accounting._mktinfo.MktPair`\n' | ||||
|                 'STATTTTT!!!\n' | ||||
|             ) | ||||
| 
 | ||||
|             # XXX use default legacy (aka discrete precision) mkt | ||||
|             # price/size_ticks if none delivered. | ||||
|             price_tick = symbol_info.get( | ||||
|                 'price_tick_size', | ||||
|                 Decimal('0.01'), | ||||
|             ) | ||||
|             size_tick = symbol_info.get( | ||||
|                 'lot_tick_size', | ||||
|                 Decimal('1'), | ||||
|             ) | ||||
|             bs_mktid = init.get('bs_mktid') or bs_fqme | ||||
|             mkt = MktPair.from_fqme( | ||||
|                 fqme=f'{bs_fqme}.{mod.name}', | ||||
| 
 | ||||
|                 price_tick=price_tick, | ||||
|                 size_tick=size_tick, | ||||
| 
 | ||||
|                 bs_mktid=str(bs_mktid), | ||||
|                 _atype=symbol_info['asset_type'] | ||||
|             ) | ||||
| 
 | ||||
|         # backend is using new `MktPair` but not entirely | ||||
|         case { | ||||
|             'mkt_info': MktPair( | ||||
|                 dst=Asset(), | ||||
|             ) as mkt, | ||||
|             'fqsn': bs_fqme, | ||||
|         }: | ||||
|             warn_msg += ( | ||||
|                 f'{mod.name} in API compat transition?\n' | ||||
|                 "It's half dict, half man..\n" | ||||
|                 '-------------------------------------\n' | ||||
|             ) | ||||
| 
 | ||||
|         case FeedInit( | ||||
|             mkt_info=MktPair(dst=Asset()) as mkt, | ||||
|             shm_write_opts=dict(shm_opts), | ||||
|         ) as init: | ||||
|             name: str = mod.name | ||||
|             log.info( | ||||
|                 f"{name}'s `MktPair` info:\n" | ||||
|                 f'{pformat(mkt.to_dict())}\n' | ||||
|                 f'shm conf: {pformat(shm_opts)}\n' | ||||
|             ) | ||||
| 
 | ||||
|         case _: | ||||
|             raise FeedInitializationError(init) | ||||
| 
 | ||||
|     # build a msg if we received a dict for input. | ||||
|     if not rx_msg: | ||||
|         assert bs_fqme in mkt.fqme | ||||
|         init = FeedInit( | ||||
|             mkt_info=mkt, | ||||
|             shm_write_opts=init.get('shm_write_opts'), | ||||
|         ) | ||||
| 
 | ||||
|     # `MktPair` value audits | ||||
|     mkt = init.mkt_info | ||||
|     assert mkt.type_key | ||||
| 
 | ||||
|     # backend is using new `MktPair` but not embedded `Asset` types | ||||
|     # for the .src/.dst.. | ||||
|     if not isinstance(mkt.src, Asset): | ||||
|         warn_msg += ( | ||||
|             f'ALSO, {mod.name.upper()} should try to deliver\n' | ||||
|             'the new `MktPair.src: Asset` field!\n' | ||||
|             '-----------------------------------------------\n' | ||||
|         ) | ||||
| 
 | ||||
|     # complain about any non-idealities | ||||
|     if warn_msg: | ||||
|         # TODO: would be nice to register an API_COMPAT or something in | ||||
|         # maybe cyan for this in general throughout piker no? | ||||
|         logmeth = getattr(log, api_log_msg_level) | ||||
|         logmeth(warn_msg) | ||||
| 
 | ||||
|     return init.copy() | ||||
|  | @ -22,40 +22,17 @@ from typing import AsyncIterator | |||
| 
 | ||||
| import numpy as np | ||||
| 
 | ||||
| from ._api import ( | ||||
|     maybe_mk_fsp_shm, | ||||
|     Fsp, | ||||
| ) | ||||
| from ._engine import ( | ||||
|     cascade, | ||||
|     Cascade, | ||||
| ) | ||||
| from ._volume import ( | ||||
|     dolla_vlm, | ||||
|     flow_rates, | ||||
|     tina_vwap, | ||||
| ) | ||||
| from ._engine import cascade | ||||
| 
 | ||||
| __all__: list[str] = [ | ||||
|     'cascade', | ||||
|     'Cascade', | ||||
|     'maybe_mk_fsp_shm', | ||||
|     'Fsp', | ||||
|     'dolla_vlm', | ||||
|     'flow_rates', | ||||
|     'tina_vwap', | ||||
| ] | ||||
| __all__ = ['cascade'] | ||||
| 
 | ||||
| 
 | ||||
| async def latency( | ||||
|     source: 'TickStream[Dict[str, float]]',  # noqa | ||||
|     ohlcv: np.ndarray | ||||
| 
 | ||||
| ) -> AsyncIterator[np.ndarray]: | ||||
|     ''' | ||||
|     Latency measurements, broker to piker. | ||||
| 
 | ||||
|     ''' | ||||
|     """Latency measurements, broker to piker. | ||||
|     """ | ||||
|     # TODO: do we want to offer yielding this async | ||||
|     # before the rt data connection comes up? | ||||
| 
 | ||||
|  |  | |||
|  | @ -174,10 +174,18 @@ def fsp( | |||
|     return Fsp(wrapped, outputs=(wrapped.__name__,)) | ||||
| 
 | ||||
| 
 | ||||
| def mk_fsp_shm_key( | ||||
|     sym: str, | ||||
|     target: Fsp | ||||
| 
 | ||||
| ) -> str: | ||||
|     uid = tractor.current_actor().uid | ||||
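|     # eg. for an actor uid of ('fsp.0', '<uuid>') and an fsp named | ||||
|     # 'rsi' this renders something like (values illustrative only): | ||||
|     # 'btcusdt.fsp.rsi.fsp.0.<uuid>' | ||||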
|     return f'{sym}.fsp.{target.name}.{".".join(uid)}' | ||||
| 
 | ||||
| 
 | ||||
| def maybe_mk_fsp_shm( | ||||
|     sym: str, | ||||
|     target: Fsp, | ||||
|     size: int, | ||||
|     readonly: bool = True, | ||||
| 
 | ||||
| ) -> (str, ShmArray, bool): | ||||
|  | @ -186,27 +194,20 @@ def maybe_mk_fsp_shm( | |||
|     exists, otherwise load the shm already existing for that token. | ||||
| 
 | ||||
|     ''' | ||||
|     if not isinstance(sym, str): | ||||
|         raise ValueError('`sym: str` should be file-name-friendly') | ||||
|     assert isinstance(sym, str), '`sym` should be file-name-friendly `str`' | ||||
| 
 | ||||
|     # TODO: load output types from `Fsp` | ||||
|     # - should `index` be a required internal field? | ||||
|     fsp_dtype = np.dtype( | ||||
|         [('index', int)] | ||||
|         + | ||||
|         [('time', float)] | ||||
|         + | ||||
|         [('index', int)] + | ||||
|         [(field_name, float) for field_name in target.outputs] | ||||
|     ) | ||||
| 
 | ||||
|     # (attempt to) uniquely key the fsp shm buffers | ||||
|     actor_name, uuid = tractor.current_actor().uid | ||||
|     uuid_snip: str = uuid[:16] | ||||
|     key: str = f'piker.{actor_name}[{uuid_snip}].{sym}.{target.name}' | ||||
|     key = mk_fsp_shm_key(sym, target) | ||||
| 
 | ||||
|     shm, opened = maybe_open_shm_array( | ||||
|         key, | ||||
|         size=size, | ||||
|         # TODO: create entry for each time frame | ||||
|         dtype=fsp_dtype, | ||||
|         readonly=readonly, | ||||
|     ) | ||||
|  |  | |||
|  | @ -18,43 +18,41 @@ | |||
| core task logic for processing chains | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from dataclasses import dataclass | ||||
| from functools import partial | ||||
| from typing import ( | ||||
|     AsyncIterator, | ||||
|     Callable, | ||||
|     AsyncIterator, Callable, Optional, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| import numpy as np | ||||
| import pyqtgraph as pg | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| import tractor | ||||
| from tractor.msg import NamespacePath | ||||
| 
 | ||||
| from piker.types import Struct | ||||
| from ..log import get_logger, get_console_log | ||||
| from .. import data | ||||
| from ..data.feed import ( | ||||
|     Flume, | ||||
|     Feed, | ||||
| ) | ||||
| from ..data import attach_shm_array | ||||
| from ..data.feed import Feed | ||||
| from ..data._sharedmem import ShmArray | ||||
| from ..data._sampling import ( | ||||
|     _default_delay_s, | ||||
|     open_sample_stream, | ||||
| ) | ||||
| from ..accounting import MktPair | ||||
| from ..data._source import Symbol | ||||
| from ._api import ( | ||||
|     Fsp, | ||||
|     _load_builtins, | ||||
|     _Token, | ||||
| ) | ||||
| from ..toolz import Profiler | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| @dataclass | ||||
| class TaskTracker: | ||||
|     complete: trio.Event | ||||
|     cs: trio.CancelScope | ||||
| 
 | ||||
| 
 | ||||
| async def filter_quotes_by_sym( | ||||
| 
 | ||||
|     sym: str, | ||||
|  | @ -75,190 +73,50 @@ async def filter_quotes_by_sym( | |||
|         if quote: | ||||
|             yield quote | ||||
| 
 | ||||
| # TODO: unifying the abstractions in this FSP subsys/layer: | ||||
| # -[ ] move the `.data.flows.Flume` type into this | ||||
| #   module/subsys/pkg? | ||||
| # -[ ] ideas for further abstractions as per | ||||
| #   - https://github.com/pikers/piker/issues/216, | ||||
| #   - https://github.com/pikers/piker/issues/270: | ||||
| #     - a (financial signal) ``Flow`` would be a "collection" of such | ||||
| #     minimal cascades. Some engineering-based jargon concepts: | ||||
| #     - https://en.wikipedia.org/wiki/Signal_chain | ||||
| #     - https://en.wikipedia.org/wiki/Daisy_chain_(electrical_engineering) | ||||
| #     - https://en.wikipedia.org/wiki/Audio_signal_flow | ||||
| #     - https://en.wikipedia.org/wiki/Digital_signal_processing#Implementation | ||||
| #     - https://en.wikipedia.org/wiki/Dataflow_programming | ||||
| #     - https://en.wikipedia.org/wiki/Signal_programming | ||||
| #     - https://en.wikipedia.org/wiki/Incremental_computing | ||||
| #     - https://en.wikipedia.org/wiki/Signal-flow_graph | ||||
| #     - https://en.wikipedia.org/wiki/Signal-flow_graph#Basic_components | ||||
| 
 | ||||
| # -[ ] we probably want to eval THE BELOW design and unify with the | ||||
| #   proto `TaskManager` in the `tractor` dev branch as well as with | ||||
| #   our below idea for `Cascade`: | ||||
| #   - https://github.com/goodboy/tractor/pull/363 | ||||
| class Cascade(Struct): | ||||
|     ''' | ||||
|     As per sig-proc engineering parlance, this is a chaining of | ||||
|     `Flume`s, which are themselves collections of "Streams" | ||||
|     implemented currently via `ShmArray`s. | ||||
| async def fsp_compute( | ||||
| 
 | ||||
|     A `Cascade` is be the minimal "connection" of 2 `Flumes` | ||||
|     as per circuit parlance: | ||||
|     https://en.wikipedia.org/wiki/Two-port_network#Cascade_connection | ||||
| 
 | ||||
|     TODO: | ||||
|       -[ ] could cover the combination of our `FspAdmin` and the | ||||
|         backend `.fsp._engine` related machinery to "connect" one flume | ||||
|         to another? | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: make these `Flume`s | ||||
|     src: Flume | ||||
|     dst: Flume | ||||
|     tn: trio.Nursery | ||||
|     fsp: Fsp  # UI-side middleware ctl API | ||||
| 
 | ||||
|     # filled during cascade/.bind_func() (fsp_compute) init phases | ||||
|     bind_func: Callable | None = None | ||||
|     complete: trio.Event | None = None | ||||
|     cs: trio.CancelScope | None = None | ||||
|     client_stream: tractor.MsgStream | None = None | ||||
| 
 | ||||
|     async def resync(self) -> int: | ||||
|         # TODO: adopt an incremental update engine/approach | ||||
|         # where possible here eventually! | ||||
|         log.info(f're-syncing fsp {self.fsp.name} to source') | ||||
|         self.cs.cancel() | ||||
|         await self.complete.wait() | ||||
|         index: int = await self.tn.start(self.bind_func) | ||||
| 
 | ||||
|         # always trigger UI refresh after history update, | ||||
|         # see ``piker.ui._fsp.FspAdmin.open_chain()`` and | ||||
|         # ``piker.ui._display.trigger_update()``. | ||||
|         dst_shm: ShmArray = self.dst.rt_shm | ||||
|         await self.client_stream.send({ | ||||
|             'fsp_update': { | ||||
|                 'key': dst_shm.token, | ||||
|                 'first': dst_shm._first.value, | ||||
|                 'last': dst_shm._last.value, | ||||
|             } | ||||
|         }) | ||||
|         return index | ||||
| 
 | ||||
|     def is_synced(self) -> tuple[bool, int, int]: | ||||
|         ''' | ||||
|         Predicate to determine if a destination FSP | ||||
|         output array is aligned to its source array. | ||||
| 
 | ||||
|         ''' | ||||
|         src_shm: ShmArray = self.src.rt_shm | ||||
|         dst_shm: ShmArray = self.dst.rt_shm | ||||
|         step_diff = src_shm.index - dst_shm.index | ||||
|         len_diff = abs(len(src_shm.array) - len(dst_shm.array)) | ||||
|         synced: bool = not ( | ||||
|             # the source is likely backfilling and we must | ||||
|             # sync history calculations | ||||
|             len_diff > 2 | ||||
| 
 | ||||
|             # we aren't step synced to the source and may be | ||||
|             # leading/lagging by a step | ||||
|             or step_diff > 1 | ||||
|             or step_diff < 0 | ||||
|         ) | ||||
|         if not synced: | ||||
|             fsp: Fsp = self.fsp | ||||
|             log.warning( | ||||
|                 '***DESYNCED FSP***\n' | ||||
|                 f'{fsp.ns_path}@{src_shm.token}\n' | ||||
|                 f'step_diff: {step_diff}\n' | ||||
|                 f'len_diff: {len_diff}\n' | ||||
|             ) | ||||
|         return ( | ||||
|             synced, | ||||
|             step_diff, | ||||
|             len_diff, | ||||
|         ) | ||||
| 
 | ||||
|     async def poll_and_sync_to_step(self) -> int: | ||||
|         synced, step_diff, _ = self.is_synced() | ||||
|         while not synced: | ||||
|             await self.resync() | ||||
|             synced, step_diff, _ = self.is_synced() | ||||
| 
 | ||||
|         return step_diff | ||||
| 
 | ||||
|     @acm | ||||
|     async def open_edge( | ||||
|         self, | ||||
|         bind_func: Callable, | ||||
|     ) -> int: | ||||
|         self.bind_func = bind_func | ||||
|         index = await self.tn.start(bind_func) | ||||
|         yield index | ||||
|         # TODO: what do we want on teardown/error? | ||||
|         # -[ ] dynamic reconnection after update? | ||||
| 
 | ||||
| 
 | ||||
| async def connect_streams( | ||||
|     casc: Cascade, | ||||
|     mkt: MktPair, | ||||
|     symbol: Symbol, | ||||
|     feed: Feed, | ||||
|     quote_stream: trio.abc.ReceiveChannel, | ||||
|     src: Flume, | ||||
|     dst: Flume, | ||||
| 
 | ||||
|     edge_func: Callable, | ||||
|     src: ShmArray, | ||||
|     dst: ShmArray, | ||||
| 
 | ||||
|     func: Callable, | ||||
| 
 | ||||
|     # attach_stream: bool = False, | ||||
|     task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Stream, compute per-sample, and write the cascade of | ||||
|     2 `Flume`s/streams given some operating `edge_func`. | ||||
| 
 | ||||
|     https://en.wikipedia.org/wiki/Signal-flow_graph#Basic_components | ||||
| 
 | ||||
|     Not literally, but something like: | ||||
| 
 | ||||
|         edge_func(Flume_in) -> Flume_out | ||||
| 
 | ||||
|     ''' | ||||
|     profiler = Profiler( | ||||
|     profiler = pg.debug.Profiler( | ||||
|         delayed=False, | ||||
|         disabled=True | ||||
|     ) | ||||
| 
 | ||||
|     # TODO: just pull it from src.mkt.fqme no? | ||||
|     # fqme: str = mkt.fqme | ||||
|     fqme: str = src.mkt.fqme | ||||
| 
 | ||||
|     # TODO: dynamic introspection of what the underlying (vertex) | ||||
|     # function actually requires from input node (flumes) then | ||||
|     # deliver those inputs as part of a graph "compilation" step? | ||||
|     out_stream = edge_func( | ||||
|     fqsn = symbol.front_fqsn() | ||||
|     out_stream = func( | ||||
| 
 | ||||
|         # TODO: do we even need this if we do the feed api right? | ||||
|         # shouldn't a local stream do this before we get a handle | ||||
|         # to the async iterable? it's that or we do some kinda | ||||
|         # async itertools style? | ||||
|         filter_quotes_by_sym(fqme, quote_stream), | ||||
|         filter_quotes_by_sym(fqsn, quote_stream), | ||||
| 
 | ||||
|         # XXX: currently the ``ohlcv`` arg, but we should allow | ||||
|         # (dynamic) requests for src flume (node) streams? | ||||
|         src.rt_shm, | ||||
|         # XXX: currently the ``ohlcv`` arg | ||||
|         feed.shm, | ||||
|     ) | ||||
| 
 | ||||
|     # HISTORY COMPUTE PHASE | ||||
|     # conduct a single iteration of fsp with historical bars input | ||||
|     # and get historical output. | ||||
|     history_output: ( | ||||
|         dict[str, np.ndarray]  # multi-output case | ||||
|         | np.ndarray  # single output case | ||||
|     ) | ||||
|     # Conduct a single iteration of fsp with historical bars input | ||||
|     # and get historical output | ||||
|     history_output: Union[ | ||||
|         dict[str, np.ndarray],  # multi-output case | ||||
|         np.ndarray,  # single output case | ||||
|     ] | ||||
|     history_output = await anext(out_stream) | ||||
| 
 | ||||
|     func_name = edge_func.__name__ | ||||
|     func_name = func.__name__ | ||||
|     profiler(f'{func_name} generated history') | ||||
| 
 | ||||
|     # build struct array with an 'index' field to push as history | ||||
|  | @ -266,17 +124,11 @@ async def connect_streams( | |||
|     # TODO: push using a[['f0', 'f1', .., 'fn']] = .. syntax no? | ||||
|     # if the output array is multi-field then push | ||||
|     # each respective field. | ||||
|     dst_shm: ShmArray = dst.rt_shm | ||||
|     fields = getattr(dst_shm.array.dtype, 'fields', None).copy() | ||||
|     fields = getattr(dst.array.dtype, 'fields', None).copy() | ||||
|     fields.pop('index') | ||||
|     history_by_field: np.ndarray | None = None | ||||
|     src_shm: ShmArray = src.rt_shm | ||||
|     src_time = src_shm.array['time'] | ||||
|     history: Optional[np.ndarray] = None  # TODO: nptyping here! | ||||
| 
 | ||||
|     if ( | ||||
|         fields and | ||||
|         len(fields) > 1 | ||||
|     ): | ||||
|     if fields and len(fields) > 1 and fields: | ||||
|         if not isinstance(history_output, dict): | ||||
|             raise ValueError( | ||||
|                 f'`{func_name}` is a multi-output FSP and should yield a ' | ||||
|  | @ -287,25 +139,25 @@ async def connect_streams( | |||
|             if key in history_output: | ||||
|                 output = history_output[key] | ||||
| 
 | ||||
|                 if history_by_field is None: | ||||
|                 if history is None: | ||||
| 
 | ||||
|                     if output is None: | ||||
|                         length = len(src_shm.array) | ||||
|                         length = len(src.array) | ||||
|                     else: | ||||
|                         length = len(output) | ||||
| 
 | ||||
|                     # using the first output, determine | ||||
|                     # the length of the struct-array that | ||||
|                     # will be pushed to shm. | ||||
|                     history_by_field = np.zeros( | ||||
|                     history = np.zeros( | ||||
|                         length, | ||||
|                         dtype=dst_shm.array.dtype | ||||
|                         dtype=dst.array.dtype | ||||
|                     ) | ||||
| 
 | ||||
|                 if output is None: | ||||
|                     continue | ||||
| 
 | ||||
|                 history_by_field[key] = output | ||||
|                 history[key] = output | ||||
| 
 | ||||
|     # single-key output stream | ||||
|     else: | ||||
|  | @ -314,15 +166,11 @@ async def connect_streams( | |||
|                 f'`{func_name}` is a single output FSP and should yield an ' | ||||
|                 '`np.ndarray` for history' | ||||
|             ) | ||||
|         history_by_field = np.zeros( | ||||
|         history = np.zeros( | ||||
|             len(history_output), | ||||
|             dtype=dst_shm.array.dtype | ||||
|             dtype=dst.array.dtype | ||||
|         ) | ||||
|         history_by_field[func_name] = history_output | ||||
| 
 | ||||
|     history_by_field['time'] = src_time[-len(history_by_field):] | ||||
| 
 | ||||
|     history_output['time'] = src_shm.array['time'] | ||||
|         history[func_name] = history_output | ||||
| 
 | ||||
|     # TODO: XXX: | ||||
|     # THERE'S A BIG BUG HERE WITH THE `index` field since we're | ||||
|  | @ -335,14 +183,11 @@ async def connect_streams( | |||
|     #   is `index` aware such that historical data can be indexed | ||||
|     #   relative to the true first datum? Not sure if this is sane | ||||
|     #   for incremental computations. | ||||
|     first = dst_shm._first.value = src_shm._first.value | ||||
|     first = dst._first.value = src._first.value | ||||
| 
 | ||||
|     # TODO: can we use this `start` flag instead of the manual | ||||
|     # setting above? | ||||
|     index = dst_shm.push( | ||||
|         history_by_field, | ||||
|         start=first, | ||||
|     ) | ||||
|     index = dst.push(history, start=first) | ||||
| 
 | ||||
|     profiler(f'{func_name} pushed history') | ||||
|     profiler.finish() | ||||
|  | @ -350,9 +195,12 @@ async def connect_streams( | |||
|     # setup a respawn handle | ||||
|     with trio.CancelScope() as cs: | ||||
| 
 | ||||
|         casc.cs = cs | ||||
|         casc.complete = trio.Event() | ||||
|         task_status.started(index) | ||||
|         # TODO: might be better to just make a "restart" method where | ||||
|         # the target task is spawned implicitly and then the event is | ||||
|         # set via some higher level api? At that point we might as well | ||||
|         # be writing a one-cancels-one nursery though right? | ||||
|         tracker = TaskTracker(trio.Event(), cs) | ||||
|         task_status.started((tracker, index)) | ||||
| 
 | ||||
|         profiler(f'{func_name} yield last index') | ||||
| 
 | ||||
|  | @ -365,14 +213,8 @@ async def connect_streams( | |||
| 
 | ||||
|                 log.debug(f"{func_name}: {processed}") | ||||
|                 key, output = processed | ||||
|                 # dst.array[-1][key] = output | ||||
|                 dst_shm.array[[key, 'time']][-1] = ( | ||||
|                     output, | ||||
|                     # TODO: what about pushing ``time.time_ns()`` | ||||
|                     # in which case we'll need to round at the graphics | ||||
|                     # processing / sampling layer? | ||||
|                     src_shm.array[-1]['time'] | ||||
|                 ) | ||||
|                 index = src.index | ||||
|                 dst.array[-1][key] = output | ||||
| 
 | ||||
|                 # NOTE: for now we aren't streaming this to the consumer | ||||
|                 # stream latest array index entry which basically just acts | ||||
|  | @ -383,7 +225,6 @@ async def connect_streams( | |||
|                 # N-consumers who subscribe for the real-time output, | ||||
|                 # which we'll likely want to implement using local-mem | ||||
|                 # chans for the fan out? | ||||
|                 # index = src_shm.index | ||||
|                 # if attach_stream: | ||||
|                 #     await client_stream.send(index) | ||||
| 
 | ||||
|  | @ -393,7 +234,7 @@ async def connect_streams( | |||
|                 #     log.info(f'FSP quote too fast: {hz}') | ||||
|                 # last = time.time() | ||||
|         finally: | ||||
|             casc.complete.set() | ||||
|             tracker.complete.set() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -402,17 +243,17 @@ async def cascade( | |||
|     ctx: tractor.Context, | ||||
| 
 | ||||
|     # data feed key | ||||
|     fqme: str, | ||||
|     fqsn: str, | ||||
| 
 | ||||
|     src_shm_token: dict, | ||||
|     dst_shm_token: tuple[str, np.dtype], | ||||
| 
 | ||||
|     # flume pair cascaded using an "edge function" | ||||
|     src_flume_addr: dict, | ||||
|     dst_flume_addr: dict, | ||||
|     ns_path: NamespacePath, | ||||
| 
 | ||||
|     shm_registry: dict[str, _Token], | ||||
| 
 | ||||
|     zero_on_step: bool = False, | ||||
|     loglevel: str | None = None, | ||||
|     loglevel: Optional[str] = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|  | @ -420,7 +261,7 @@ async def cascade( | |||
|     destination shm array buffer. | ||||
| 
 | ||||
|     ''' | ||||
|     profiler = Profiler( | ||||
|     profiler = pg.debug.Profiler( | ||||
|         delayed=False, | ||||
|         disabled=False | ||||
|     ) | ||||
|  | @ -428,14 +269,8 @@ async def cascade( | |||
|     if loglevel: | ||||
|         get_console_log(loglevel) | ||||
| 
 | ||||
|     src: Flume = Flume.from_msg(src_flume_addr) | ||||
|     dst: Flume = Flume.from_msg( | ||||
|         dst_flume_addr, | ||||
|         readonly=False, | ||||
|     ) | ||||
| 
 | ||||
|     # src: ShmArray = attach_shm_array(token=src_shm_token) | ||||
|     # dst: ShmArray = attach_shm_array(readonly=False, token=dst_shm_token) | ||||
|     src = attach_shm_array(token=src_shm_token) | ||||
|     dst = attach_shm_array(readonly=False, token=dst_shm_token) | ||||
| 
 | ||||
|     reg = _load_builtins() | ||||
|     lines = '\n'.join([f'{key.rpartition(":")[2]} => {key}' for key in reg]) | ||||
|  | @ -443,11 +278,11 @@ async def cascade( | |||
|         f'Registered FSP set:\n{lines}' | ||||
|     ) | ||||
| 
 | ||||
|     # NOTE XXX: update actorlocal flows table which registers | ||||
|     # readonly "instances" of this fsp for symbol/source so that | ||||
|     # consumer fsps can look it up by source + fsp. | ||||
|     # TODO: ugh i hate this wind/unwind to list over the wire but | ||||
|     # not sure how else to do it. | ||||
|     # update actorlocal flows table which registers | ||||
|     # readonly "instances" of this fsp for symbol/source | ||||
|     # so that consumer fsps can look it up by source + fsp. | ||||
|     # TODO: ugh i hate this wind/unwind to list over the wire | ||||
|     # but not sure how else to do it. | ||||
|     for (token, fsp_name, dst_token) in shm_registry: | ||||
|         Fsp._flow_registry[( | ||||
|             _Token.from_msg(token), | ||||
|  | @ -457,19 +292,15 @@ async def cascade( | |||
|     fsp: Fsp = reg.get( | ||||
|         NamespacePath(ns_path) | ||||
|     ) | ||||
|     func: Callable = fsp.func | ||||
|     func = fsp.func | ||||
| 
 | ||||
|     if not func: | ||||
|         # TODO: assume it's a func target path | ||||
|         raise ValueError(f'Unknown fsp target: {ns_path}') | ||||
| 
 | ||||
|     _fqme: str = src.mkt.fqme | ||||
|     assert _fqme == fqme | ||||
| 
 | ||||
|     # open a data feed stream with requested broker | ||||
|     feed: Feed | ||||
|     async with data.feed.maybe_open_feed( | ||||
|         [fqme], | ||||
|         [fqsn], | ||||
| 
 | ||||
|         # TODO throttle tick outputs from *this* daemon since | ||||
|         # it'll emit tons of ticks due to the throttle only | ||||
|  | @ -477,144 +308,155 @@ async def cascade( | |||
|         # needs to get throttled the ticks we generate. | ||||
|         # tick_throttle=60, | ||||
| 
 | ||||
|     ) as feed: | ||||
| 
 | ||||
|         flume: Flume = feed.flumes[fqme] | ||||
|         # XXX: can't do this since flume.feed will be set XD | ||||
|         # assert flume == src | ||||
|         assert flume.mkt == src.mkt | ||||
|         mkt: MktPair = flume.mkt | ||||
| 
 | ||||
|         # NOTE: FOR NOW, sanity checks around the feed as being | ||||
|         # always the src flume (until we get to fancier/lengthier | ||||
|         # chains/graphs). | ||||
|         assert src.rt_shm.token == flume.rt_shm.token | ||||
| 
 | ||||
|         # XXX: won't work bc the _hist_shm_token value will be | ||||
|         # list[list] after IPC.. | ||||
|         # assert flume.to_msg() == src_flume_addr | ||||
|     ) as (feed, quote_stream): | ||||
|         symbol = feed.symbols[fqsn] | ||||
| 
 | ||||
|         profiler(f'{func}: feed up') | ||||
| 
 | ||||
|         func_name: str = func.__name__ | ||||
|         assert src.token == feed.shm.token | ||||
|         # last_len = new_len = len(src.array) | ||||
| 
 | ||||
|         func_name = func.__name__ | ||||
|         async with ( | ||||
|             trio.open_nursery() as tn, | ||||
|             trio.open_nursery() as n, | ||||
|         ): | ||||
|             # TODO: might be better to just make a "restart" method where | ||||
|             # the target task is spawned implicitly and then the event is | ||||
|             # set via some higher level api? At that point we might as well | ||||
|             # be writing a one-cancels-one nursery though right? | ||||
|             casc = Cascade( | ||||
|                 src, | ||||
|                 dst, | ||||
|                 tn, | ||||
|                 fsp, | ||||
|             ) | ||||
| 
 | ||||
|             # TODO: this seems like it should be wrapped somewhere? | ||||
|             fsp_target = partial( | ||||
|                 connect_streams, | ||||
|                 casc=casc, | ||||
|                 mkt=mkt, | ||||
|                 quote_stream=flume.stream, | ||||
| 
 | ||||
|                 # flumes and shm passthrough | ||||
|                 fsp_compute, | ||||
|                 symbol=symbol, | ||||
|                 feed=feed, | ||||
|                 quote_stream=quote_stream, | ||||
| 
 | ||||
|                 # shm | ||||
|                 src=src, | ||||
|                 dst=dst, | ||||
| 
 | ||||
|                 # chain function which takes src flume input(s) | ||||
|                 # and renders dst flume output(s) | ||||
|                 edge_func=func | ||||
|                 # target | ||||
|                 func=func | ||||
|             ) | ||||
|             async with casc.open_edge( | ||||
|                 bind_func=fsp_target, | ||||
|             ) as index: | ||||
|                 # casc.bind_func = fsp_target | ||||
|                 # index = await tn.start(fsp_target) | ||||
|                 dst_shm: ShmArray = dst.rt_shm | ||||
|                 src_shm: ShmArray = src.rt_shm | ||||
| 
 | ||||
|                 if zero_on_step: | ||||
|                     last = dst.rt_shm.array[-1:] | ||||
|                     zeroed = np.zeros(last.shape, dtype=last.dtype) | ||||
|             tracker, index = await n.start(fsp_target) | ||||
| 
 | ||||
|                 profiler(f'{func_name}: fsp up') | ||||
|             if zero_on_step: | ||||
|                 last = dst.array[-1:] | ||||
|                 zeroed = np.zeros(last.shape, dtype=last.dtype) | ||||
| 
 | ||||
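Note that the ``zero_on_step`` template above works because
``np.zeros()`` preserves the structured dtype of the sampled row; a
quick illustration with a made-up row layout::

    import numpy as np

    # one-row structured array standing in for `dst.array[-1:]`
    last = np.array(
        [(128, 1.5, 42.0)],
        dtype=[('index', 'i8'), ('close', 'f8'), ('volume', 'f8')],
    )
    zeroed = np.zeros(last.shape, dtype=last.dtype)
    assert zeroed['volume'][0] == 0.0  # every field reset per step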
|                 # sync to client-side actor | ||||
|                 await ctx.started(index) | ||||
|             profiler(f'{func_name}: fsp up') | ||||
| 
 | ||||
|                 # XXX:  rt stream with client which we MUST | ||||
|                 # open here (and keep it open) in order to make | ||||
|                 # incremental "updates" as history prepends take | ||||
|                 # place. | ||||
|                 async with ctx.open_stream() as client_stream: | ||||
|                     casc.client_stream: tractor.MsgStream = client_stream | ||||
|             # sync client | ||||
|             await ctx.started(index) | ||||
| 
 | ||||
|                     s, step, ld = casc.is_synced() | ||||
|             # XXX:  rt stream with client which we MUST | ||||
|             # open here (and keep it open) in order to make | ||||
|             # incremental "updates" as history prepends take | ||||
|             # place. | ||||
|             async with ctx.open_stream() as client_stream: | ||||
| 
 | ||||
|                     # detect sample period step for subscription to increment | ||||
|                     # signal | ||||
|                     times = src.rt_shm.array['time'] | ||||
|                     if len(times) > 1: | ||||
|                         last_ts = times[-1] | ||||
|                         delay_s: float = float(last_ts - times[times != last_ts][-1]) | ||||
|                     else: | ||||
|                         # our default "HFT" sample rate. | ||||
|                         delay_s: float = _default_delay_s | ||||
|                 # TODO: these likely should all become | ||||
|                 # methods of this ``TaskLifetime`` or whatever | ||||
|                 # abstraction.. | ||||
|                 async def resync( | ||||
|                     tracker: TaskTracker, | ||||
| 
 | ||||
|                     # sub and increment the underlying shared memory buffer | ||||
|                     # on every step msg received from the global `samplerd` | ||||
|                     # service. | ||||
|                     async with open_sample_stream( | ||||
|                         float(delay_s) | ||||
|                     ) as istream: | ||||
|                 ) -> tuple[TaskTracker, int]: | ||||
|                     # TODO: adopt an incremental update engine/approach | ||||
|                     # where possible here eventually! | ||||
|                     log.debug(f're-syncing fsp {func_name} to source') | ||||
|                     tracker.cs.cancel() | ||||
|                     await tracker.complete.wait() | ||||
|                     tracker, index = await n.start(fsp_target) | ||||
| 
 | ||||
|                         profiler(f'{func_name}: sample stream up') | ||||
|                         profiler.finish() | ||||
|                     # always trigger UI refresh after history update, | ||||
|                     # see ``piker.ui._fsp.FspAdmin.open_chain()`` and | ||||
|                     # ``piker.ui._display.trigger_update()``. | ||||
|                     await client_stream.send({ | ||||
|                         'fsp_update': { | ||||
|                             'key': dst_shm_token, | ||||
|                             'first': dst._first.value, | ||||
|                             'last': dst._last.value, | ||||
|                         } | ||||
|                     }) | ||||
|                     return tracker, index | ||||
| 
 | ||||
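The ``fsp_update`` payload sent above is a plain dict carrying the dst
shm token plus its first/last index counters; its shape (with
illustrative values only) is roughly::

    # illustrative only: real values come from the dst shm token
    # and the first/last counters shown above.
    msg = {
        'fsp_update': {
            'key': ('dst-shm-token',),  # hypothetical token value
            'first': 0,     # first valid index in the dst buffer
            'last': 1024,   # last written index in the dst buffer
        }
    }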
|                         async for i in istream: | ||||
|                             # print(f'FSP incrementing {i}') | ||||
|                 def is_synced( | ||||
|                     src: ShmArray, | ||||
|                     dst: ShmArray | ||||
|                 ) -> tuple[bool, int, int]: | ||||
|                     '''Predicate to determine if a destination FSP | ||||
|                     output array is aligned to its source array. | ||||
| 
 | ||||
|                             # respawn the compute task if the source | ||||
|                             # array has been updated such that we compute | ||||
|                             # new history from the (prepended) source. | ||||
|                             synced, step_diff, _ = casc.is_synced() | ||||
|                             if not synced: | ||||
|                                 step_diff: int = await casc.poll_and_sync_to_step() | ||||
|                     ''' | ||||
|                     step_diff = src.index - dst.index | ||||
|                     len_diff = abs(len(src.array) - len(dst.array)) | ||||
|                     return not ( | ||||
|                         # the source is likely backfilling and we must | ||||
|                         # sync history calculations | ||||
|                         len_diff > 2 or | ||||
| 
 | ||||
|                                 # skip adding a last bar since we should already | ||||
|                                 # be step aligned | ||||
|                                 if step_diff == 0: | ||||
|                                     continue | ||||
|                         # we aren't step synced to the source and may be | ||||
|                         # leading/lagging by a step | ||||
|                         step_diff > 1 or | ||||
|                         step_diff < 0 | ||||
|                     ), step_diff, len_diff | ||||
| 
 | ||||
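To make the predicate concrete, here is the same alignment check as a
pure function over plain ints (a worked example, not the real
signature)::

    def is_synced_pure(
        src_index: int,
        dst_index: int,
        src_len: int,
        dst_len: int,
    ) -> tuple[bool, int, int]:
        step_diff = src_index - dst_index
        len_diff = abs(src_len - dst_len)
        synced = not (
            len_diff > 2        # src is likely backfilling history
            or step_diff > 1    # dst lags src by more than a step
            or step_diff < 0    # dst is somehow ahead of src
        )
        return synced, step_diff, len_diff

    assert is_synced_pure(100, 100, 500, 500)[0]      # aligned
    assert not is_synced_pure(100, 98, 500, 500)[0]   # dst lagging
    assert not is_synced_pure(100, 100, 900, 500)[0]  # src backfilled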
|                             # read out last shm row, copy and write new row | ||||
|                             array = dst_shm.array | ||||
|                 async def poll_and_sync_to_step( | ||||
| 
 | ||||
|                             # some metrics like vlm should be reset | ||||
|                             # to zero every step. | ||||
|                             if zero_on_step: | ||||
|                                 last = zeroed | ||||
|                             else: | ||||
|                                 last = array[-1:].copy() | ||||
|                     tracker: TaskTracker, | ||||
|                     src: ShmArray, | ||||
|                     dst: ShmArray, | ||||
| 
 | ||||
|                             dst.rt_shm.push(last) | ||||
|                 ) -> tuple[TaskTracker, int]: | ||||
| 
 | ||||
|                             # sync with source buffer's time step | ||||
|                             src_l2 = src_shm.array[-2:] | ||||
|                             src_li, src_lt = src_l2[-1][['index', 'time']] | ||||
|                             src_2li, src_2lt = src_l2[-2][['index', 'time']] | ||||
|                             dst_shm._array['time'][src_li] = src_lt | ||||
|                             dst_shm._array['time'][src_2li] = src_2lt | ||||
|                     synced, step_diff, _ = is_synced(src, dst) | ||||
|                     while not synced: | ||||
|                         tracker, index = await resync(tracker) | ||||
|                         synced, step_diff, _ = is_synced(src, dst) | ||||
| 
 | ||||
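The time-column sync just above copies the source's last two
timestamps into the destination buffer at the same absolute index
slots; the same move in plain numpy (layouts invented for the
example)::

    import numpy as np

    dt = [('index', 'i8'), ('time', 'f8')]
    src_l2 = np.array([(10, 1060.), (11, 1120.)], dtype=dt)
    dst = np.zeros(16, dtype=dt)  # stand-in for the dst backing array

    # write src's last two timestamps at their absolute index slots
    for row in src_l2:
        dst['time'][int(row['index'])] = row['time']

    assert dst['time'][11] == 1120.0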
|                             # last2 = dst.array[-2:] | ||||
|                             # if ( | ||||
|                             #     last2[-1]['index'] != src_li | ||||
|                             #     or last2[-2]['index'] != src_2li | ||||
|                             # ): | ||||
|                             #     dstl2 = list(last2) | ||||
|                             #     srcl2 = list(src_l2) | ||||
|                             #     print( | ||||
|                             #         # f'{dst.token}\n' | ||||
|                             #         f'src: {srcl2}\n' | ||||
|                             #         f'dst: {dstl2}\n' | ||||
|                             #     ) | ||||
|                     return tracker, step_diff | ||||
| 
 | ||||
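The cancel-and-respawn dance in ``resync()`` above is a generic
restartable-task pattern; a self-contained trio sketch, with the
``TaskTracker`` shape assumed from its usage here::

    import trio

    class TaskTracker:
        # assumed shape: a cancel scope + teardown event per task run
        def __init__(self) -> None:
            self.cs: trio.CancelScope | None = None
            self.complete = trio.Event()

    async def compute(
        task_status=trio.TASK_STATUS_IGNORED,
    ) -> None:
        tracker = TaskTracker()
        try:
            with trio.CancelScope() as cs:
                tracker.cs = cs
                task_status.started(tracker)
                await trio.sleep_forever()  # the real compute loop
        finally:
            tracker.complete.set()  # signal full teardown

    async def restart(
        n: trio.Nursery,
        tracker: TaskTracker,
    ) -> TaskTracker:
        # cancel the old run, wait for teardown, spawn a fresh one
        tracker.cs.cancel()
        await tracker.complete.wait()
        return await n.start(compute)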
|                 s, step, ld = is_synced(src, dst) | ||||
| 
 | ||||
|                 # detect sample period step for subscription to increment | ||||
|                 # signal | ||||
|                 times = src.array['time'] | ||||
|                 delay_s = times[-1] - times[times != times[-1]][-1] | ||||
| 
 | ||||
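The sample period detection above grabs the last timestamp and the most
recent *differing* one; numerically::

    import numpy as np

    # epoch times for 60s bars where the live bar repeats its stamp
    times = np.array([1000., 1060., 1120., 1120.])
    last_ts = times[-1]
    # most recent timestamp which differs from the last one
    prev_ts = times[times != last_ts][-1]
    delay_s = last_ts - prev_ts
    assert delay_s == 60.0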
|                 # Increment the underlying shared memory buffer on every | ||||
|                 # "increment" msg received from the underlying data feed. | ||||
|                 async with feed.index_stream( | ||||
|                     int(delay_s) | ||||
|                 ) as istream: | ||||
| 
 | ||||
|                     profiler(f'{func_name}: sample stream up') | ||||
|                     profiler.finish() | ||||
| 
 | ||||
|                     async for _ in istream: | ||||
| 
 | ||||
|                         # respawn the compute task if the source | ||||
|                         # array has been updated such that we compute | ||||
|                         # new history from the (prepended) source. | ||||
|                         synced, step_diff, _ = is_synced(src, dst) | ||||
|                         if not synced: | ||||
|                             tracker, step_diff = await poll_and_sync_to_step( | ||||
|                                 tracker, | ||||
|                                 src, | ||||
|                                 dst, | ||||
|                             ) | ||||
| 
 | ||||
|                             # skip adding a last bar since we should already | ||||
|                             # be step aligned | ||||
|                             if step_diff == 0: | ||||
|                                 continue | ||||
| 
 | ||||
|                         # read out last shm row, copy and write new row | ||||
|                         array = dst.array | ||||
| 
 | ||||
|                         # some metrics like vlm should be reset | ||||
|                         # to zero every step. | ||||
|                         if zero_on_step: | ||||
|                             last = zeroed | ||||
|                         else: | ||||
|                             last = array[-1:].copy() | ||||
| 
 | ||||
|                         dst.push(last) | ||||
|  |  | |||
|  | @ -24,7 +24,7 @@ import numpy as np | |||
| from numba import jit, float64, optional, int64 | ||||
| 
 | ||||
| from ._api import fsp | ||||
| from ..data import iterticks | ||||
| from ..data._normalize import iterticks | ||||
| from ..data._sharedmem import ShmArray | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
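For context on the import shuffle above, ``iterticks()`` filters the
tick list inside a quote message by tick type; a minimal
re-implementation under that assumption (not the real signature)::

    from typing import Iterator

    def iterticks(
        quote: dict,
        types: tuple[str, ...] = ('trade',),
    ) -> Iterator[dict]:
        # yield only ticks whose type is in the requested set
        for tick in quote.get('ticks', ()):
            if tick.get('type') in types:
                yield tick

    quote = {'ticks': [
        {'type': 'trade', 'price': 101.0, 'size': 2},
        {'type': 'bid', 'price': 100.9, 'size': 10},
    ]}
    assert [t['size'] for t in iterticks(quote)] == [2]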
Some files were not shown because too many files have changed in this diff.