Compare commits: py311_ib_f ... gitea_feat (202 commits)
	| Author | SHA1 | Date | 
|---|---|---|
|  | 5e371f1d73 | |
|  | 6c221bb348 | |
|  | e391c896f8 | |
|  | 5633f5614d | |
|  | 76735189de | |
|  | d49608f74e | |
|  | bf0ac93aa3 | |
|  | d7179d47b0 | |
|  | c390e87536 | |
|  | 5e4a6d61c7 | |
|  | 3caaa30b03 | |
|  | 1e3942fdc2 | |
|  | 49ea380503 | |
|  | 933f169938 | |
|  | 51337052a4 | |
|  | 8abe55dcc6 | |
|  | c933f2ad56 | |
|  | 00108010c9 | |
|  | 8a4901c517 | |
|  | d7f6a5ab63 | |
|  | e0fdabf651 | |
|  | cb88dfc9da | |
|  | bb41dd6d18 | |
|  | 99e90129ad | |
|  | cceb7a37b9 | |
|  | 5382815b2d | |
|  | cb1ba8a05f | |
|  | 6c65ec4d3b | |
|  | 12e371b027 | |
|  | 014bd58db4 | |
|  | 844544ed8e | |
|  | f479252d26 | |
|  | 033ef2e35e | |
|  | 2cdece244c | |
|  | 018694bbdb | |
|  | 128a2d507f | |
|  | 430650a6a7 | |
|  | 1da3cf5698 | |
|  | a348603fc4 | |
|  | 86047824d8 | |
|  | cb92abbc38 | |
|  | 70332e375b | |
|  | 4940aabe05 | |
|  | 4f9998e9fb | |
|  | c92a236196 | |
|  | e4cd1f85f6 | |
|  | 129cf58d41 | |
|  | 1fd8654ca5 | |
|  | d0170982bf | |
|  | 821e73a409 | |
|  | 3d03781810 | |
|  | 83d1f117a8 | |
|  | e4ce79f720 | |
|  | 264246d89b | |
|  | 7c96c9fafe | |
|  | 52b349fe79 | |
|  | 6959429af8 | |
|  | 05f874001a | |
|  | fc216d37de | |
|  | 03e429abf8 | |
|  | 7ae7cc829f | |
|  | b23d44e21a | |
|  | 2669db785c | |
|  | d3e7b5cd0e | |
|  | 9be29a707d | |
|  | c82ca812a8 | |
|  | a7ad50cf8f | |
|  | 661805695e | |
|  | 3de7c9a9eb | |
|  | 59536bd284 | |
|  | 5702e422d8 | |
|  | 07331a160e | |
|  | 0d18cb65c3 | |
|  | ad565936ec | |
|  | d4b07cc95a | |
|  | 1231c459aa | |
|  | 88f415e5b8 | |
|  | d9c574e291 | |
|  | a86573b5a2 | |
|  | 1d7e97a295 | |
|  | bbb98597a0 | |
|  | e33d6333ec | |
|  | 263a5a8d07 | |
|  | a681b2f0bb | |
|  | 5b0c94933b | |
|  | 61e52213b2 | |
|  | b064a5f94d | |
|  | e7fa841263 | |
|  | 1f346483a0 | |
|  | d006ecce7e | |
|  | 69368f20c2 | |
|  | 31fa0b02f5 | |
|  | 5a60974990 | |
|  | 8d324acf91 | |
|  | ab84303da7 | |
|  | 659649ec48 | |
|  | f7cc43ee0b | |
|  | f5dc21d3f4 | |
|  | 4568c55f17 | |
|  | d5d68f75ea | |
|  | 1f9a497637 | |
|  | 40c5d88a9b | |
|  | 8989c73a93 | |
|  | 3639f360c3 | |
|  | afd0781b62 | |
|  | ba154ef413 | |
|  | 97e2403fb1 | |
|  | a4084d6a0b | |
|  | 83bdca46a2 | |
|  | c129f5bb4a | |
|  | c4853a3fee | |
|  | f274c3db3b | |
|  | b95932ea09 | |
|  | e8bf4c6e04 | |
|  | 8e4d1a48ed | |
|  | b03eceebef | |
|  | f7a8d79b7b | |
|  | 49c458710e | |
|  | b94582cb35 | |
|  | 7311000846 | |
|  | e719733f97 | |
|  | cb941a5554 | |
|  | 2d72a052aa | |
|  | 2eeef2a123 | |
|  | b6d2550f33 | |
|  | b9af6176c5 | |
|  | dd0167b9a5 | |
|  | 9e71e0768f | |
|  | 6029f39a3f | |
|  | 656e2c6a88 | |
|  | b8065a413b | |
|  | 9245d24b47 | |
|  | 22bd83943b | |
|  | b94931bbdd | |
|  | 239c1c457e | |
|  | 24a54a7085 | |
|  | ebd1eb114e | |
|  | 29ce8de462 | |
|  | d3dab17939 | |
|  | cadc200818 | |
|  | 363c8dfdb1 | |
|  | 00c046c280 | |
|  | 9165515811 | |
|  | 543c11f377 | |
|  | 637d33d7cc | |
|  | e5fdb33e31 | |
|  | 81a8cd1685 | |
|  | a382f01c85 | |
|  | 653348fcd8 | |
|  | e139d2e259 | |
|  | 7258d57c69 | |
|  | 5d081a40d5 | |
|  | fcececce19 | |
|  | b6ac6069fe | |
|  | a98f5877bc | |
|  | 50ddef0985 | |
|  | b1cde3df49 | |
|  | 57010d479d | |
|  | f94244aad4 | |
|  | 261c331602 | |
|  | 3b4a4db7b6 | |
|  | ad59a581c7 | |
|  | c312f90c0c | |
|  | 1a859bc1a2 | |
|  | e9887cb611 | |
|  | 0ba75df877 | |
|  | a97a0ced8c | |
|  | 46d83e9ca9 | |
|  | d4833eba21 | |
|  | 14f124164a | |
|  | 05959eaf70 | |
|  | 30d55fdb27 | |
|  | 2c88ebe697 | |
|  | 4a180019f0 | |
|  | 4d274b16d8 | |
|  | 481618cc51 | |
|  | 778d26067d | |
|  | e54c3dc523 | |
|  | ad37cfbe2f | |
|  | 8369f557c7 | |
|  | 461764419d | |
|  | 1002ce1e10 | |
|  | 546049b62f | |
|  | e9517cdb02 | |
|  | 2b8cd031e8 | |
|  | 2e6b1330f3 | |
|  | 995d1534b6 | |
|  | 9d31941d42 | |
|  | a695208992 | |
|  | fed89562dc | |
|  | 9005335e18 | |
|  | c3f8b089be | |
|  | 0068119a6d | |
|  | 94540ce1cf | |
|  | ea9a5e524c | |
|  | 6b22024570 | |
|  | 847cb7740c | |
|  | 84dd0ae4ce | |
|  | 6b90e2e3ee | |
|  | 482ad1cc83 | |
|  | 6e8d07852c | |
|  | 4aa04e1c8e | |

README.rst (234 changed lines)

```diff
@@ -1,162 +1,161 @@
 piker
 -----
-trading gear for hackers.
+trading gear for hackers
 
 |gh_actions|
 
 .. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fpikers%2Fpiker%2Fbadge&style=popout-square
     :target: https://actions-badge.atrox.dev/piker/pikers/goto
 
-``piker`` is a broker agnostic, next-gen FOSS toolset for real-time
-computational trading targeted at `hardcore Linux users <comp_trader>`_ .
+``piker`` is a broker agnostic, next-gen FOSS toolset and runtime for
+real-time computational trading targeted at `hardcore Linux users
+<comp_trader>`_ .
 
-we use as much bleeding edge tech as possible including (but not limited to):
+we use much bleeding edge tech including (but not limited to):
 
 - latest python for glue_
-- trio_ & tractor_ for our distributed, multi-core, real-time streaming
-  `structured concurrency`_ runtime B)
-- Qt_ for pristine high performance UIs
-- pyqtgraph_ for real-time charting
-- ``polars`` ``numpy`` and ``numba`` for `fast numerics`_
-- `apache arrow and parquet`_ for time series history management
-  persistence and sharing
-- (prototyped) techtonicdb_ for L2 book storage
-
-.. |travis| image:: https://img.shields.io/travis/pikers/piker/master.svg
-    :target: https://travis-ci.org/pikers/piker
+- uv_ for packaging and distribution
+- trio_ & tractor_ for our distributed `structured concurrency`_ runtime
+- Qt_ for pristine low latency UIs
+- pyqtgraph_ (which we've extended) for real-time charting and graphics
+- ``polars`` ``numpy`` and ``numba`` for redic `fast numerics`_
+- `apache arrow and parquet`_ for time-series storage
+
+potential projects we might integrate with soon,
+
+- (already prototyped in ) techtonicdb_ for L2 book storage
 
+.. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/
+.. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue
+.. _uv: https://docs.astral.sh/uv/
 .. _trio: https://github.com/python-trio/trio
 .. _tractor: https://github.com/goodboy/tractor
 .. _structured concurrency: https://trio.discourse.group/
-.. _marketstore: https://github.com/alpacahq/marketstore
-.. _techtonicdb: https://github.com/0b01/tectonicdb
 .. _Qt: https://www.qt.io/
 .. _pyqtgraph: https://github.com/pyqtgraph/pyqtgraph
-.. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue
 .. _apache arrow and parquet: https://arrow.apache.org/faq/
 .. _fast numerics: https://zerowithdot.com/python-numpy-and-pandas-performance/
-.. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/
+.. _techtonicdb: https://github.com/0b01/tectonicdb
 
 
-focus and features:
-*******************
-- 100% federated: your code, your hardware, your data feeds, your broker fills.
-- zero web: low latency, native software that doesn't try to re-invent the OS
-- maximal **privacy**: prevent brokers and mms from knowing your
-  planz; smack their spreads with dark volume.
-- zero clutter: modal, context oriented UIs that echew minimalism, reduce
-  thought noise and encourage un-emotion.
-- first class parallelism: built from the ground up on next-gen structured concurrency
-  primitives.
-- traders first: broker/exchange/asset-class agnostic
-- systems grounded: real-time financial signal processing that will
-  make any queuing or DSP eng juice their shorts.
-- non-tina UX: sleek, powerful keyboard driven interaction with expected use in tiling wms
-- data collaboration: every process and protocol is multi-host scalable.
-- fight club ready: zero interest in adoption by suits; no corporate friendly license, ever.
-
-fitting with these tenets, we're always open to new framework suggestions and ideas.
-
-building the best looking, most reliable, keyboard friendly trading
-platform is the dream; join the cause.
+focus and feats:
+****************
+fitting with these tenets, we're always open to new
+framework/lib/service interop suggestions and ideas!
+
+- **100% federated**:
+  your code, your hardware, your data feeds, your broker fills.
+
+- **zero web**:
+  low latency as a prime objective, native UIs and modern IPC
+  protocols without trying to re-invent the "OS-as-an-app"..
+
+- **maximal privacy**:
+  prevent brokers and mms from knowing your planz; smack their
+  spreads with dark volume from a VPN tunnel.
+
+- **zero clutter**:
+  modal, context oriented UIs that echew minimalism, reduce thought
+  noise and encourage un-emotion.
+
+- **first class parallelism**:
+  built from the ground up on a next-gen structured concurrency
+  supervision sys.
+
+- **traders first**:
+  broker/exchange/venue/asset-class/money-sys agnostic
+
+- **systems grounded**:
+  real-time financial signal processing (fsp) that will make any
+  queuing or DSP eng juice their shorts.
+
+- **non-tina UX**:
+  sleek, powerful keyboard driven interaction with expected use in
+  tiling wms (or maybe even a DDE).
+
+- **data collab at scale**:
+  every actor-process and protocol is multi-host aware.
+
+- **fight club ready**:
+  zero interest in adoption by suits; no corporate friendly license,
+  ever.
+
+building the hottest looking, fastest, most reliable, keyboard
+friendly FOSS trading platform is the dream; join the cause.
 
 
-sane install with `poetry`
-**************************
-TODO!
+a sane install with `uv`
+************************
+bc why install with `python` when you can faster with `rust` ::
 
+    uv lock
 
-rigorous install on ``nixos`` using ``poetry2nix``
-**************************************************
-TODO!
 
 
 hacky install on nixos
 **********************
-`NixOS` is our core devs' distro of choice for which we offer
+``NixOS`` is our core devs' distro of choice for which we offer
 a stringently defined development shell envoirment that can be loaded with::
 
-    nix-shell develop.nix
-
-this will setup the required python environment to run piker, make sure to
-run::
-
-    pip install -r requirements.txt -e .
-
-once after loading the shell
+    nix-shell default.nix
 
 
-install wild-west style via `pip`
-*********************************
-``piker`` is currently under heavy pre-alpha development and as such
-should be cloned from this repo and hacked on directly.
+start a chart
+*************
+run a realtime OHLCV chart stand-alone::
 
-for a development install::
+    piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken
 
-    git clone git@github.com:pikers/piker.git
-    cd piker
-    virtualenv env
-    source ./env/bin/activate
-    pip install -r requirements.txt -e .
+this runs a chart UI (with 1m sampled OHLCV) and shows 2 spot markets from 2 diff cexes
+overlayed on the same graph. Use of `piker` without first starting
+a daemon (`pikerd` - see below) means there is an implicit spawning of the
+multi-actor-runtime (implemented as a `tractor` app).
+
+For additional subsystem feats available through our chart UI see the
+various sub-readmes:
+
+- order control using a mouse-n-keyboard UX B)
+- cross venue market-pair (what most call "symbol") search, select, overlay Bo
+- financial-signal-processing (`piker.fsp`) write-n-reload to sub-chart BO
+- src-asset derivatives scan for anal, like the infamous "max pain" XO
 
 
-check out our charts
-********************
-bet you weren't expecting this from the foss::
-
-    piker -l info -b kraken -b binance chart btcusdt.binance --pdb
-
-
-this runs the main chart (currently with 1m sampled OHLC) in in debug
-mode and you can practice paper trading using the following
-micro-manual:
-
-``order_mode`` (
-    edge triggered activation by any of the following keys,
-    ``mouse-click`` on y-level to submit at that price
-    ):
-
-    - ``f``/ ``ctl-f`` to stage buy
-    - ``d``/ ``ctl-d`` to stage sell
-    - ``a`` to stage alert
-
-
-``search_mode`` (
-    ``ctl-l`` or ``ctl-space`` to open,
-    ``ctl-c`` or ``ctl-space`` to close
-    ) :
-
-    - begin typing to have symbol search automatically lookup
-      symbols from all loaded backend (broker) providers
-    - arrow keys and mouse click to navigate selection
-    - vi-like ``ctl-[hjkl]`` for navigation
-
-
-you can also configure your position allocation limits from the
-sidepane.
-
-
-run in distributed mode
-***********************
-start the service manager and data feed daemon in the background and
-connect to it::
+spawn a daemon standalone
+*************************
+we call the root actor-process the ``pikerd``. it can be (and is
+recommended normally to be) started separately from the ``piker
+chart`` program::
 
     pikerd -l info --pdb
 
+the daemon does nothing until a ``piker``-client (like ``piker
+chart``) connects and requests some particular sub-system. for
+a connecting chart ``pikerd`` will spawn and manage at least,
 
-connect your chart::
+- a data-feed daemon: ``datad`` which does all the work of comms with
+  the backend provider (in this case the ``binance`` cex).
+- a paper-trading engine instance, ``paperboi.binance``, (if no live
+  account has been configured) which allows for auto/manual order
+  control against the live quote stream.
 
-    piker -l info -b kraken -b binance chart xmrusdt.binance --pdb
+*using* an actor-service (aka micro-daemon) manager which dynamically
+supervises various sub-subsystems-as-services throughout the ``piker``
+runtime-stack.
 
+now you can (implicitly) connect your chart::
 
-enjoy persistent real-time data feeds tied to daemon lifetime. the next
-time you spawn a chart it will load much faster since the data feed has
-been cached and is now always running live in the background until you
-kill ``pikerd``.
+    piker chart btcusdt.spot.binance
+
+since ``pikerd`` was started separately you can now enjoy a persistent
+real-time data stream tied to the daemon-tree's lifetime. i.e. the next
+time you spawn a chart it will obviously not only load much faster
+(since the underlying ``datad.binance`` is left running with its
+in-memory IPC data structures) but also the data-feed and any order
+mgmt states should be persistent until you finally cancel ``pikerd``.
 
 
 if anyone asks you what this project is about
 *********************************************
-you don't talk about it.
+you don't talk about it; just use it.
 
 
 how do i get involved?
@@ -166,6 +165,15 @@ enter the matrix.
 
 how come there ain't that many docs
 ***********************************
-suck it up, learn the code; no one is trying to sell you on anything.
-also, we need lotsa help so if you want to start somewhere and can't
-necessarily write serious code, this might be the place for you!
+i mean we want/need them but building the core right has been higher
+prio then marketting (and likely will stay that way Bp).
+
+soo, suck it up bc,
+
+- no one is trying to sell you on anything
+- learning the code base is prolly way more valuable
+- the UI/UXs are intended to be "intuitive" for any hacker..
+
+we obviously need tonz help so if you want to start somewhere and
+can't necessarily write "advanced" concurrent python/rust code, this
+helping document literally anything might be the place for you!
```
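The updated README swaps the old pip/virtualenv walkthrough for ``uv`` but only shows the ``uv lock`` step. A rough end-to-end sketch of that workflow (the clone URL comes from the old instructions; the ``uv run`` invocation is an assumption about how you'd launch the chart from uv's managed environment):

```sh
# hedged sketch of the uv-based dev flow described above
git clone git@github.com:pikers/piker.git
cd piker
uv lock    # resolve and pin dependencies (the only step the README shows)
# assumed: run the chart through uv's managed venv
uv run piker -l info chart btcusdt.spot.binance
```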
```diff
@@ -0,0 +1,134 @@
+with (import <nixpkgs> {});
+let
+  glibStorePath = lib.getLib glib;
+  zlibStorePath = lib.getLib zlib;
+  zstdStorePath = lib.getLib zstd;
+  dbusStorePath = lib.getLib dbus;
+  libGLStorePath = lib.getLib libGL;
+  freetypeStorePath = lib.getLib freetype;
+  qt6baseStorePath = lib.getLib qt6.qtbase;
+  fontconfigStorePath = lib.getLib fontconfig;
+  libxkbcommonStorePath = lib.getLib libxkbcommon;
+  xcbutilcursorStorePath = lib.getLib xcb-util-cursor;
+
+  qtpyStorePath = lib.getLib python312Packages.qtpy;
+  pyqt6StorePath = lib.getLib python312Packages.pyqt6;
+  pyqt6SipStorePath = lib.getLib python312Packages.pyqt6-sip;
+  rapidfuzzStorePath = lib.getLib python312Packages.rapidfuzz;
+  qdarkstyleStorePath = lib.getLib python312Packages.qdarkstyle;
+
+  xorgLibX11StorePath = lib.getLib xorg.libX11;
+  xorgLibxcbStorePath = lib.getLib xorg.libxcb;
+  xorgxcbutilwmStorePath = lib.getLib xorg.xcbutilwm;
+  xorgxcbutilimageStorePath = lib.getLib xorg.xcbutilimage;
+  xorgxcbutilerrorsStorePath = lib.getLib xorg.xcbutilerrors;
+  xorgxcbutilkeysymsStorePath = lib.getLib xorg.xcbutilkeysyms;
+  xorgxcbutilrenderutilStorePath = lib.getLib xorg.xcbutilrenderutil;
+in
+stdenv.mkDerivation {
+  name = "piker-qt6-uv";
+  buildInputs = [
+    # System requirements.
+    glib
+    zlib
+    dbus
+    zstd
+    libGL
+    freetype
+    qt6.qtbase
+    libgcc.lib
+    fontconfig
+    libxkbcommon
+
+    # Xorg requirements
+    xcb-util-cursor
+    xorg.libxcb
+    xorg.libX11
+    xorg.xcbutilwm
+    xorg.xcbutilimage
+    xorg.xcbutilerrors
+    xorg.xcbutilkeysyms
+    xorg.xcbutilrenderutil
+
+    # Python requirements.
+    python312Full
+    python312Packages.uv
+    python312Packages.qdarkstyle
+    python312Packages.rapidfuzz
+    python312Packages.pyqt6
+    python312Packages.qtpy
+  ];
+  src = null;
+  shellHook = ''
+    set -e
+
+    # Set the Qt plugin path
+    # export QT_DEBUG_PLUGINS=1
+
+    QTBASE_PATH="${qt6baseStorePath}/lib"
+    QT_PLUGIN_PATH="$QTBASE_PATH/qt-6/plugins"
+    QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms"
+
+    LIB_GCC_PATH="${libgcc.lib}/lib"
+    GLIB_PATH="${glibStorePath}/lib"
+    ZSTD_PATH="${zstdStorePath}/lib"
+    ZLIB_PATH="${zlibStorePath}/lib"
+    DBUS_PATH="${dbusStorePath}/lib"
+    LIBGL_PATH="${libGLStorePath}/lib"
+    FREETYPE_PATH="${freetypeStorePath}/lib"
+    FONTCONFIG_PATH="${fontconfigStorePath}/lib"
+    LIB_XKB_COMMON_PATH="${libxkbcommonStorePath}/lib"
+
+    XCB_UTIL_CURSOR_PATH="${xcbutilcursorStorePath}/lib"
+    XORG_LIB_X11_PATH="${xorgLibX11StorePath}/lib"
+    XORG_LIB_XCB_PATH="${xorgLibxcbStorePath}/lib"
+    XORG_XCB_UTIL_IMAGE_PATH="${xorgxcbutilimageStorePath}/lib"
+    XORG_XCB_UTIL_WM_PATH="${xorgxcbutilwmStorePath}/lib"
+    XORG_XCB_UTIL_RENDER_UTIL_PATH="${xorgxcbutilrenderutilStorePath}/lib"
+    XORG_XCB_UTIL_KEYSYMS_PATH="${xorgxcbutilkeysymsStorePath}/lib"
+    XORG_XCB_UTIL_ERRORS_PATH="${xorgxcbutilerrorsStorePath}/lib"
+
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QTBASE_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_PLUGIN_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_QPA_PLATFORM_PLUGIN_PATH"
+
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_GCC_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$DBUS_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GLIB_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZLIB_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZSTD_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIBGL_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FONTCONFIG_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FREETYPE_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_XKB_COMMON_PATH"
+
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XCB_UTIL_CURSOR_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_X11_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_XCB_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_IMAGE_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_WM_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_RENDER_UTIL_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_KEYSYMS_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_ERRORS_PATH"
+
+    export LD_LIBRARY_PATH
+
+    RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages"
+    QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages"
+    QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages"
+    PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages"
+    PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages"
+
+    PATCH="$PATCH:$RPDFUZZ_PATH"
+    PATCH="$PATCH:$QDRKSTYLE_PATH"
+    PATCH="$PATCH:$QTPY_PATH"
+    PATCH="$PATCH:$PYQT6_PATH"
+    PATCH="$PATCH:$PYQT6_SIP_PATH"
+
+    export PATCH
+
+    # Install deps
+    uv lock
+
+  '';
+}
```
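Entering this new shell is presumably the ``nix-shell default.nix`` invocation the updated README shows; a sketch of a session (the follow-up ``uv run`` step is an assumption, since the shellHook itself only locks dependencies):

```sh
# enter the pinned Qt6 + python312 environment; the shellHook wires up
# LD_LIBRARY_PATH for the Qt/xcb libs and then runs `uv lock`
nix-shell default.nix
# assumed follow-up once inside the shell:
uv run piker -l info chart btcusdt.spot.binance
```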

develop.nix (37 changed lines)

```diff
@@ -1,28 +1,34 @@
 with (import <nixpkgs> {});
-with python310Packages;
+
 stdenv.mkDerivation {
-  name = "pip-env";
+  name = "poetry-env";
   buildInputs = [
     # System requirements.
     readline
 
     # TODO: hacky non-poetry install stuff we need to get rid of!!
-    virtualenv
-    setuptools
-    pip
+    poetry
+    # virtualenv
+    # setuptools
+    # pip
 
-    # obviously, and see below for hacked linking
-    pyqt5
-
     # Python requirements (enough to get a virtualenv going).
-    python310Full
+    python311Full
+
+    # obviously, and see below for hacked linking
+    python311Packages.pyqt5
+    python311Packages.pyqt5_sip
+    # python311Packages.qtpy
 
     # numerics deps
-    python310Packages.python-Levenshtein
-    python310Packages.fastparquet
-    python310Packages.polars
+    python311Packages.levenshtein
+    python311Packages.fastparquet
+    python311Packages.polars
 
   ];
+  # environment.sessionVariables = {
+  #   LD_LIBRARY_PATH = "${pkgs.stdenv.cc.cc.lib}/lib";
+  # };
   src = null;
   shellHook = ''
     # Allow the use of wheels.
@@ -30,13 +36,12 @@ stdenv.mkDerivation {
 
     # Augment the dynamic linker path
     export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${R}/lib/R/lib:${readline}/lib
-
     export QT_QPA_PLATFORM_PLUGIN_PATH="${qt5.qtbase.bin}/lib/qt-${qt5.qtbase.version}/plugins";
 
-    if [ ! -d "venv" ]; then
-        virtualenv venv
+    if [ ! -d ".venv" ]; then
+        poetry install --with uis
     fi
 
-    source venv/bin/activate
+    poetry shell
   '';
 }
```
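The reworked hook in this older poetry-based shell is self-bootstrapping; a minimal session sketch, assuming poetry 1.x semantics for ``poetry shell``:

```sh
nix-shell develop.nix
# on first entry the hook runs `poetry install --with uis` (no .venv yet),
# then `poetry shell` drops you into the project's virtualenv
```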
```diff
@@ -19,8 +19,9 @@ services:
 
     # other image tags available:
     # https://github.com/waytrade/ib-gateway-docker#supported-tags
-    # image: waytrade/ib-gateway:981.3j
-    image: waytrade/ib-gateway:1012.2i
+    # image: waytrade/ib-gateway:1012.2i
+    image: ghcr.io/gnzsnz/ib-gateway:latest
+
     restart: 'no'  # restart on boot whenev there's a crash or user clicsk
     network_mode: 'host'
 
```
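Neither the compose file's path nor the service name survive in this hunk, so as a hedged sketch, bringing up the swapped-in gateway image from the directory holding that file would look something like:

```sh
# assumes the working directory contains the compose file shown above
docker compose pull    # fetch ghcr.io/gnzsnz/ib-gateway:latest
docker compose up -d   # runs with network_mode: 'host' and restart: 'no'
```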
```diff
@@ -117,9 +117,57 @@ SecondFactorDevice=
 
 # If you use the IBKR Mobile app for second factor authentication,
 # and you fail to complete the process before the time limit imposed
-# by IBKR, you can use this setting to tell IBC to exit: arrangements
-# can then be made to automatically restart IBC in order to initiate
-# the login sequence afresh. Otherwise, manual intervention at TWS's
+# by IBKR, this setting tells IBC whether to automatically restart
+# the login sequence, giving you another opportunity to complete
+# second factor authentication.
+#
+# Permitted values are 'yes' and 'no'.
+#
+# If this setting is not present or has no value, then the value
+# of the deprecated ExitAfterSecondFactorAuthenticationTimeout is
+# used instead. If this also has no value, then this setting defaults
+# to 'no'.
+#
+# NB: you must be using IBC v3.14.0 or later to use this setting:
+# earlier versions ignore it.
+
+ReloginAfterSecondFactorAuthenticationTimeout=
+
+
+# This setting is only relevant if
+# ReloginAfterSecondFactorAuthenticationTimeout is set to 'yes',
+# or if ExitAfterSecondFactorAuthenticationTimeout is set to 'yes'.
+#
+# It controls how long (in seconds) IBC waits for login to complete
+# after the user acknowledges the second factor authentication
+# alert at the IBKR Mobile app. If login has not completed after
+# this time, IBC terminates.
+# The default value is 60.
+
+SecondFactorAuthenticationExitInterval=
+
+
+# This setting specifies the timeout for second factor authentication
+# imposed by IB. The value is in seconds. You should not change this
+# setting unless you have reason to believe that IB has changed the
+# timeout. The default value is 180.
+
+SecondFactorAuthenticationTimeout=180
+
+
+# DEPRECATED SETTING
+# ------------------
+#
+# ExitAfterSecondFactorAuthenticationTimeout - THIS SETTING WILL BE
+# REMOVED IN A FUTURE RELEASE. For IBC version 3.14.0 and later, see
+# the notes for ReloginAfterSecondFactorAuthenticationTimeout above.
+#
+# For IBC versions earlier than 3.14.0: If you use the IBKR Mobile
+# app for second factor authentication, and you fail to complete the
+# process before the time limit imposed by IBKR, you can use this
+# setting to tell IBC to exit: arrangements can then be made to
+# automatically restart IBC in order to initiate the login sequence
+# afresh. Otherwise, manual intervention at TWS's
 # Second Factor Authentication dialog is needed to complete the
 # login.
 #
```
```diff
@@ -132,29 +180,18 @@ SecondFactorDevice=
 ExitAfterSecondFactorAuthenticationTimeout=no
 
 
-# This setting is only relevant if
-# ExitAfterSecondFactorAuthenticationTimeout is set to 'yes'.
-#
-# It controls how long (in seconds) IBC waits for login to complete
-# after the user acknowledges the second factor authentication
-# alert at the IBKR Mobile app. If login has not completed after
-# this time, IBC terminates.
-# The default value is 40.
-
-SecondFactorAuthenticationExitInterval=
-
-
 # Trading Mode
 # ------------
 #
-# TWS 955 introduced a new Trading Mode combo box on its login
-# dialog. This indicates whether the live account or the paper
-# trading account corresponding to the supplied credentials is
-# to be used. The allowed values are 'live' (the default) and
-# 'paper'. For earlier versions of TWS this setting has no
-# effect.
+# This indicates whether the live account or the paper trading
+# account corresponding to the supplied credentials is to be used.
+# The allowed values are 'live' (the default) and 'paper'.
+#
+# If this is set to 'live', then the credentials for the live
+# account must be supplied. If it is set to 'paper', then either
+# the live or the paper-trading credentials may be supplied.
 
-TradingMode=
+TradingMode=paper
 
 
 # Paper-trading Account Warning
@@ -188,7 +225,7 @@ AcceptNonBrokerageAccountWarning=yes
 #
 # The default value is 60.
 
-LoginDialogDisplayTimeout=20
+LoginDialogDisplayTimeout=60
 
 
 
@@ -217,7 +254,15 @@ LoginDialogDisplayTimeout=20
 # but they are acceptable.
 #
 # The default is the current working directory when IBC is
-# started.
+# started, unless the TWS_SETTINGS_PATH setting in the relevant
+# start script is set.
+#
+# If both this setting and TWS_SETTINGS_PATH are set, then this
+# setting takes priority. Note that if they have different values,
+# auto-restart will not work.
+#
+# NB: this setting is now DEPRECATED. You should use the
+# TWS_SETTINGS_PATH setting in the relevant start script.
 
 IbDir=/root/Jts
 
```
```diff
@@ -286,13 +331,30 @@ ExistingSessionDetectedAction=primary
 #
 # If OverrideTwsApiPort is set to an integer, IBC changes the
 # 'Socket port' in TWS's API configuration to that number shortly
-# after startup. Leaving the setting blank will make no change to
+# after startup (but note that for the FIX Gateway, this setting is
+# actually stored in jts.ini rather than the Gateway's settings
+# file). Leaving the setting blank will make no change to
 # the current setting. This setting is only intended for use in
 # certain specialized situations where the port number needs to
+# be set dynamically at run-time, and for the FIX Gateway: most
+# non-FIX users will never need it, so don't use it unless you know
+# you need it.
+
+OverrideTwsApiPort=4000
+
+
+# Override TWS Master Client ID
+# -----------------------------
+#
+# If OverrideTwsMasterClientID is set to an integer, IBC changes the
+# 'Master Client ID' value in TWS's API configuration to that
+# value shortly after startup. Leaving the setting blank will make
+# no change to the current setting. This setting is only intended
+# for use in certain specialized situations where the value needs to
 # be set dynamically at run-time: most users will never need it,
 # so don't use it unless you know you need it.
 
-; OverrideTwsApiPort=4002
+OverrideTwsMasterClientID=
 
 
 # Read-only Login
@@ -302,11 +364,13 @@ ExistingSessionDetectedAction=primary
 # account security programme, the user will not be asked to perform
 # the second factor authentication action, and login to TWS will
 # occur automatically in read-only mode: in this mode, placing or
-# managing orders is not allowed. If set to 'no', and the user is
-# enrolled in IB's account security programme, the user must perform
-# the relevant second factor authentication action to complete the
-# login.
-
+# managing orders is not allowed.
+#
+# If set to 'no', and the user is enrolled in IB's account security
+# programme, the second factor authentication process is handled
+# according to the Second Factor Authentication Settings described
+# elsewhere in this file.
+#
 # If the user is not enrolled in IB's account security programme,
 # this setting is ignored. The default is 'no'.
 
```
```diff
@@ -326,7 +390,44 @@ ReadOnlyLogin=no
 # set the relevant checkbox (this only needs to be done once) and
 # not provide a value for this setting.
 
-ReadOnlyApi=no
+ReadOnlyApi=
+
+
+# API Precautions
+# ---------------
+#
+# These settings relate to the corresponding 'Precautions' checkboxes in the
+# API section of the Global Configuration dialog.
+#
+# For all of these, the accepted values are:
+# - 'yes' sets the checkbox
+# - 'no' clears the checkbox
+# - if not set, the existing TWS/Gateway configuration is unchanged
+#
+# NB: thess settings are really only supplied for the benefit of new TWS
+# or Gateway instances that are being automatically installed and
+# started without user intervention, or where user settings are not preserved
+# between sessions (eg some Docker containers). Where a user is involved, they
+# should use the Global Configuration to set the relevant checkboxes and not
+# provide values for these settings.
+
+BypassOrderPrecautions=
+
+BypassBondWarning=
+
+BypassNegativeYieldToWorstConfirmation=
+
+BypassCalledBondWarning=
+
+BypassSameActionPairTradeWarning=
+
+BypassPriceBasedVolatilityRiskWarning=
+
+BypassUSStocksMarketDataInSharesWarning=
+
+BypassRedirectOrderWarning=
+
+BypassNoOverfillProtectionPrecaution=
 
 
 # Market data size for US stocks - lots or shares
```
```diff
@@ -381,54 +482,145 @@ AcceptBidAskLastSizeDisplayUpdateNotification=accept
 SendMarketDataInLotsForUSstocks=
 
 
+# Trusted API Client IPs
+# ----------------------
+#
+# NB: THIS SETTING IS ONLY RELEVANT FOR THE GATEWAY, AND ONLY WHEN FIX=yes.
+# In all other cases it is ignored.
+#
+# This is a list of IP addresses separated by commas. API clients with IP
+# addresses in this list are able to connect to the API without Gateway
+# generating the 'Incoming connection' popup.
+#
+# Note that 127.0.0.1 is always permitted to connect, so do not include it
+# in this setting.
+
+TrustedTwsApiClientIPs=
+
+
+# Reset Order ID Sequence
+# -----------------------
+#
+# The setting resets the order id sequence for orders submitted via the API, so
+# that the next invocation of the `NextValidId` API callback will return the
+# value 1. The reset occurs when TWS starts.
+#
+# Note that order ids are reset for all API clients, except those that have
+# outstanding (ie incomplete) orders: their order id sequence carries on as
+# before.
+#
+# Valid values are 'yes', 'true', 'false' and 'no'. The default is 'no'.
+
+ResetOrderIdsAtStart=
+
+
+# This setting specifies IBC's action when TWS displays the dialog asking for
+# confirmation of a request to reset the API order id sequence.
+#
+# Note that the Gateway never displays this dialog, so this setting is ignored
+# for a Gateway session.
+#
+# Valid values consist of two strings separated by a solidus '/'. The first
+# value specifies the action to take when the order id reset request resulted
+# from setting ResetOrderIdsAtStart=yes. The second specifies the action to
+# take when the order id reset request is a result of the user clicking the
+# 'Reset API order ID sequence' button in the API configuration. Each value
+# must be one of the following:
+#
+#    'confirm'
+#        order ids will be reset
+#
+#    'reject'
+#        order ids will not be reset
+#
+#    'ignore'
+#        IBC will ignore the dialog. The user must take action.
+#
+#    The default setting is ignore/ignore
+
+# Examples:
+#
+#    'confirm/reject' - confirm order id reset only if ResetOrderIdsAtStart=yes
+#                       and reject any user-initiated requests
+#
+#    'ignore/confirm' - user must decide what to do if ResetOrderIdsAtStart=yes
+#                       and confirm user-initiated requests
+#
+#    'reject/ignore'  - reject order id reset if  ResetOrderIdsAtStart=yes but
+#                       allow user to handle user-initiated requests
+
+ConfirmOrderIdReset=
+
+
 
 # =============================================================================
-# 4.   TWS Auto-Closedown
+# 4.   TWS Auto-Logoff and Auto-Restart
 # =============================================================================
 #
-# IMPORTANT NOTE: Starting with TWS 974, this setting no longer
-# works properly, because IB have changed the way TWS handles its
-# autologoff mechanism.
+# TWS and Gateway insist on being restarted every day. Two alternative
+# automatic options are offered:
 #
-# You should now configure the TWS autologoff time to something
-# convenient for you, and restart IBC each day.
+#    - Auto-Logoff: at a specified time, TWS shuts down tidily, without
+#      restarting.
 #
-# Alternatively, discontinue use of IBC and use the auto-relogin
-# mechanism within TWS 974 and later versions (note that the
-# auto-relogin mechanism provided by IB is not available if you
-# use IBC).
+#    - Auto-Restart: at a specified time, TWS shuts down and then restarts
+#      without the user having to re-autheticate.
+#
+# The normal way to configure the time at which this happens is via the Lock
+# and Exit section of the Configuration dialog. Once this time has been
+# configured in this way, the setting persists until the user changes it again.
+#
+# However, there are situations where there is no user available to do this
+# configuration, or where there is no persistent storage (for example some
+# Docker images). In such cases, the auto-restart or auto-logoff time can be
+# set whenever IBC starts with the settings below.
+#
+# The value, if specified, must be a time in HH:MM AM/PM format, for example
+# 08:00 AM or 10:00 PM. Note that there must be a single space between the
+# two parts of this value; also that midnight is "12:00 AM" and midday is
+# "12:00 PM".
+#
+# If no value is specified for either setting, the currently configured
+# settings will apply. If a value is supplied for one setting, the other
+# setting is cleared. If values are supplied for both settings, only the
+# auto-restart time is set, and the auto-logoff time is cleared.
+#
+# Note that for a normal TWS/Gateway installation with persistent storage
+# (for example on a desktop computer) the value will be persisted as if the
+# user had set it via the configuration dialog.
+#
+# If you choose to auto-restart, you should take note of the considerations
+# described at the link below. Note that where this information mentions
+# 'manual authentication', restarting IBC will do the job (IBKR does not
+# recognise the existence of IBC in its docuemntation).
+#
+#  https://www.interactivebrokers.com/en/software/tws/twsguide.htm#usersguidebook/configuretws/auto_restart_info.htm
+#
+# If you use the "RESTART" command via the IBC command server, and IBC is
+# running any version of the Gateway (or a version of TWS earlier than 1018),
+# note that this will set the Auto-Restart time in Gateway/TWS's configuration
+# dialog to the time at which the restart actually happens (which may be up to
+# a minute after the RESTART command is issued). To prevent future auto-
+# restarts at this time, you must make sure you have set AutoLogoffTime or
+# AutoRestartTime to your desired value before running IBC. NB: this does not
+# apply to TWS from version 1018 onwards.
 
-# Set to yes or no (lower case).
-#
-#   yes   means allow TWS to shut down automatically at its
-#         specified shutdown time, which is set via the TWS
-#         configuration menu.
-#
-#   no    means TWS never shuts down automatically.
-#
-# NB: IB recommends that you do not keep TWS running
-# continuously. If you set this setting to 'no', you may
-# experience incorrect TWS operation.
-#
-# NB: the default for this setting is 'no'. Since this will
-# only work properly with TWS versions earlier than 974, you
-# should explicitly set this to 'yes' for version 974 and later.
-
-IbAutoClosedown=yes
+AutoLogoffTime=
 
+AutoRestartTime=
 
 
 # =============================================================================
 # 5.   TWS Tidy Closedown Time
 # =============================================================================
 #
-# NB: starting with TWS 974 this is no longer a useful option
-# because both TWS and Gateway now have the same auto-logoff
-# mechanism, and IBC can no longer avoid this.
+# Specifies a time at which TWS will close down tidily, with no restart.
 #
-# Note that giving this setting a value does not change TWS's
-# auto-logoff in any way: any setting will be additional to the
-# TWS auto-logoff.
+# There is little reason to use this setting. It is similar to AutoLogoffTime,
+# but can include a day-of-the-week, whereas AutoLogoffTime and AutoRestartTime
+# apply every day. So for example you could use ClosedownAt in conjunction with
+# AutoRestartTime to shut down TWS on Friday evenings after the markets
+# close, without it running on Saturday as well.
 #
 # To tell IBC to tidily close TWS at a specified time every
 # day, set this value to <hh:mm>, for example:
```
```diff
@@ -487,7 +679,7 @@ AcceptIncomingConnectionAction=reject
 #   no    means the dialog remains on display and must be
 #         handled by the user.
 
-AllowBlindTrading=yes
+AllowBlindTrading=no
 
 
 # Save Settings on a Schedule
@@ -530,6 +722,26 @@ AllowBlindTrading=yes
 SaveTwsSettingsAt=
 
 
+# Confirm Crypto Currency Orders Automatically
+# --------------------------------------------
+#
+# When you place an order for a cryptocurrency contract, a dialog is displayed
+# asking you to confirm that you want to place the order, and notifying you
+# that you are placing an order to trade cryptocurrency with Paxos, a New York
+# limited trust company, and not at Interactive Brokers.
+#
+#   transmit    means that the order will be placed automatically, and the
+#               dialog will then be closed
+#
+#   cancel      means that the order will not be placed, and the dialog will
+#               then be closed
+#
+#   manual      means that IBC will take no action and the user must deal
+#               with the dialog
+
+ConfirmCryptoCurrencyOrders=transmit
+
+
 
 # =============================================================================
 # 7.   Settings Specific to Indian Versions of TWS
```
```diff
@@ -566,13 +778,17 @@ DismissNSEComplianceNotice=yes
 #
 # The port number that IBC listens on for commands
 # such as "STOP". DO NOT set this to the port number
-# used for TWS API connections. There is no good reason
-# to change this setting unless the port is used by
-# some other application (typically another instance of
-# IBC). The default value is 0, which tells IBC not to
-# start the command server
+# used for TWS API connections.
+#
+# The convention is to use 7462 for this port,
+# but it must be set to a different value from any other
+# IBC instance that might run at the same time.
+#
+# The default value is 0, which tells IBC not to start
+# the command server
 
 #CommandServerPort=7462
+CommandServerPort=0
 
 
 # Permitted Command Sources
```
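The config documents a "STOP"/"RESTART" command server but now ships with it disabled (``CommandServerPort=0``). Assuming you re-enable it on the conventional port 7462 and connect from a host permitted by ``ControlFrom``, commands are plain line-oriented text over TCP, so something like the following should work (the netcat client here is an assumption; any TCP client will do):

```sh
# hedged example: send a restart request to a locally running IBC
printf 'RESTART\n' | nc 127.0.0.1 7462
```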
```diff
@@ -583,19 +799,19 @@
 # IBC.  Commands can always be sent from the
 # same host as IBC is running on.
 
-ControlFrom=127.0.0.1
+ControlFrom=
 
 
 # Address for Receiving Commands
 # ------------------------------
 #
 # Specifies the IP address on which the Command Server
-# is so listen. For a multi-homed host, this can be used
+# is to listen. For a multi-homed host, this can be used
 # to specify that connection requests are only to be
 # accepted on the specified address. The default is to
 # accept connection requests on all local addresses.
 
-BindAddress=127.0.0.1
+BindAddress=
 
 
 # Command Prompt
@@ -621,7 +837,7 @@ CommandPrompt=
 # information is sent. The default is that such information
 # is not sent.
 
-SuppressInfoMessages=no
+SuppressInfoMessages=yes
 
 
 
```
|  | @ -651,10 +867,10 @@ SuppressInfoMessages=no | ||||||
| # The LogStructureScope setting indicates which windows are | # The LogStructureScope setting indicates which windows are | ||||||
| # eligible for structure logging: | # eligible for structure logging: | ||||||
| # | # | ||||||
| #    - if set to 'known', only windows that IBC recognizes | #    - (default value) if set to 'known', only windows that | ||||||
| #      are eligible - these are windows that IBC has some | #      IBC recognizes are eligible - these are windows that | ||||||
| #      interest in monitoring, usually to take some action | #      IBC has some interest in monitoring, usually to take | ||||||
| #      on the user's behalf; | #      some action on the user's behalf; | ||||||
| # | # | ||||||
| #    - if set to 'unknown', only windows that IBC does not | #    - if set to 'unknown', only windows that IBC does not | ||||||
| #      recognize are eligible. Most windows displayed by | #      recognize are eligible. Most windows displayed by | ||||||
|  | @ -667,9 +883,8 @@ SuppressInfoMessages=no | ||||||
| #    - if set to 'all', then every window displayed by TWS | #    - if set to 'all', then every window displayed by TWS | ||||||
| #      is eligible. | #      is eligible. | ||||||
| # | # | ||||||
| # The default value is 'known'. |  | ||||||
| 
 | 
 | ||||||
| LogStructureScope=all | LogStructureScope=known | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # When to Log Window Structure | # When to Log Window Structure | ||||||
|  | @ -682,13 +897,15 @@ LogStructureScope=all | ||||||
| #       structure of an eligible window the first time it | #       structure of an eligible window the first time it | ||||||
| #       is encountered; | #       is encountered; | ||||||
| # | # | ||||||
|  | #     - if set to 'openclose', the structure is logged every | ||||||
|  | #       time an eligible window is opened or closed; | ||||||
|  | # | ||||||
| #    - if set to 'activate', the structure is logged every | #    - if set to 'activate', the structure is logged every | ||||||
| #      time an eligible window is made active; | #      time an eligible window is made active; | ||||||
| # | # | ||||||
| #    - if set to 'never' or 'no' or 'false', structure | #    - (default value) if set to 'never' or 'no' or 'false', | ||||||
| #      information is never logged. | #      structure information is never logged. | ||||||
| # | # | ||||||
| # The default value is 'never'. |  | ||||||
| 
 | 
 | ||||||
| LogStructureWhen=never | LogStructureWhen=never | ||||||
| 
 | 
 | ||||||
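As a worked (non-default) combination of the two structure-logging settings above: to capture the layout of a dialog IBC does not recognize, both when it opens and when it closes, one could set:

```
LogStructureScope=unknown
LogStructureWhen=openclose
```

Both values come straight from the documented options; the shipped defaults remain `known` and `never`.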
|  | @ -708,4 +925,3 @@ LogStructureWhen=never | ||||||
| #LogComponents= | #LogComponents= | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|  |  | ||||||
|  | @ -0,0 +1,138 @@ | ||||||
|  | { | ||||||
|  |   "nodes": { | ||||||
|  |     "flake-utils": { | ||||||
|  |       "inputs": { | ||||||
|  |         "systems": "systems" | ||||||
|  |       }, | ||||||
|  |       "locked": { | ||||||
|  |         "lastModified": 1689068808, | ||||||
|  |         "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=", | ||||||
|  |         "owner": "numtide", | ||||||
|  |         "repo": "flake-utils", | ||||||
|  |         "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4", | ||||||
|  |         "type": "github" | ||||||
|  |       }, | ||||||
|  |       "original": { | ||||||
|  |         "owner": "numtide", | ||||||
|  |         "repo": "flake-utils", | ||||||
|  |         "type": "github" | ||||||
|  |       } | ||||||
|  |     }, | ||||||
|  |     "flake-utils_2": { | ||||||
|  |       "inputs": { | ||||||
|  |         "systems": "systems_2" | ||||||
|  |       }, | ||||||
|  |       "locked": { | ||||||
|  |         "lastModified": 1689068808, | ||||||
|  |         "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=", | ||||||
|  |         "owner": "numtide", | ||||||
|  |         "repo": "flake-utils", | ||||||
|  |         "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4", | ||||||
|  |         "type": "github" | ||||||
|  |       }, | ||||||
|  |       "original": { | ||||||
|  |         "owner": "numtide", | ||||||
|  |         "repo": "flake-utils", | ||||||
|  |         "type": "github" | ||||||
|  |       } | ||||||
|  |     }, | ||||||
|  |     "nix-github-actions": { | ||||||
|  |       "inputs": { | ||||||
|  |         "nixpkgs": [ | ||||||
|  |           "poetry2nix", | ||||||
|  |           "nixpkgs" | ||||||
|  |         ] | ||||||
|  |       }, | ||||||
|  |       "locked": { | ||||||
|  |         "lastModified": 1688870561, | ||||||
|  |         "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=", | ||||||
|  |         "owner": "nix-community", | ||||||
|  |         "repo": "nix-github-actions", | ||||||
|  |         "rev": "165b1650b753316aa7f1787f3005a8d2da0f5301", | ||||||
|  |         "type": "github" | ||||||
|  |       }, | ||||||
|  |       "original": { | ||||||
|  |         "owner": "nix-community", | ||||||
|  |         "repo": "nix-github-actions", | ||||||
|  |         "type": "github" | ||||||
|  |       } | ||||||
|  |     }, | ||||||
|  |     "nixpkgs": { | ||||||
|  |       "locked": { | ||||||
|  |         "lastModified": 1692174805, | ||||||
|  |         "narHash": "sha256-xmNPFDi/AUMIxwgOH/IVom55Dks34u1g7sFKKebxUm0=", | ||||||
|  |         "owner": "NixOS", | ||||||
|  |         "repo": "nixpkgs", | ||||||
|  |         "rev": "caac0eb6bdcad0b32cb2522e03e4002c8975c62e", | ||||||
|  |         "type": "github" | ||||||
|  |       }, | ||||||
|  |       "original": { | ||||||
|  |         "owner": "NixOS", | ||||||
|  |         "ref": "nixos-unstable", | ||||||
|  |         "repo": "nixpkgs", | ||||||
|  |         "type": "github" | ||||||
|  |       } | ||||||
|  |     }, | ||||||
|  |     "poetry2nix": { | ||||||
|  |       "inputs": { | ||||||
|  |         "flake-utils": "flake-utils_2", | ||||||
|  |         "nix-github-actions": "nix-github-actions", | ||||||
|  |         "nixpkgs": [ | ||||||
|  |           "nixpkgs" | ||||||
|  |         ] | ||||||
|  |       }, | ||||||
|  |       "locked": { | ||||||
|  |         "lastModified": 1692048894, | ||||||
|  |         "narHash": "sha256-cDw03rso2V4CDc3Mll0cHN+ztzysAvdI8pJ7ybbz714=", | ||||||
|  |         "ref": "refs/heads/pyqt6", | ||||||
|  |         "rev": "b059ad4c3051f45d6c912e17747aae37a9ec1544", | ||||||
|  |         "revCount": 2276, | ||||||
|  |         "type": "git", | ||||||
|  |         "url": "file:///home/lord_fomo/repos/poetry2nix" | ||||||
|  |       }, | ||||||
|  |       "original": { | ||||||
|  |         "type": "git", | ||||||
|  |         "url": "file:///home/lord_fomo/repos/poetry2nix" | ||||||
|  |       } | ||||||
|  |     }, | ||||||
|  |     "root": { | ||||||
|  |       "inputs": { | ||||||
|  |         "flake-utils": "flake-utils", | ||||||
|  |         "nixpkgs": "nixpkgs", | ||||||
|  |         "poetry2nix": "poetry2nix" | ||||||
|  |       } | ||||||
|  |     }, | ||||||
|  |     "systems": { | ||||||
|  |       "locked": { | ||||||
|  |         "lastModified": 1681028828, | ||||||
|  |         "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", | ||||||
|  |         "owner": "nix-systems", | ||||||
|  |         "repo": "default", | ||||||
|  |         "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", | ||||||
|  |         "type": "github" | ||||||
|  |       }, | ||||||
|  |       "original": { | ||||||
|  |         "owner": "nix-systems", | ||||||
|  |         "repo": "default", | ||||||
|  |         "type": "github" | ||||||
|  |       } | ||||||
|  |     }, | ||||||
|  |     "systems_2": { | ||||||
|  |       "locked": { | ||||||
|  |         "lastModified": 1681028828, | ||||||
|  |         "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", | ||||||
|  |         "owner": "nix-systems", | ||||||
|  |         "repo": "default", | ||||||
|  |         "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", | ||||||
|  |         "type": "github" | ||||||
|  |       }, | ||||||
|  |       "original": { | ||||||
|  |         "owner": "nix-systems", | ||||||
|  |         "repo": "default", | ||||||
|  |         "type": "github" | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   }, | ||||||
|  |   "root": "root", | ||||||
|  |   "version": 7 | ||||||
|  | } | ||||||
|  | @ -0,0 +1,180 @@ | ||||||
|  | # NOTE: to convert this repo to a poetry2nix env, the steps | ||||||
|  | # are: | ||||||
|  | # - install poetry in your system nix config | ||||||
|  | # - convert the repo to use poetry using `poetry init`: | ||||||
|  | #   https://python-poetry.org/docs/basic-usage/#initialising-a-pre-existing-project | ||||||
|  | # - then manually ensure all deps are converted over | ||||||
|  | # - add this file to the repo and commit it | ||||||
|  | 
 | ||||||
|  | # GROKin tips: | ||||||
|  | # - CLI eps are (ostensibly) added via an `entry_points.txt`: | ||||||
|  | #   - https://packaging.python.org/en/latest/specifications/entry-points/#file-format | ||||||
|  | #   - https://github.com/nix-community/poetry2nix/blob/master/editable.nix#L49 | ||||||
|  | { | ||||||
|  |   description = "piker: trading gear for hackers (pkged with poetry2nix)"; | ||||||
|  | 
 | ||||||
|  |   inputs.flake-utils.url = "github:numtide/flake-utils"; | ||||||
|  |   inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; | ||||||
|  | 
 | ||||||
|  |   # see https://github.com/nix-community/poetry2nix/tree/master#api | ||||||
|  |   inputs.poetry2nix = { | ||||||
|  |     # url = "github:nix-community/poetry2nix"; | ||||||
|  |     # url = "github:K900/poetry2nix/qt5-explicit-deps"; | ||||||
|  |     url = "/home/lord_fomo/repos/poetry2nix"; | ||||||
|  | 
 | ||||||
|  |     inputs.nixpkgs.follows = "nixpkgs"; | ||||||
|  |   }; | ||||||
|  | 
 | ||||||
|  |   outputs = { | ||||||
|  |     self, | ||||||
|  |     nixpkgs, | ||||||
|  |     flake-utils, | ||||||
|  |     poetry2nix, | ||||||
|  |   }: | ||||||
|  |     # TODO: build cross-OS and use the `${system}` var thingy.. | ||||||
|  |     flake-utils.lib.eachDefaultSystem (system: | ||||||
|  |       let | ||||||
|  |         # use PWD as sources | ||||||
|  |         projectDir = ./.; | ||||||
|  |         pyproject = ./pyproject.toml; | ||||||
|  |         poetrylock = ./poetry.lock; | ||||||
|  | 
 | ||||||
|  |         # TODO: port to 3.11 and support both versions? | ||||||
|  |         python = "python3.10"; | ||||||
|  | 
 | ||||||
|  |         # for more functions and examples. | ||||||
|  |         # inherit | ||||||
|  |         # (poetry2nix.legacyPackages.${system}) | ||||||
|  |         # mkPoetryApplication; | ||||||
|  |         # pkgs = nixpkgs.legacyPackages.${system}; | ||||||
|  | 
 | ||||||
|  |         pkgs = nixpkgs.legacyPackages.x86_64-linux; | ||||||
|  |         lib = pkgs.lib; | ||||||
|  |         p2npkgs = poetry2nix.legacyPackages.x86_64-linux; | ||||||
|  | 
 | ||||||
|  |         # define all pkg overrides per dep, see edgecases.md: | ||||||
|  |         # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md | ||||||
|  |         # TODO: add these into the json file: | ||||||
|  |         # https://github.com/nix-community/poetry2nix/blob/master/overrides/build-systems.json | ||||||
|  |         pypkgs-build-requirements = { | ||||||
|  |           asyncvnc = [ "setuptools" ]; | ||||||
|  |           eventkit = [ "setuptools" ]; | ||||||
|  |           ib-insync = [ "setuptools" "flake8" ]; | ||||||
|  |           msgspec = [ "setuptools"]; | ||||||
|  |           pdbp = [ "setuptools" ]; | ||||||
|  |           pyqt6-sip = [ "setuptools" ]; | ||||||
|  |           tabcompleter = [ "setuptools" ]; | ||||||
|  |           tractor = [ "setuptools" ]; | ||||||
|  |           tricycle = [ "setuptools" ]; | ||||||
|  |           trio-typing = [ "setuptools" ]; | ||||||
|  |           trio-util = [ "setuptools" ]; | ||||||
|  |           xonsh = [ "setuptools" ]; | ||||||
|  |         }; | ||||||
|  | 
 | ||||||
|  |         # auto-generate override entries | ||||||
|  |         p2n-overrides = p2npkgs.defaultPoetryOverrides.extend (self: super: | ||||||
|  |           builtins.mapAttrs (package: build-requirements: | ||||||
|  |             (builtins.getAttr package super).overridePythonAttrs (old: { | ||||||
|  |               buildInputs = ( | ||||||
|  |                 old.buildInputs or [ ] | ||||||
|  |               ) ++ ( | ||||||
|  |                 builtins.map ( | ||||||
|  |                   pkg: if builtins.isString pkg then builtins.getAttr pkg super else pkg | ||||||
|  |                   ) build-requirements | ||||||
|  |               ); | ||||||
|  |             }) | ||||||
|  |           ) pypkgs-build-requirements | ||||||
|  |         ); | ||||||
|  | 
 | ||||||
|  |         # override some ahead-of-time compiled extensions | ||||||
|  |         # to be built with their wheels. | ||||||
|  |         ahot_overrides = p2n-overrides.extend( | ||||||
|  |           final: prev: { | ||||||
|  | 
 | ||||||
|  |             # llvmlite = prev.llvmlite.override { | ||||||
|  |             #   preferWheel = false; | ||||||
|  |             # }; | ||||||
|  | 
 | ||||||
|  |             # TODO: get this workin with p2n and nixpkgs.. | ||||||
|  |             # pyqt6 = prev.pyqt6.override { | ||||||
|  |             #   preferWheel = true; | ||||||
|  |             # }; | ||||||
|  | 
 | ||||||
|  |             # NOTE: this DOESN'T work atm but after a fix | ||||||
|  |             # to poetry2nix, it will and actually this line | ||||||
|  |             # won't be needed - thanks @k900: | ||||||
|  |             # https://github.com/nix-community/poetry2nix/pull/1257 | ||||||
|  |             pyqt5 = prev.pyqt5.override { | ||||||
|  |               # withWebkit = false; | ||||||
|  |               preferWheel = true; | ||||||
|  |             }; | ||||||
|  | 
 | ||||||
|  |             # see PR from @k900: | ||||||
|  |             # https://github.com/nix-community/poetry2nix/pull/1257 | ||||||
|  |             # pyqt5-qt5 = prev.pyqt5-qt5.override { | ||||||
|  |             #   withWebkit = false; | ||||||
|  |             #   preferWheel = true; | ||||||
|  |             # }; | ||||||
|  | 
 | ||||||
|  |             # TODO: patch in an override for polars to build | ||||||
|  |             # from src! See the details likely needed from | ||||||
|  |             # the cryptography entry: | ||||||
|  |             # https://github.com/nix-community/poetry2nix/blob/master/overrides/default.nix#L426-L435 | ||||||
|  |             polars = prev.polars.override { | ||||||
|  |               preferWheel = true; | ||||||
|  |             }; | ||||||
|  |           } | ||||||
|  |       ); | ||||||
|  | 
 | ||||||
|  |       # WHY!? -> output-attrs that `nix develop` scans for: | ||||||
|  |       # https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-develop.html#flake-output-attributes | ||||||
|  |       in | ||||||
|  |       rec { | ||||||
|  |         packages = { | ||||||
|  |           # piker = poetry2nix.legacyPackages.x86_64-linux.mkPoetryEditablePackage { | ||||||
|  |           #   editablePackageSources = { piker = ./piker; }; | ||||||
|  | 
 | ||||||
|  |           piker = p2npkgs.mkPoetryApplication { | ||||||
|  |             projectDir = projectDir; | ||||||
|  | 
 | ||||||
|  |             # SEE ABOVE for auto-genned input set, override | ||||||
|  |             # buncha deps with extras.. like `setuptools` mostly. | ||||||
|  |             # TODO: maybe propose a patch to p2n to show that you | ||||||
|  |             # can even do this in the edgecases docs? | ||||||
|  |             overrides = ahot_overrides; | ||||||
|  | 
 | ||||||
|  |             # XXX: won't work on llvmlite.. | ||||||
|  |             # preferWheels = true; | ||||||
|  |           }; | ||||||
|  |         }; | ||||||
|  | 
 | ||||||
|  |         # devShells.default = pkgs.mkShell { | ||||||
|  |         #   projectDir = projectDir; | ||||||
|  |         #   python = "python3.10"; | ||||||
|  |         #   overrides = ahot_overrides; | ||||||
|  |         #   inputsFrom = [ self.packages.x86_64-linux.piker ]; | ||||||
|  |         #   packages = packages; | ||||||
|  |         #   # packages = [ poetry2nix.packages.${system}.poetry ]; | ||||||
|  |         # }; | ||||||
|  | 
 | ||||||
|  |         # TODO: grok the difference here.. | ||||||
|  |         # - avoid re-cloning git repos on every develop entry.. | ||||||
|  |         # - ideally allow hacking on the src code of some deps | ||||||
|  |         #   (tractor, pyqtgraph, tomlkit, etc.) WITHOUT having to | ||||||
|  |         #   re-install them every time a change is made. | ||||||
|  |         # - boot a usable xonsh inside the poetry virtualenv when | ||||||
|  |         #   defined via a custom entry point? | ||||||
|  |         devShells.default = p2npkgs.mkPoetryEnv { | ||||||
|  |         # env = p2npkgs.mkPoetryEnv { | ||||||
|  |             projectDir = projectDir; | ||||||
|  |             python = pkgs.python310; | ||||||
|  |             overrides = ahot_overrides; | ||||||
|  |             editablePackageSources = packages; | ||||||
|  |               # piker = "./"; | ||||||
|  |               # tractor = "../tractor/"; | ||||||
|  |             # };  # wut? | ||||||
|  |         }; | ||||||
|  |       } | ||||||
|  |     );  # end of .outputs scope | ||||||
|  | } | ||||||
|  | @ -327,7 +327,11 @@ class MktPair(Struct, frozen=True): | ||||||
|     ) -> dict: |     ) -> dict: | ||||||
|         d = super().to_dict(**kwargs) |         d = super().to_dict(**kwargs) | ||||||
|         d['src'] = self.src.to_dict(**kwargs) |         d['src'] = self.src.to_dict(**kwargs) | ||||||
|  | 
 | ||||||
|  |         if not isinstance(self.dst, str): | ||||||
|             d['dst'] = self.dst.to_dict(**kwargs) |             d['dst'] = self.dst.to_dict(**kwargs) | ||||||
|  |         else: | ||||||
|  |             d['dst'] = str(self.dst) | ||||||
| 
 | 
 | ||||||
|         d['price_tick'] = str(self.price_tick) |         d['price_tick'] = str(self.price_tick) | ||||||
|         d['size_tick'] = str(self.size_tick) |         d['size_tick'] = str(self.size_tick) | ||||||
|  | @ -349,11 +353,16 @@ class MktPair(Struct, frozen=True): | ||||||
|         Constructor for a received msg-dict normally received over IPC. |         Constructor for a received msg-dict normally received over IPC. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         dst_asset_msg = msg.pop('dst') |         if not isinstance( | ||||||
|         dst = Asset.from_msg(dst_asset_msg)  # .copy() |             dst_asset_msg := msg.pop('dst'), | ||||||
|  |             str, | ||||||
|  |         ): | ||||||
|  |             dst: Asset = Asset.from_msg(dst_asset_msg)  # .copy() | ||||||
|  |         else: | ||||||
|  |             dst: str = dst_asset_msg | ||||||
| 
 | 
 | ||||||
|         src_asset_msg = msg.pop('src') |         src_asset_msg: dict = msg.pop('src') | ||||||
|         src = Asset.from_msg(src_asset_msg)  # .copy() |         src: Asset = Asset.from_msg(src_asset_msg)  # .copy() | ||||||
| 
 | 
 | ||||||
|         # XXX NOTE: ``msgspec`` can encode `Decimal` but it doesn't |         # XXX NOTE: ``msgspec`` can encode `Decimal` but it doesn't | ||||||
|         # decode to it by default since we aren't spec-cing these |         # decode to it by default since we aren't spec-cing these | ||||||
|  |  | ||||||
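The `MktPair` hunk above makes `dst` wire-serializable as either a full `Asset` msg-dict or a bare `str` name. A minimal sketch of the decode branch it introduces (mirroring `from_msg()`; shown only for illustration, not as the actual `piker` API surface):

```python
from piker.accounting import Asset  # the asset type referenced in the diff

def decode_dst(msg: dict) -> Asset | str:
    # pop the raw `dst` field and only rehydrate an `Asset` when
    # the msg carried a full asset-dict rather than a bare name.
    if not isinstance(dst_msg := msg.pop('dst'), str):
        return Asset.from_msg(dst_msg)
    return dst_msg
```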
|  | @ -543,6 +543,11 @@ class Account(Struct): | ||||||
| 
 | 
 | ||||||
|             if not (pos := pps.get(bs_mktid)): |             if not (pos := pps.get(bs_mktid)): | ||||||
| 
 | 
 | ||||||
|  |                 assert isinstance( | ||||||
|  |                     mkt, | ||||||
|  |                     MktPair, | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|                 # if no existing pos, allocate fresh one. |                 # if no existing pos, allocate fresh one. | ||||||
|                 pos = pps[bs_mktid] = Position( |                 pos = pps[bs_mktid] = Position( | ||||||
|                     mkt=mkt, |                     mkt=mkt, | ||||||
|  |  | ||||||
|  | @ -353,7 +353,7 @@ def open_ledger_dfs( | ||||||
|     can update the ledger on exit. |     can update the ledger on exit. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     from tractor._debug import open_crash_handler |     from piker.toolz import open_crash_handler | ||||||
|     with open_crash_handler(): |     with open_crash_handler(): | ||||||
|         if not ledger: |         if not ledger: | ||||||
|             import time |             import time | ||||||
|  |  | ||||||
|  | @ -50,7 +50,7 @@ __brokers__: list[str] = [ | ||||||
|     'binance', |     'binance', | ||||||
|     'ib', |     'ib', | ||||||
|     'kraken', |     'kraken', | ||||||
|     'kucoin' |     'kucoin', | ||||||
| 
 | 
 | ||||||
|     # broken but used to work |     # broken but used to work | ||||||
|     # 'questrade', |     # 'questrade', | ||||||
|  | @ -71,7 +71,7 @@ def get_brokermod(brokername: str) -> ModuleType: | ||||||
|     Return the imported broker module by name. |     Return the imported broker module by name. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     module = import_module('.' + brokername, 'piker.brokers') |     module: ModuleType = import_module('.' + brokername, 'piker.brokers') | ||||||
|     # we only allow monkeying because it's for internal keying |     # we only allow monkeying because it's for internal keying | ||||||
|     module.name = module.__name__.split('.')[-1] |     module.name = module.__name__.split('.')[-1] | ||||||
|     return module |     return module | ||||||
|  |  | ||||||
|  | @ -179,9 +179,6 @@ def broker_init( | ||||||
|         subpath: str = f'{modpath}.{submodname}' |         subpath: str = f'{modpath}.{submodname}' | ||||||
|         enabled.append(subpath) |         enabled.append(subpath) | ||||||
| 
 | 
 | ||||||
|         # TODO XXX: DO WE NEED THIS? |  | ||||||
|         # enabled.append('piker.data.feed') |  | ||||||
| 
 |  | ||||||
|     return ( |     return ( | ||||||
|         brokermod, |         brokermod, | ||||||
|         start_actor_kwargs,  # to `ActorNursery.start_actor()` |         start_actor_kwargs,  # to `ActorNursery.start_actor()` | ||||||
|  |  | ||||||
|  | @ -18,10 +18,11 @@ | ||||||
| Handy cross-broker utils. | Handy cross-broker utils. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
|  | from __future__ import annotations | ||||||
| from functools import partial | from functools import partial | ||||||
| 
 | 
 | ||||||
| import json | import json | ||||||
| import asks | import httpx | ||||||
| import logging | import logging | ||||||
| 
 | 
 | ||||||
| from ..log import ( | from ..log import ( | ||||||
|  | @ -50,6 +51,7 @@ class SymbolNotFound(BrokerError): | ||||||
|     "Symbol not found by broker search" |     "Symbol not found by broker search" | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # TODO: these should probably be moved to `.tsp/.data`? | ||||||
| class NoData(BrokerError): | class NoData(BrokerError): | ||||||
|     ''' |     ''' | ||||||
|     Symbol data not permitted or no data |     Symbol data not permitted or no data | ||||||
|  | @ -59,14 +61,15 @@ class NoData(BrokerError): | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         *args, |         *args, | ||||||
|         frame_size: int = 1000, |         info: dict|None = None, | ||||||
| 
 | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         super().__init__(*args) |         super().__init__(*args) | ||||||
|  |         self.info: dict|None = info | ||||||
| 
 | 
 | ||||||
|         # when raised, machinery can check if the backend |         # when raised, machinery can check if the backend | ||||||
|         # set a "frame size" for doing datetime calcs. |         # set a "frame size" for doing datetime calcs. | ||||||
|         self.frame_size: int = 1000 |         # self.frame_size: int = 1000 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class DataUnavailable(BrokerError): | class DataUnavailable(BrokerError): | ||||||
|  | @ -88,16 +91,18 @@ class DataThrottle(BrokerError): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def resproc( | def resproc( | ||||||
|     resp: asks.response_objects.Response, |     resp: httpx.Response, | ||||||
|     log: logging.Logger, |     log: logging.Logger, | ||||||
|     return_json: bool = True, |     return_json: bool = True, | ||||||
|     log_resp: bool = False, |     log_resp: bool = False, | ||||||
| 
 | 
 | ||||||
| ) -> asks.response_objects.Response: | ) -> httpx.Response: | ||||||
|     """Process response and return its json content. |     ''' | ||||||
|  |     Process response and return its json content. | ||||||
| 
 | 
 | ||||||
|     Raise the appropriate error on non-200 OK responses. |     Raise the appropriate error on non-200 OK responses. | ||||||
|     """ | 
 | ||||||
|  |     ''' | ||||||
|     if not resp.status_code == 200: |     if not resp.status_code == 200: | ||||||
|         raise BrokerError(resp.body) |         raise BrokerError(resp.text)  # httpx responses have no `.body` | ||||||
|     try: |     try: | ||||||
|  |  | ||||||
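A short sketch of what the reworked `NoData` ctor above enables: backends can now attach arbitrary diagnostic `info` instead of relying on the old hardcoded `frame_size` attr. The `info` keys below are hypothetical, purely for illustration:

```python
from piker.brokers import NoData  # exported per the imports diff below

err = NoData(
    'no history frame returned for the requested range',
    info={'venue': 'usdtm_futes', 'requested_limit': 1000},  # hypothetical keys
)
assert err.info['requested_limit'] == 1000
```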
|  | @ -25,6 +25,7 @@ from __future__ import annotations | ||||||
| from collections import ChainMap | from collections import ChainMap | ||||||
| from contextlib import ( | from contextlib import ( | ||||||
|     asynccontextmanager as acm, |     asynccontextmanager as acm, | ||||||
|  |     AsyncExitStack, | ||||||
| ) | ) | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from pprint import pformat | from pprint import pformat | ||||||
|  | @ -41,8 +42,7 @@ import trio | ||||||
| from pendulum import ( | from pendulum import ( | ||||||
|     now, |     now, | ||||||
| ) | ) | ||||||
| import asks | import httpx | ||||||
| from fuzzywuzzy import process as fuzzy |  | ||||||
| import numpy as np | import numpy as np | ||||||
| 
 | 
 | ||||||
| from piker import config | from piker import config | ||||||
|  | @ -52,9 +52,13 @@ from piker.clearing._messages import ( | ||||||
| from piker.accounting import ( | from piker.accounting import ( | ||||||
|     Asset, |     Asset, | ||||||
|     digits_to_dec, |     digits_to_dec, | ||||||
|  |     MktPair, | ||||||
| ) | ) | ||||||
| from piker.types import Struct | from piker.types import Struct | ||||||
| from piker.data import def_iohlcv_fields | from piker.data import ( | ||||||
|  |     def_iohlcv_fields, | ||||||
|  |     match_from_pairs, | ||||||
|  | ) | ||||||
| from piker.brokers import ( | from piker.brokers import ( | ||||||
|     resproc, |     resproc, | ||||||
|     SymbolNotFound, |     SymbolNotFound, | ||||||
|  | @ -64,7 +68,6 @@ from .venues import ( | ||||||
|     PAIRTYPES, |     PAIRTYPES, | ||||||
|     Pair, |     Pair, | ||||||
|     MarketType, |     MarketType, | ||||||
| 
 |  | ||||||
|     _spot_url, |     _spot_url, | ||||||
|     _futes_url, |     _futes_url, | ||||||
|     _testnet_futes_url, |     _testnet_futes_url, | ||||||
|  | @ -74,16 +77,18 @@ from .venues import ( | ||||||
| log = get_logger('piker.brokers.binance') | log = get_logger('piker.brokers.binance') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def get_config() -> dict: | def get_config() -> dict[str, Any]: | ||||||
| 
 |  | ||||||
|     conf: dict |     conf: dict | ||||||
|     path: Path |     path: Path | ||||||
|     conf, path = config.load(touch_if_dne=True) |     conf, path = config.load( | ||||||
| 
 |         conf_name='brokers', | ||||||
|     section = conf.get('binance') |         touch_if_dne=True, | ||||||
| 
 |     ) | ||||||
|  |     section: dict = conf.get('binance') | ||||||
|     if not section: |     if not section: | ||||||
|         log.warning(f'No config section found for binance in {path}') |         log.warning( | ||||||
|  |             f'No config section found for binance in {path}' | ||||||
|  |         ) | ||||||
|         return {} |         return {} | ||||||
| 
 | 
 | ||||||
|     return section |     return section | ||||||
|  | @ -139,7 +144,7 @@ def binance_timestamp( | ||||||
| 
 | 
 | ||||||
| class Client: | class Client: | ||||||
|     ''' |     ''' | ||||||
|     Async ReST API client using ``trio`` + ``asks`` B) |     Async ReST API client using `trio` + `httpx` B) | ||||||
| 
 | 
 | ||||||
|     Supports all of the spot, margin and futures endpoints depending |     Supports all of the spot, margin and futures endpoints depending | ||||||
|     on method. |     on method. | ||||||
|  | @ -148,10 +153,17 @@ class Client: | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
| 
 | 
 | ||||||
|  |         venue_sessions: dict[ | ||||||
|  |             str,  # venue key | ||||||
|  |             tuple[httpx.AsyncClient, str]  # session, eps path | ||||||
|  |         ], | ||||||
|  |         conf: dict[str, Any], | ||||||
|         # TODO: change this to `Client.[mkt_]venue: MarketType`? |         # TODO: change this to `Client.[mkt_]venue: MarketType`? | ||||||
|         mkt_mode: MarketType = 'spot', |         mkt_mode: MarketType = 'spot', | ||||||
| 
 | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
|  |         self.conf = conf | ||||||
|  | 
 | ||||||
|         # build out pair info tables for each market type |         # build out pair info tables for each market type | ||||||
|         # and wrap in a chain-map view for search / query. |         # and wrap in a chain-map view for search / query. | ||||||
|         self._spot_pairs: dict[str, Pair] = {}  # spot info table |         self._spot_pairs: dict[str, Pair] = {}  # spot info table | ||||||
|  | @ -178,44 +190,13 @@ class Client: | ||||||
|         # market symbols for use by search. See `.exch_info()`. |         # market symbols for use by search. See `.exch_info()`. | ||||||
|         self._pairs: ChainMap[str, Pair] = ChainMap() |         self._pairs: ChainMap[str, Pair] = ChainMap() | ||||||
| 
 | 
 | ||||||
|         # spot EPs sesh |  | ||||||
|         self._sesh = asks.Session(connections=4) |  | ||||||
|         self._sesh.base_location: str = _spot_url |  | ||||||
|         # spot testnet |  | ||||||
|         self._test_sesh: asks.Session = asks.Session(connections=4) |  | ||||||
|         self._test_sesh.base_location: str = _testnet_spot_url |  | ||||||
| 
 |  | ||||||
|         # margin and extended spot endpoints session. |  | ||||||
|         self._sapi_sesh = asks.Session(connections=4) |  | ||||||
|         self._sapi_sesh.base_location: str = _spot_url |  | ||||||
| 
 |  | ||||||
|         # futes EPs sesh |  | ||||||
|         self._fapi_sesh = asks.Session(connections=4) |  | ||||||
|         self._fapi_sesh.base_location: str = _futes_url |  | ||||||
|         # futes testnet |  | ||||||
|         self._test_fapi_sesh: asks.Session = asks.Session(connections=4) |  | ||||||
|         self._test_fapi_sesh.base_location: str = _testnet_futes_url |  | ||||||
| 
 |  | ||||||
|         # global client "venue selection" mode. |         # global client "venue selection" mode. | ||||||
|         # set this when you want to switch venues and not have to |         # set this when you want to switch venues and not have to | ||||||
|         # specify the venue for the next request. |         # specify the venue for the next request. | ||||||
|         self.mkt_mode: MarketType = mkt_mode |         self.mkt_mode: MarketType = mkt_mode | ||||||
| 
 | 
 | ||||||
|         # per 8 |         # per-mkt-venue API client table | ||||||
|         self.venue_sesh: dict[ |         self.venue_sesh = venue_sessions | ||||||
|             str,  # venue key |  | ||||||
|             tuple[asks.Session, str]  # session, eps path |  | ||||||
|         ] = { |  | ||||||
|             'spot': (self._sesh, '/api/v3/'), |  | ||||||
|             'spot_testnet': (self._test_sesh, '/fapi/v1/'), |  | ||||||
| 
 |  | ||||||
|             'margin': (self._sapi_sesh, '/sapi/v1/'), |  | ||||||
| 
 |  | ||||||
|             'usdtm_futes': (self._fapi_sesh, '/fapi/v1/'), |  | ||||||
|             'usdtm_futes_testnet': (self._test_fapi_sesh, '/fapi/v1/'), |  | ||||||
| 
 |  | ||||||
|             # 'futes_coin': self._dapi,  # TODO |  | ||||||
|         } |  | ||||||
| 
 | 
 | ||||||
|         # lookup for going from `.mkt_mode: str` to the config |         # lookup for going from `.mkt_mode: str` to the config | ||||||
|         # subsection `key: str` |         # subsection `key: str` | ||||||
|  | @ -230,40 +211,6 @@ class Client: | ||||||
|             'futes': ['usdtm_futes'], |             'futes': ['usdtm_futes'], | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         # for creating API keys see, |  | ||||||
|         # https://www.binance.com/en/support/faq/how-to-create-api-keys-on-binance-360002502072 |  | ||||||
|         self.conf: dict = get_config() |  | ||||||
| 
 |  | ||||||
|         for key, subconf in self.conf.items(): |  | ||||||
|             if api_key := subconf.get('api_key', ''): |  | ||||||
|                 venue_keys: list[str] = self.confkey2venuekeys[key] |  | ||||||
| 
 |  | ||||||
|                 venue_key: str |  | ||||||
|                 sesh: asks.Session |  | ||||||
|                 for venue_key in venue_keys: |  | ||||||
|                     sesh, _ = self.venue_sesh[venue_key] |  | ||||||
| 
 |  | ||||||
|                     api_key_header: dict = { |  | ||||||
|                         # taken from official: |  | ||||||
|                         # https://github.com/binance/binance-futures-connector-python/blob/main/binance/api.py#L47 |  | ||||||
|                         "Content-Type": "application/json;charset=utf-8", |  | ||||||
| 
 |  | ||||||
|                         # TODO: prolly should just always query and copy |  | ||||||
|                         # in the real latest ver? |  | ||||||
|                         "User-Agent": "binance-connector/6.1.6smbz6", |  | ||||||
|                         "X-MBX-APIKEY": api_key, |  | ||||||
|                     } |  | ||||||
|                     sesh.headers.update(api_key_header) |  | ||||||
| 
 |  | ||||||
|                     # if `.use_tesnet = true` in the config then |  | ||||||
|                     # also add headers for the testnet session which |  | ||||||
|                     # will be used for all order control |  | ||||||
|                     if subconf.get('use_testnet', False): |  | ||||||
|                         testnet_sesh, _ = self.venue_sesh[ |  | ||||||
|                             venue_key + '_testnet' |  | ||||||
|                         ] |  | ||||||
|                         testnet_sesh.headers.update(api_key_header) |  | ||||||
| 
 |  | ||||||
|     def _mk_sig( |     def _mk_sig( | ||||||
|         self, |         self, | ||||||
|         data: dict, |         data: dict, | ||||||
|  | @ -282,7 +229,6 @@ class Client: | ||||||
|                 'to define the creds for auth-ed endpoints!?' |                 'to define the creds for auth-ed endpoints!?' | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|         # XXX: Info on security and authentication |         # XXX: Info on security and authentication | ||||||
|         # https://binance-docs.github.io/apidocs/#endpoint-security-type |         # https://binance-docs.github.io/apidocs/#endpoint-security-type | ||||||
|         if not (api_secret := subconf.get('api_secret')): |         if not (api_secret := subconf.get('api_secret')): | ||||||
|  | @ -322,8 +268,9 @@ class Client: | ||||||
|         - /fapi/v3/ USD-M FUTURES, or |         - /fapi/v3/ USD-M FUTURES, or | ||||||
|         - /api/v3/ SPOT/MARGIN |         - /api/v3/ SPOT/MARGIN | ||||||
| 
 | 
 | ||||||
|         account/market endpoint request depending on either passed in `venue: str` |         account/market endpoint request depending on either passed in | ||||||
|         or the current setting `.mkt_mode: str` setting, default `'spot'`. |         `venue: str` or the current setting `.mkt_mode: str` setting, | ||||||
|  |         default `'spot'`. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|         Docs per venue API: |         Docs per venue API: | ||||||
|  | @ -352,9 +299,6 @@ class Client: | ||||||
|                 venue=venue_key, |                 venue=venue_key, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|         sesh: asks.Session |  | ||||||
|         path: str |  | ||||||
| 
 |  | ||||||
|         # Check if we're configured to route order requests to the |         # Check if we're configured to route order requests to the | ||||||
|         # venue equivalent's testnet. |         # venue equivalent's testnet. | ||||||
|         use_testnet: bool = False |         use_testnet: bool = False | ||||||
|  | @ -379,11 +323,12 @@ class Client: | ||||||
|             # ctl machinery B) |             # ctl machinery B) | ||||||
|             venue_key += '_testnet' |             venue_key += '_testnet' | ||||||
| 
 | 
 | ||||||
|         sesh, path = self.venue_sesh[venue_key] |         client: httpx.AsyncClient | ||||||
| 
 |         path: str | ||||||
|         meth: Callable = getattr(sesh, method) |         client, path = self.venue_sesh[venue_key] | ||||||
|  |         meth: Callable = getattr(client, method) | ||||||
|         resp = await meth( |         resp = await meth( | ||||||
|             path=path + endpoint, |             url=path + endpoint, | ||||||
|             params=params, |             params=params, | ||||||
|             timeout=float('inf'), |             timeout=float('inf'), | ||||||
|         ) |         ) | ||||||
|  | @ -425,7 +370,15 @@ class Client: | ||||||
|                 item['filters'] = filters |                 item['filters'] = filters | ||||||
| 
 | 
 | ||||||
|             pair_type: Type = PAIRTYPES[venue] |             pair_type: Type = PAIRTYPES[venue] | ||||||
|  |             try: | ||||||
|                 pair: Pair = pair_type(**item) |                 pair: Pair = pair_type(**item) | ||||||
|  |             except Exception as e: | ||||||
|  |                 e.add_note( | ||||||
|  |                     "\nDon't panic, prolly stupid binance changed their symbology schema again..\n" | ||||||
|  |                     'Check out their API docs here:\n\n' | ||||||
|  |                     'https://binance-docs.github.io/apidocs/spot/en/#exchange-information' | ||||||
|  |                 ) | ||||||
|  |                 raise | ||||||
|             pair_table[pair.symbol.upper()] = pair |             pair_table[pair.symbol.upper()] = pair | ||||||
| 
 | 
 | ||||||
|             # update an additional top-level-cross-venue-table |             # update an additional top-level-cross-venue-table | ||||||
|  | @ -520,7 +473,9 @@ class Client: | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         pair_table: dict[str, Pair] = self._venue2pairs[ |         pair_table: dict[str, Pair] = self._venue2pairs[ | ||||||
|             venue or self.mkt_mode |             venue | ||||||
|  |             or | ||||||
|  |             self.mkt_mode | ||||||
|         ] |         ] | ||||||
|         if ( |         if ( | ||||||
|             expiry |             expiry | ||||||
|  | @ -539,9 +494,9 @@ class Client: | ||||||
|             venues: list[str] = [venue] |             venues: list[str] = [venue] | ||||||
| 
 | 
 | ||||||
|         # batch per-venue download of all exchange infos |         # batch per-venue download of all exchange infos | ||||||
|         async with trio.open_nursery() as rn: |         async with trio.open_nursery() as tn: | ||||||
|             for ven in venues: |             for ven in venues: | ||||||
|                 rn.start_soon( |                 tn.start_soon( | ||||||
|                     self._cache_pairs, |                     self._cache_pairs, | ||||||
|                     ven, |                     ven, | ||||||
|                 ) |                 ) | ||||||
|  | @ -549,7 +504,7 @@ class Client: | ||||||
|         if sym: |         if sym: | ||||||
|             return pair_table[sym] |             return pair_table[sym] | ||||||
|         else: |         else: | ||||||
|             self._pairs |             return self._pairs | ||||||
| 
 | 
 | ||||||
|     async def get_assets( |     async def get_assets( | ||||||
|         self, |         self, | ||||||
|  | @ -594,20 +549,32 @@ class Client: | ||||||
| 
 | 
 | ||||||
|     ) -> dict[str, Any]: |     ) -> dict[str, Any]: | ||||||
| 
 | 
 | ||||||
|         fq_pairs: dict = await self.exch_info() |         fq_pairs: dict[str, Pair] = await self.exch_info() | ||||||
| 
 | 
 | ||||||
|         matches = fuzzy.extractBests( |         # TODO: cache this list like we were in | ||||||
|             pattern, |         # `open_symbol_search()`? | ||||||
|             fq_pairs, |         # keys: list[str] = list(fq_pairs) | ||||||
|  | 
 | ||||||
|  |         return match_from_pairs( | ||||||
|  |             pairs=fq_pairs, | ||||||
|  |             query=pattern.upper(), | ||||||
|             score_cutoff=50, |             score_cutoff=50, | ||||||
|         ) |         ) | ||||||
|         # repack in dict form | 
 | ||||||
|         return {item[0]['symbol']: item[0] |     def pair2venuekey( | ||||||
|                 for item in matches} |         self, | ||||||
|  |         pair: Pair, | ||||||
|  |     ) -> str: | ||||||
|  |         return { | ||||||
|  |             'USDTM': 'usdtm_futes', | ||||||
|  |             'SPOT': 'spot', | ||||||
|  |             # 'COINM': 'coin_futes', | ||||||
|  |             # ^-TODO-^ bc someone might want it..? | ||||||
|  |         }[pair.venue] | ||||||
| 
 | 
 | ||||||
|     async def bars( |     async def bars( | ||||||
|         self, |         self, | ||||||
|         symbol: str, |         mkt: MktPair, | ||||||
| 
 | 
 | ||||||
|         start_dt: datetime | None = None, |         start_dt: datetime | None = None, | ||||||
|         end_dt: datetime | None = None, |         end_dt: datetime | None = None, | ||||||
|  | @ -637,16 +604,20 @@ class Client: | ||||||
|         start_time = binance_timestamp(start_dt) |         start_time = binance_timestamp(start_dt) | ||||||
|         end_time = binance_timestamp(end_dt) |         end_time = binance_timestamp(end_dt) | ||||||
| 
 | 
 | ||||||
|  |         bs_pair: Pair = self._pairs[mkt.bs_fqme.upper()] | ||||||
|  | 
 | ||||||
|         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data |         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data | ||||||
|         bars = await self._api( |         bars = await self._api( | ||||||
|             'klines', |             'klines', | ||||||
|             params={ |             params={ | ||||||
|                 'symbol': symbol.upper(), |                 # NOTE: always query using their native symbology! | ||||||
|  |                 'symbol': mkt.bs_mktid.upper(), | ||||||
|                 'interval': '1m', |                 'interval': '1m', | ||||||
|                 'startTime': start_time, |                 'startTime': start_time, | ||||||
|                 'endTime': end_time, |                 'endTime': end_time, | ||||||
|                 'limit': limit |                 'limit': limit | ||||||
|             }, |             }, | ||||||
|  |             venue=self.pair2venuekey(bs_pair), | ||||||
|             allow_testnet=False, |             allow_testnet=False, | ||||||
|         ) |         ) | ||||||
|         new_bars: list[tuple] = [] |         new_bars: list[tuple] = [] | ||||||
|  | @ -963,17 +934,148 @@ class Client: | ||||||
|         await self.close_listen_key(key) |         await self.close_listen_key(key) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | _venue_urls: dict[str, str] = { | ||||||
| async def get_client() -> Client: |     'spot': ( | ||||||
|  |         _spot_url, | ||||||
|  |         '/api/v3/', | ||||||
|  |     ), | ||||||
|  |     'spot_testnet': ( | ||||||
|  |         _testnet_spot_url, | ||||||
|  |         '/fapi/v1/' | ||||||
|  |     ), | ||||||
|  |     # margin and extended spot endpoints session. | ||||||
|  |     # TODO: did this ever get implemented fully? | ||||||
|  |     # 'margin': ( | ||||||
|  |     #     _spot_url, | ||||||
|  |     #     '/sapi/v1/' | ||||||
|  |     # ), | ||||||
| 
 | 
 | ||||||
|     client = Client() |     'usdtm_futes': ( | ||||||
|     await client.exch_info() |         _futes_url, | ||||||
|     log.info( |         '/fapi/v1/', | ||||||
|         f'{client} in {client.mkt_mode} mode: caching exchange infos..\n' |     ), | ||||||
|         'Cached multi-market pairs:\n' | 
 | ||||||
|         f'spot: {len(client._spot_pairs)}\n' |     'usdtm_futes_testnet': ( | ||||||
|         f'usdtm_futes: {len(client._ufutes_pairs)}\n' |         _testnet_futes_url, | ||||||
|         f'Total: {len(client._pairs)}\n' |         '/fapi/v1/', | ||||||
|  |     ), | ||||||
|  | 
 | ||||||
|  |     # TODO: for anyone who actually needs it ;P | ||||||
|  |     # 'coin_futes': () | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def init_api_keys( | ||||||
|  |     client: Client, | ||||||
|  |     conf: dict[str, Any], | ||||||
|  | ) -> None: | ||||||
|  |     ''' | ||||||
|  |     Set up per-venue API keys for each http client according to the | ||||||
|  |     user's `brokers.conf`. | ||||||
|  | 
 | ||||||
|  |     For ex, to use spot-testnet and live usdt futures APIs: | ||||||
|  | 
 | ||||||
|  |     ```toml | ||||||
|  |         [binance] | ||||||
|  |         # spot test net | ||||||
|  |         spot.use_testnet = true | ||||||
|  |         spot.api_key = '<spot_api_key_from_binance_account>' | ||||||
|  |         spot.api_secret = '<spot_api_key_password>' | ||||||
|  | 
 | ||||||
|  |         # futes live | ||||||
|  |         futes.use_testnet = false | ||||||
|  |         accounts.usdtm = 'futes' | ||||||
|  |         futes.api_key = '<futes_api_key_from_binance>' | ||||||
|  |         futes.api_secret = '<futes_api_key_password>' | ||||||
|  | 
 | ||||||
|  |         # if uncommented will use the built-in paper engine and not | ||||||
|  |         # connect to `binance` API servers for order ctl. | ||||||
|  |         # accounts.paper = 'paper' | ||||||
|  |     ``` | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     for key, subconf in conf.items(): | ||||||
|  |         if api_key := subconf.get('api_key', ''): | ||||||
|  |             venue_keys: list[str] = client.confkey2venuekeys[key] | ||||||
|  | 
 | ||||||
|  |             venue_key: str | ||||||
|  |             sesh: httpx.AsyncClient | ||||||
|  |             for venue_key in venue_keys: | ||||||
|  |                 # NOTE: don't rebind `client` here, it's the `Client`! | ||||||
|  |                 sesh, _ = client.venue_sesh[venue_key] | ||||||
|  | 
 | ||||||
|  |                 api_key_header: dict = { | ||||||
|  |                     # taken from official: | ||||||
|  |                     # https://github.com/binance/binance-futures-connector-python/blob/main/binance/api.py#L47 | ||||||
|  |                     "Content-Type": "application/json;charset=utf-8", | ||||||
|  | 
 | ||||||
|  |                     # TODO: prolly should just always query and copy | ||||||
|  |                     # in the real latest ver? | ||||||
|  |                     "User-Agent": "binance-connector/6.1.6smbz6", | ||||||
|  |                     "X-MBX-APIKEY": api_key, | ||||||
|  |                 } | ||||||
|  |                 sesh.headers.update(api_key_header) | ||||||
|  | 
 | ||||||
|  |                 # if `.use_testnet = true` in the config then | ||||||
|  |                 # also add headers for the testnet session which | ||||||
|  |                 # will be used for all order control | ||||||
|  |                 if subconf.get('use_testnet', False): | ||||||
|  |                     testnet_sesh, _ = client.venue_sesh[ | ||||||
|  |                         venue_key + '_testnet' | ||||||
|  |                     ] | ||||||
|  |                     testnet_sesh.headers.update(api_key_header) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @acm | ||||||
|  | async def get_client( | ||||||
|  |     mkt_mode: MarketType = 'spot', | ||||||
|  | ) -> Client: | ||||||
|  |     ''' | ||||||
|  |     Construct a single `piker` client which composes multiple | ||||||
|  |     underlying venue-specific API clients for both live and test | ||||||
|  |     networks. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     venue_sessions: dict[ | ||||||
|  |         str,  # venue key | ||||||
|  |         tuple[httpx.AsyncClient, str]  # session, eps path | ||||||
|  |     ] = {} | ||||||
|  |     async with AsyncExitStack() as client_stack: | ||||||
|  |         for name, (base_url, path) in _venue_urls.items(): | ||||||
|  |             api: httpx.AsyncClient = await client_stack.enter_async_context( | ||||||
|  |                 httpx.AsyncClient( | ||||||
|  |                     base_url=base_url, | ||||||
|  |                     # headers={}, | ||||||
|  | 
 | ||||||
|  |                     # TODO: is there a way to numerate this? | ||||||
|  |                     # https://www.python-httpx.org/advanced/clients/#why-use-a-client | ||||||
|  |                     # connections=4 | ||||||
|  |                 ) | ||||||
|  |             ) | ||||||
|  |             venue_sessions[name] = ( | ||||||
|  |                 api, | ||||||
|  |                 path, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|  |         conf: dict[str, Any] = get_config() | ||||||
|  |         # for creating API keys see, | ||||||
|  |         # https://www.binance.com/en/support/faq/how-to-create-api-keys-on-binance-360002502072 | ||||||
|  |         client = Client( | ||||||
|  |             venue_sessions=venue_sessions, | ||||||
|  |             conf=conf, | ||||||
|  |             mkt_mode=mkt_mode, | ||||||
|  |         ) | ||||||
|  |         init_api_keys( | ||||||
|  |             client=client, | ||||||
|  |             conf=conf, | ||||||
|  |         ) | ||||||
|  |         fq_pairs: dict[str, Pair] = await client.exch_info() | ||||||
|  |         assert fq_pairs | ||||||
|  |         log.info( | ||||||
|  |             f'Loaded multi-venue `Client` in mkt_mode={client.mkt_mode!r}\n\n' | ||||||
|  |             f'Symbology Summary:\n' | ||||||
|  |             f'------ - ------\n' | ||||||
|  |             f'spot: {len(client._spot_pairs)}\n' | ||||||
|  |             f'usdtm_futes: {len(client._ufutes_pairs)}\n' | ||||||
|  |             '------ - ------\n' | ||||||
|  |             f'total: {len(client._pairs)}\n' | ||||||
|  |         ) | ||||||
|         yield client |         yield client | ||||||
|  |  | ||||||
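A minimal usage sketch for the new multi-venue factory, assuming it runs under `trio` like the rest of `piker` (the import path is assumed; `mkt_mode` is one of the documented `MarketType` keys):

```python
import trio
# module path assumed for illustration:
from piker.brokers.binance.api import get_client

async def main() -> None:
    # all per-venue `httpx.AsyncClient`s are opened (and torn down)
    # by the `AsyncExitStack` inside `get_client()`.
    async with get_client(mkt_mode='usdtm_futes') as client:
        pairs = await client.exch_info()
        print(f'cached {len(pairs)} pairs in {client.mkt_mode!r} mode')

trio.run(main)
```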
|  | @ -264,15 +264,20 @@ async def open_trade_dialog( | ||||||
|     # do a open_symcache() call.. though maybe we can hide |     # do a open_symcache() call.. though maybe we can hide | ||||||
|     # this in a new async version of open_account()? |     # this in a new async version of open_account()? | ||||||
|     async with open_cached_client('binance') as client: |     async with open_cached_client('binance') as client: | ||||||
|         subconf: dict = client.conf[venue_name] |         subconf: dict|None = client.conf.get(venue_name) | ||||||
|         use_testnet = subconf.get('use_testnet', False) |  | ||||||
| 
 | 
 | ||||||
|         # XXX: if no futes.api_key or spot.api_key has been set we |         # XXX: if no futes.api_key or spot.api_key has been set we | ||||||
|         # always fall back to the paper engine! |         # always fall back to the paper engine! | ||||||
|         if not subconf.get('api_key'): |         if ( | ||||||
|  |             not subconf | ||||||
|  |             or | ||||||
|  |             not subconf.get('api_key') | ||||||
|  |         ): | ||||||
|             await ctx.started('paper') |             await ctx.started('paper') | ||||||
|             return |             return | ||||||
| 
 | 
 | ||||||
|  |         use_testnet: bool = subconf.get('use_testnet', False) | ||||||
|  | 
 | ||||||
|     async with ( |     async with ( | ||||||
|         open_cached_client('binance') as client, |         open_cached_client('binance') as client, | ||||||
|     ): |     ): | ||||||
|  |  | ||||||
|  | @ -42,12 +42,12 @@ from trio_typing import TaskStatus | ||||||
| from pendulum import ( | from pendulum import ( | ||||||
|     from_timestamp, |     from_timestamp, | ||||||
| ) | ) | ||||||
| from fuzzywuzzy import process as fuzzy |  | ||||||
| import numpy as np | import numpy as np | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| from piker.brokers import ( | from piker.brokers import ( | ||||||
|     open_cached_client, |     open_cached_client, | ||||||
|  |     NoData, | ||||||
| ) | ) | ||||||
| from piker._cacheables import ( | from piker._cacheables import ( | ||||||
|     async_lifo_cache, |     async_lifo_cache, | ||||||
```diff
@@ -110,6 +110,7 @@ class AggTrade(Struct, frozen=True):
 
 async def stream_messages(
     ws: NoBsWs,
+
 ) -> AsyncGenerator[NoBsWs, dict]:
 
     # TODO: match syntax here!
@@ -220,6 +221,8 @@ def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]:
     }
 
 
+# TODO, why aren't frame resp `log.info()`s showing in upstream
+# code?!
 @acm
 async def open_history_client(
     mkt: MktPair,
@@ -252,24 +255,30 @@ async def open_history_client(
             else:
                 client.mkt_mode = 'spot'
 
-            # NOTE: always query using their native symbology!
-            mktid: str = mkt.bs_mktid
-            array = await client.bars(
-                mktid,
+            array: np.ndarray = await client.bars(
+                mkt=mkt,
                 start_dt=start_dt,
                 end_dt=end_dt,
             )
+            if array.size == 0:
+                raise NoData(
+                    f'No frame for {start_dt} -> {end_dt}\n'
+                )
+
             times = array['time']
-            if (
-                end_dt is None
-            ):
-                inow = round(time.time())
+            if not times.any():
+                raise ValueError(
+                    'Bad frame with null-times?\n\n'
+                    f'{times}'
+                )
+
+            if end_dt is None:
+                inow: int = round(time.time())
                 if (inow - times[-1]) > 60:
                     await tractor.pause()
 
            start_dt = from_timestamp(times[0])
            end_dt = from_timestamp(times[-1])
-
            return array, start_dt, end_dt
 
        yield get_ohlc, {'erlangs': 3, 'rate': 3}
@@ -456,6 +465,8 @@ async def stream_quotes(
    ):
        init_msgs: list[FeedInit] = []
        for sym in symbols:
+            mkt: MktPair
+            pair: Pair
            mkt, pair = await get_mkt_info(sym)
 
            # build out init msgs according to latest spec
@@ -504,7 +515,6 @@ async def stream_quotes(
 
            # start streaming
            async for typ, quote in msg_gen:
-
                # period = time.time() - last
                # hz = 1/period if period else float('inf')
                # if hz > 60:
@@ -533,14 +543,15 @@ async def open_symbol_search(
 
            pattern: str
            async for pattern in stream:
-                matches = fuzzy.extractBests(
+                # NOTE: pattern fuzzy-matching is done within
+                # the methd impl.
+                pairs: dict[str, Pair] = await client.search_symbols(
                    pattern,
-                    client._pairs,
-                    score_cutoff=50,
                )
 
-                # repack in dict form
-                await stream.send({
-                    item[0].bs_fqme: item[0]
-                    for item in matches
-                })
+                # repack in fqme-keyed table
+                byfqme: dict[str, Pair] = {}
+                for pair in pairs.values():
+                    byfqme[pair.bs_fqme] = pair
+
+                await stream.send(byfqme)
```
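For reference, the two frame guards added to `get_ohlc()` above boil down to the following standalone sketch; this is illustrative only (it uses a plain `ValueError` where the real code raises piker's internal `NoData` error type):

```python
import numpy as np

def validate_frame(array: np.ndarray) -> np.ndarray:
    # assumes a structured array with a 'time' field, as
    # returned by the backend `bars()` methods.
    if array.size == 0:
        # the real code raises `NoData` here
        raise ValueError('No frame for requested range')

    times: np.ndarray = array['time']
    if not times.any():
        # an all-zero time column means a corrupt frame
        raise ValueError(f'Bad frame with null-times?\n{times}')

    return times
```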
```diff
@@ -137,10 +137,12 @@ class SpotPair(Pair, frozen=True):
     quoteOrderQtyMarketAllowed: bool
     isSpotTradingAllowed: bool
     isMarginTradingAllowed: bool
+    otoAllowed: bool
 
     defaultSelfTradePreventionMode: str
     allowedSelfTradePreventionModes: list[str]
     permissions: list[str]
+    permissionSets: list[list[str]]
 
     # NOTE: see `.data._symcache.SymbologyCache.load()` for why
     ns_path: str = 'piker.brokers.binance:SpotPair'
@@ -179,7 +181,6 @@ class FutesPair(Pair):
     quoteAsset: str  # 'USDT',
     quotePrecision: int  # 8,
     requiredMarginPercent: float  # '5.0000',
-    settlePlan: int  # 0,
     timeInForce: list[str]  # ['GTC', 'IOC', 'FOK', 'GTX'],
     triggerProtect: float  # '0.0500',
     underlyingSubType: list[str]  # ['PoW'],
@@ -194,6 +195,42 @@ class FutesPair(Pair):
     def quoteAssetPrecision(self) -> int:
         return self.quotePrecision
 
+    @property
+    def expiry(self) -> str:
+        symbol: str = self.symbol
+        contype: str = self.contractType
+        match contype:
+            case (
+                'CURRENT_QUARTER'
+                | 'CURRENT_QUARTER DELIVERING'
+                | 'NEXT_QUARTER'  # su madre binance..
+            ):
+                pair, _, expiry = symbol.partition('_')
+                assert pair == self.pair  # sanity
+                return f'{expiry}'
+
+            case 'PERPETUAL':
+                return 'PERP'
+
+            case '':
+                subtype: list[str] = self.underlyingSubType
+                if not subtype:
+                    if self.status == 'PENDING_TRADING':
+                        return 'PENDING'
+
+                match subtype:
+                    case ['DEFI']:
+                        return 'PERP'
+
+        # wow, just wow you binance guys suck..
+        if self.status == 'PENDING_TRADING':
+            return 'PENDING'
+
+        # XXX: yeah no clue then..
+        raise ValueError(
+            f'Bad .expiry token match: {contype} for {symbol}'
+        )
+
     @property
     def venue(self) -> str:
         symbol: str = self.symbol
@@ -202,36 +239,50 @@ class FutesPair(Pair):
 
         match ctype:
             case 'PERPETUAL':
-                return f'{margin}M.PERP'
+                return f'{margin}M'
 
-            case 'CURRENT_QUARTER':
+            case (
+                'CURRENT_QUARTER'
+                | 'CURRENT_QUARTER DELIVERING'
+                | 'NEXT_QUARTER'  # su madre binance..
+            ):
                 _, _, expiry = symbol.partition('_')
-                return f'{margin}M.{expiry}'
+                return f'{margin}M'
 
             case '':
                 subtype: list[str] = self.underlyingSubType
                 if not subtype:
                     if self.status == 'PENDING_TRADING':
-                        return f'{margin}M.PENDING'
+                        return f'{margin}M'
 
                 match subtype:
-                    case ['DEFI']:
-                        return f'{subtype[0]}.PERP'
+                    case (
+                        ['DEFI']
+                        | ['USDC']
+                    ):
+                        return f'{subtype[0]}'
 
         # XXX: yeah no clue then..
-        return 'WTF.PWNED.BBQ'
+        raise ValueError(
+            f'Bad .venue token match: {ctype}'
+        )
 
     @property
     def bs_fqme(self) -> str:
         symbol: str = self.symbol
         ctype: str = self.contractType
         venue: str = self.venue
+        pair: str = self.pair
 
         match ctype:
-            case 'CURRENT_QUARTER':
-                symbol, _, expiry = symbol.partition('_')
+            case (
+                'CURRENT_QUARTER'
+                | 'NEXT_QUARTER'  # su madre binance..
+            ):
+                pair, _, expiry = symbol.partition('_')
+                assert pair == self.pair
 
-        return f'{symbol}.{venue}'
+        return f'{pair}.{venue}.{self.expiry}'
 
     @property
     def bs_src_asset(self) -> str:
```
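A worked (purely illustrative) example of how the new `.expiry`, `.venue` and `.bs_fqme` tokens compose for a quarterly contract; the `'USDT'` margin token is an assumption here since the `margin` variable's source isn't shown in these hunks:

```python
symbol = 'BTCUSDT_230929'   # an illustrative quarterly futes symbol
pair, _, expiry = symbol.partition('_')
assert (pair, expiry) == ('BTCUSDT', '230929')

# assuming a USDT-margined contract, `.venue` now yields just the
# margin token (the '.PERP'/'.{expiry}' suffix moved to `.expiry`)
# and `.bs_fqme` recombines all three parts:
margin = 'USDT'
venue = f'{margin}M'
bs_fqme = f'{pair}.{venue}.{expiry}'
assert bs_fqme == 'BTCUSDT.USDTM.230929'
```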
```diff
@@ -454,8 +454,18 @@ def mkt_info(
 
 @cli.command()
 @click.argument('pattern', required=True)
+# TODO: move this to top level click/typer context for all subs
+@click.option(
+    '--pdb',
+    is_flag=True,
+    help='Enable tractor debug mode',
+)
 @click.pass_obj
-def search(config, pattern):
+def search(
+    config: dict,
+    pattern: str,
+    pdb: bool,
+):
     '''
     Search for symbols from broker backend(s).
 
@@ -468,9 +478,12 @@ def search(config, pattern):
 
         async with maybe_open_pikerd(
             loglevel=config['loglevel'],
+            debug_mode=pdb,
         ):
             return await func()
 
+    from piker.toolz import open_crash_handler
+    with open_crash_handler():
         quotes = trio.run(
             main,
             partial(
@@ -493,9 +506,11 @@ def search(config, pattern):
 @click.option('--delete', '-d', flag_value=True, help='Delete section')
 @click.pass_obj
 def brokercfg(config, section, value, delete):
-    """If invoked with no arguments, open an editor to edit broker configs file
-    or get / update an individual section.
-    """
+    '''
+    If invoked with no arguments, open an editor to edit broker
+    configs file or get / update an individual section.
+
+    '''
     from .. import config
 
     if section:
```
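The `search` command now wraps its `trio.run()` call in `open_crash_handler`, which the diff uses as a plain (sync) context manager. A minimal usage sketch of that pattern; the `main()` entrypoint is hypothetical:

```python
from piker.toolz import open_crash_handler

def main() -> None:
    # hypothetical entrypoint standing in for the cli body
    raise RuntimeError('boom')

# any uncaught exception inside the block drops into a
# post-mortem debug session rather than unwinding silently.
with open_crash_handler():
    main()
```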
```diff
@@ -145,7 +145,11 @@ async def symbol_search(
 
         async with maybe_spawn_brokerd(
             mod.name,
-            infect_asyncio=getattr(mod, '_infect_asyncio', False),
+            infect_asyncio=getattr(
+                mod,
+                '_infect_asyncio',
+                False,
+            ),
         ) as portal:
 
             results.append((
```
```diff
@@ -34,7 +34,7 @@ from typing import (
 import pendulum
 import trio
 from trio_typing import TaskStatus
-from fuzzywuzzy import process as fuzzy
+from rapidfuzz import process as fuzzy
 import numpy as np
 from tractor.trionics import (
     broadcast_receiver,
@@ -52,8 +52,11 @@ from cryptofeed.defines import (
 )
 from cryptofeed.symbols import Symbol
 
-from piker.data.types import Struct
-from piker.data import def_iohlcv_fields
+from piker.data import (
+    def_iohlcv_fields,
+    match_from_pairs,
+    Struct,
+)
 from piker.data._web_bs import (
     open_jsonrpc_session
 )
@@ -79,7 +82,7 @@ _testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'
 class JSONRPCResult(Struct):
     jsonrpc: str = '2.0'
     id: int
-    result: Optional[dict] = None
+    result: Optional[list[dict]] = None
     error: Optional[dict] = None
     usIn: int
     usOut: int
@@ -289,24 +292,29 @@ class Client:
         currency: str = 'btc',  # BTC, ETH, SOL, USDC
         kind: str = 'option',
         expired: bool = False
-    ) -> dict[str, Any]:
-        """Get symbol info for the exchange.
 
-        """
+    ) -> dict[str, dict]:
+        '''
+        Get symbol infos.
+
+        '''
         if self._pairs:
             return self._pairs
 
         # will retrieve all symbols by default
-        params = {
+        params: dict[str, str] = {
             'currency': currency.upper(),
             'kind': kind,
             'expired': str(expired).lower()
         }
 
-        resp = await self.json_rpc('public/get_instruments', params)
-        results = resp.result
-
-        instruments = {
+        resp: JSONRPCResult = await self.json_rpc(
+            'public/get_instruments',
+            params,
+        )
+        # convert to symbol-keyed table
+        results: list[dict] | None = resp.result
+        instruments: dict[str, dict] = {
             item['instrument_name'].lower(): item
             for item in results
         }
@@ -319,6 +327,7 @@ class Client:
     async def cache_symbols(
         self,
     ) -> dict:
+
         if not self._pairs:
             self._pairs = await self.symbol_info()
 
@@ -329,17 +338,23 @@ class Client:
         pattern: str,
         limit: int = 30,
     ) -> dict[str, Any]:
-        data = await self.symbol_info()
+        '''
+        Fuzzy search symbology set for pairs matching `pattern`.
 
-        matches = fuzzy.extractBests(
-            pattern,
-            data,
+        '''
+        pairs: dict[str, Any] = await self.symbol_info()
+        matches: dict[str, Pair] = match_from_pairs(
+            pairs=pairs,
+            query=pattern.upper(),
             score_cutoff=35,
             limit=limit
         )
-        # repack in dict form
-        return {item[0]['instrument_name'].lower(): item[0]
-                for item in matches}
+
+        # repack in name-keyed table
+        return {
+            pair['instrument_name'].lower(): pair
+            for pair in matches.values()
+        }
 
     async def bars(
         self,
```
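A small sketch of the name-keyed repack that `symbol_info()` now performs on the `public/get_instruments` response; the payload fields shown are illustrative samples, not taken from the diff:

```python
# sample instrument records as deribit might return them..
results = [
    {'instrument_name': 'BTC-PERPETUAL', 'kind': 'future'},
    {'instrument_name': 'BTC-27JUN25-60000-C', 'kind': 'option'},
]

# ..repacked into the lower-cased, name-keyed table which is
# then cached on `Client._pairs`.
instruments = {
    item['instrument_name'].lower(): item
    for item in results
}
assert 'btc-perpetual' in instruments
```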
```diff
@@ -26,7 +26,7 @@ import time
 import trio
 from trio_typing import TaskStatus
 import pendulum
-from fuzzywuzzy import process as fuzzy
+from rapidfuzz import process as fuzzy
 import numpy as np
 import tractor
 
```
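Several backends in this changeset swap `fuzzywuzzy` for `rapidfuzz`, whose `process` module is close to a drop-in replacement. An illustrative call (not from the diff) showing the `extract()` shape that replaces `extractBests()`:

```python
from rapidfuzz import process as fuzzy

pairs = ['XBTUSDT', 'ETHUSDT', 'ADAUSDT']

# `extract()` returns (choice, score, index) triples and, like
# fuzzywuzzy's `extractBests()`, drops hits below `score_cutoff`.
matches = fuzzy.extract(
    'XBT',
    pairs,
    limit=2,
    score_cutoff=30,
)
for choice, score, idx in matches:
    print(f'{choice}: {score:.1f}')
```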
```diff
@@ -88,16 +88,25 @@ async def data_reset_hack(
     api_port: str = str(ib_client.client.port)
     vnc_host: str
     vnc_port: int
-    vnc_host, vnc_port = client.conf['vnc_addrs'].get(
-        api_port,
-        ('localhost', 3003)
-    )
+    vnc_sockaddr: tuple[str] | None = client.conf.get('vnc_addrs')
 
     no_setup_msg:str = (
-        f'No data reset hack test setup for {vnc_host}!\n'
-        'See setup @\n'
+        f'No data reset hack test setup for {vnc_sockaddr}!\n'
+        'See config setup tips @\n'
         'https://github.com/pikers/piker/tree/master/piker/brokers/ib'
     )
+
+    if not vnc_sockaddr:
+        log.warning(
+            no_setup_msg
+            +
+            'REQUIRES A `vnc_addrs: array` ENTRY'
+        )
+
+    vnc_host, vnc_port = vnc_sockaddr.get(
+        api_port,
+        ('localhost', 3003)
+    )
     global _reset_tech
 
     match _reset_tech:
```
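The reset hack now reads the whole `vnc_addrs` table up-front. Its expected shape, inferred from the lookup code above, is a map from each gateway's api port to a `(host, port)` VNC sockaddr (values illustrative). Note that the missing-entry path only warns before falling through to `vnc_sockaddr.get(...)`, which will raise `AttributeError` when the config entry is absent entirely:

```python
# hypothetical `vnc_addrs` entry shape from the ib section of
# the brokers config:
vnc_addrs = {
    '4002': ('localhost', 3003),
    '4003': ('gw-host-2', 3004),
}

api_port = '4002'
# same fallback default as the code above
vnc_host, vnc_port = vnc_addrs.get(api_port, ('localhost', 3003))
assert (vnc_host, vnc_port) == ('localhost', 3003)
```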
```diff
@@ -41,7 +41,6 @@ import time
 from typing import (
     Any,
     Callable,
-    Union,
 )
 from types import SimpleNamespace
 
@@ -49,7 +48,12 @@ from bidict import bidict
 import trio
 import tractor
 from tractor import to_asyncio
-import pendulum
+from pendulum import (
+    from_timestamp,
+    DateTime,
+    Duration,
+    duration as mk_duration,
+)
 from eventkit import Event
 from ib_insync import (
     client as ib_client,
@@ -221,16 +225,20 @@ def bars_to_np(bars: list) -> np.ndarray:
 # https://interactivebrokers.github.io/tws-api/historical_limitations.html#non-available_hd
 _samplings: dict[int, tuple[str, str]] = {
     1: (
+        # ib strs
         '1 secs',
         f'{int(2e3)} S',
-        pendulum.duration(seconds=2e3),
+
+        mk_duration(seconds=2e3),
     ),
     # TODO: benchmark >1 D duration on query to see if
     # throughput can be made faster during backfilling.
     60: (
+        # ib strs
         '1 min',
-        '1 D',
-        pendulum.duration(days=1),
+        '2 D',
+
+        mk_duration(days=2),
     ),
 }
 
```
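The `_samplings` change bumps the 1-min query window from 1 to 2 days and switches to the named `duration` import from `pendulum`. A quick sanity check of the new tuple layout, mirroring how `bars()` unpacks it below:

```python
from pendulum import duration as mk_duration

# the 60s entry as now defined: (ib bar-size str,
# ib duration str, default frame duration)
bar_size, ib_duration_str, default_dt_duration = (
    '1 min',
    '2 D',
    mk_duration(days=2),
)
assert default_dt_duration.in_seconds() == 172_800
```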
```diff
@@ -279,9 +287,31 @@ class Client:
         self.conf = config
 
         # NOTE: the ib.client here is "throttled" to 45 rps by default
-        self.ib = ib
+        self.ib: IB = ib
         self.ib.RaiseRequestErrors: bool = True
 
+        # self._acnt_names: set[str] = {}
+        self._acnt_names: list[str] = []
+
+    @property
+    def acnts(self) -> list[str]:
+        # return list(self._acnt_names)
+        return self._acnt_names
+
+    def __repr__(self) -> str:
+        return (
+            f'<{type(self).__name__}('
+            f'ib={self.ib} '
+            f'acnts={self.acnts}'
+
+            # TODO: we need to mask out acnt-#s and other private
+            # infos if we're going to console this!
+            # f' |_.conf:\n'
+            # f'    {pformat(self.conf)}\n'
+
+            ')>'
+        )
+
     async def get_fills(self) -> list[Fill]:
         '''
         Return list of rents `Fills` from trading session.
```
```diff
@@ -303,8 +333,8 @@ class Client:
         fqme: str,
 
         # EST in ISO 8601 format is required... below is EPOCH
-        start_dt: Union[datetime, str] = "1970-01-01T00:00:00.000000-05:00",
-        end_dt: Union[datetime, str] = "",
+        start_dt: datetime | str = "1970-01-01T00:00:00.000000-05:00",
+        end_dt: datetime | str = "",
 
         # ohlc sample period in seconds
         sample_period_s: int = 1,
@@ -315,7 +345,7 @@ class Client:
 
         **kwargs,
 
-    ) -> tuple[BarDataList, np.ndarray, pendulum.Duration]:
+    ) -> tuple[BarDataList, np.ndarray, Duration]:
         '''
         Retreive OHLCV bars for a fqme over a range to the present.
 
@@ -324,14 +354,19 @@ class Client:
         # https://interactivebrokers.github.io/tws-api/historical_data.html
         bars_kwargs = {'whatToShow': 'TRADES'}
         bars_kwargs.update(kwargs)
-        bar_size, duration, dt_duration = _samplings[sample_period_s]
+        (
+            bar_size,
+            ib_duration_str,
+            default_dt_duration,
+        ) = _samplings[sample_period_s]
 
-        global _enters
-        log.info(
-            f"REQUESTING {duration}'s worth {bar_size} BARS\n"
-            f'{_enters} @ end={end_dt}"'
+        dt_duration: Duration = (
+            duration
+            or default_dt_duration
         )
 
+        # TODO: maybe remove all this?
+        global _enters
         if not end_dt:
             end_dt = ''
 
@@ -340,8 +375,8 @@ class Client:
         contract: Contract = (await self.find_contracts(fqme))[0]
         bars_kwargs.update(getattr(contract, 'bars_kwargs', {}))
 
-        bars = await self.ib.reqHistoricalDataAsync(
-            contract,
+        kwargs: dict[str, Any] = dict(
+            contract=contract,
             endDateTime=end_dt,
             formatDate=2,
 
@@ -353,7 +388,7 @@ class Client:
 
             # time history length values format:
             # ``durationStr=integer{SPACE}unit (S|D|W|M|Y)``
-            durationStr=duration,
+            durationStr=ib_duration_str,
 
             # always use extended hours
             useRTH=False,
```
```diff
@@ -363,36 +398,81 @@ class Client:
             # whatToShow='MIDPOINT',
             # whatToShow='TRADES',
         )
+        bars = await self.ib.reqHistoricalDataAsync(
+            **kwargs,
+        )
 
+        query_info: str = (
+            f'REQUESTING IB history BARS\n'
+            f'    ------ - ------\n'
+            f'dt_duration: {dt_duration}\n'
+            f'ib_duration_str: {ib_duration_str}\n'
+            f'bar_size: {bar_size}\n'
+            f'fqme: {fqme}\n'
+            f'actor-global _enters: {_enters}\n'
+            f'kwargs: {pformat(kwargs)}\n'
+        )
         # tail case if no history for range or none prior.
-        if not bars:
-            # NOTE: there's 2 cases here to handle (and this should be
-            # read alongside the implementation of
-            # ``.reqHistoricalDataAsync()``):
-            # - no data is returned for the period likely due to
-            # a weekend, holiday or other non-trading period prior to
-            # ``end_dt`` which exceeds the ``duration``,
+        # NOTE: there's actually 3 cases here to handle (and
+        # this should be read alongside the implementation of
+        # `.reqHistoricalDataAsync()`):
         # - a timeout occurred in which case insync internals return
         #   an empty list thing with bars.clear()...
-            return [], np.empty(0), dt_duration
-            # TODO: we could maybe raise ``NoData`` instead if we
-            # rewrite the method in the first case? right now there's no
-            # way to detect a timeout.
+        # - no data exists for the period likely due to
+        #   a weekend, holiday or other non-trading period prior to
+        #   ``end_dt`` which exceeds the ``duration``,
+        # - LITERALLY this is the start of the mkt's history!
+        if not bars:
+            # TODO: figure out wut's going on here.
 
-        # NOTE XXX: ensure minimum duration in bars B)
-        # => we recursively call this method until we get at least
-        # as many bars such that they sum in aggregate to the the
+            # TODO: is this handy, a sync requester for tinkering
+            # with empty frame cases?
+            # def get_hist():
+            #     return self.ib.reqHistoricalData(**kwargs)
+            # import pdbp
+            # pdbp.set_trace()
+
+            log.critical(
+                'STUPID IB SAYS NO HISTORY\n\n'
+                + query_info
+            )
+
+            # TODO: we could maybe raise ``NoData`` instead if we
+            # rewrite the method in the first case?
+            # right now there's no way to detect a timeout..
+            return [], np.empty(0), dt_duration
+
+        log.info(query_info)
+        # NOTE XXX: ensure minimum duration in bars?
+        # => recursively call this method until we get at least as
+        #   many bars such that they sum in aggregate to the the
         #   desired total time (duration) at most.
-        elif (
-            end_dt
-            and (
-                (len(bars) * sample_period_s) < dt_duration.in_seconds()
-            )
+        #  - if you query over a gap and get no data
+        #    that may short circuit the history
+        if (
+            # XXX XXX XXX
+            # => WHY DID WE EVEN NEED THIS ORIGINALLY!? <=
+            # XXX XXX XXX
+            False
+            and end_dt
         ):
+            nparr: np.ndarray = bars_to_np(bars)
+            times: np.ndarray = nparr['time']
+            first: float = times[0]
+            tdiff: float = times[-1] - first
+
+            if (
+                # len(bars) * sample_period_s) < dt_duration.in_seconds()
+                tdiff < dt_duration.in_seconds()
+                # and False
+            ):
+                end_dt: DateTime = from_timestamp(first)
                 log.warning(
-                f'Recursing to get more bars from {end_dt} for {dt_duration}'
+                    f'Frame result was shorter then {dt_duration}!?\n'
+                    'Recursing for more bars:\n'
+                    f'end_dt: {end_dt}\n'
+                    f'dt_duration: {dt_duration}\n'
                 )
-            end_dt -= dt_duration
                 (
                     r_bars,
                     r_arr,
@@ -401,12 +481,39 @@ class Client:
                     fqme,
                     start_dt=start_dt,
                     end_dt=end_dt,
+                    sample_period_s=sample_period_s,
+
+                    # TODO: make a table for Duration to
+                    # the ib str values in order to use this?
+                    # duration=duration,
                 )
                 r_bars.extend(bars)
                 bars = r_bars
 
-        nparr = bars_to_np(bars)
-        return bars, nparr, dt_duration
+        nparr: np.ndarray = bars_to_np(bars)
+
+        # timestep should always be at least as large as the
+        # period step.
+        tdiff: np.ndarray = np.diff(nparr['time'])
+        to_short: np.ndarray = tdiff < sample_period_s
+        if (to_short).any():
+            # raise ValueError(
+            log.error(
+                f'OHLC frame for {sample_period_s} has {to_short.size} '
+                'time steps which are shorter then expected?!"'
+            )
+            # OOF: this will break teardown?
+            # -[ ] check if it's greenback
+            # -[ ] why tf are we leaking shm entries..
+            # -[ ] make a test on the debugging asyncio testing
+            #    branch..
+            # breakpoint()
+
+        return (
+            bars,
+            nparr,
+            dt_duration,
+        )
 
     async def con_deats(
         self,
```
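The new trailing sanity check flags any frame whose consecutive timestamps are closer together than the sample period. A standalone sketch of the same `np.diff()` logic; note that `to_short.size` as logged above counts all time steps, whereas `to_short.sum()` would count only the offending ones:

```python
import numpy as np

sample_period_s = 1
# illustrative timestamps with one sub-period step in the middle
times = np.array([1.0, 2.0, 2.5, 3.5])

tdiff = np.diff(times)
to_short = tdiff < sample_period_s
if to_short.any():
    # prints '1 step(s) shorter than 1s'
    print(f'{to_short.sum()} step(s) shorter than {sample_period_s}s')
```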
```diff
@@ -416,20 +523,26 @@ class Client:
 
         futs: list[asyncio.Future] = []
         for con in contracts:
-            if con.primaryExchange not in _exch_skip_list:
+            exch: str = con.primaryExchange or con.exchange
+            if (
+                exch
+                and exch not in _exch_skip_list
+            ):
                 futs.append(self.ib.reqContractDetailsAsync(con))
 
         # batch request all details
         try:
             results: list[ContractDetails] = await asyncio.gather(*futs)
         except RequestError as err:
-            msg = err.message
+            msg: str = err.message
             if (
                 'No security definition' in msg
             ):
                 log.warning(f'{msg}: {contracts}')
                 return {}
 
+            raise
+
         # one set per future result
         details: dict[str, ContractDetails] = {}
         for details_set in results:
```
```diff
@@ -663,7 +776,7 @@ class Client:
 
         # commodities
         elif exch == 'CMDTY':  # eg. XAUUSD.CMDTY
-            con_kwargs, bars_kwargs = _adhoc_symbol_map[symbol]
+            con_kwargs, bars_kwargs = _adhoc_symbol_map[symbol.upper()]
             con = Commodity(**con_kwargs)
             con.bars_kwargs = bars_kwargs
 
@@ -727,12 +840,16 @@ class Client:
                 or tract.exchange
                 or exch
             )
-            pattern: str = f'{symbol}.{exch}'
+            pattern: str = f'{symbol}.{exch.lower()}'
             expiry: str = tract.lastTradeDateOrContractMonth
             # add an entry with expiry suffix if available
             if expiry:
                 pattern += f'.{expiry}'
 
+            # since pos update msgs will always have the full fqme
+            # with suffix?
+            pattern += '.ib'
+
             # directly cache the input pattern to the output
             # contract match as well as by the IB-internal conId.
             self._contracts[pattern] = tract
@@ -740,6 +857,23 @@ class Client:
 
         return contracts
 
+    async def maybe_get_head_time(
+        self,
+        fqme: str,
+
+    ) -> datetime | None:
+        '''
+        Return the first datetime stamp for `fqme` or `None`
+        on request failure.
+
+        '''
+        try:
+            head_dt: datetime = await self.get_head_time(fqme=fqme)
+            return head_dt
+        except RequestError:
+            log.warning(f'Unable to get head time: {fqme} ?')
+            return None
+
     async def get_head_time(
         self,
         fqme: str,
```
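How the cached contract key is now assembled per the `@@ -727` hunk above: a lower-cased exchange token, an optional expiry suffix, then the `.ib` broker suffix. Symbol and expiry values here are illustrative:

```python
symbol, exch = 'mnq', 'CME'
expiry = '20240315'  # illustrative contract month

pattern = f'{symbol}.{exch.lower()}'
# add an entry with expiry suffix if available
if expiry:
    pattern += f'.{expiry}'
# always append the broker suffix so position update msgs
# (which carry the full fqme) match the cache key
pattern += '.ib'

assert pattern == 'mnq.cme.20240315.ib'
```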
```diff
@@ -779,8 +913,11 @@ class Client:
     async def get_quote(
         self,
         contract: Contract,
+        timeout: float = 1,
+        tries: int = 100,
+        raise_on_timeout: bool = False,
 
-    ) -> Ticker:
+    ) -> Ticker | None:
         '''
         Return a single (snap) quote for symbol.
 
@@ -789,30 +926,48 @@ class Client:
             contract,
             snapshot=True,
         )
-        ready = ticker.updateEvent
+        ready: ticker.TickerUpdateEvent = ticker.updateEvent
 
         # ensure a last price gets filled in before we deliver quote
+        timeouterr: Exception | None = None
         warnset: bool = False
-        for _ in range(100):
-            if isnan(ticker.last):
+        for _ in range(tries):
 
-                done, pending = await asyncio.wait(
-                    [ready],
-                    timeout=0.01,
+            # wait for a first update(Event) indicatingn a
+            # live quote feed.
+            if isnan(ticker.last):
+                try:
+                    tkr = await asyncio.wait_for(
+                        ready,
+                        timeout=timeout,
                     )
-                if ready in done:
+                    if tkr:
                         break
+                except TimeoutError as err:
+                    timeouterr = err
+                    await asyncio.sleep(0.01)
+                    continue
+
                 else:
                     if not warnset:
                         log.warning(
-                            f'Quote for {contract} timed out: market is closed?'
+                            f'Quote req timed out..maybe venue is closed?\n'
+                            f'{asdict(contract)}'
                         )
                         warnset = True
 
             else:
-                log.info(f'Got first quote for {contract}')
+                log.info(
+                    'Got first quote for contract\n'
+                    f'{contract}\n'
+                )
                 break
         else:
+            if timeouterr and raise_on_timeout:
+                import pdbp
+                pdbp.set_trace()
+                raise timeouterr
+
             if not warnset:
                 log.warning(
                     f'Contract {contract} is not returning a quote '
@@ -820,6 +975,8 @@ class Client:
                 )
                 warnset = True
 
+            return None
+
         return ticker
 
     # async to be consistent for the client proxy, and cuz why not.
```
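The rewritten quote-wait loop swaps `asyncio.wait()` for `asyncio.wait_for()` with bounded retries and a captured timeout error for optional re-raise by the caller. A generic sketch of that pattern against a plain `asyncio.Event` (all names hypothetical, not the ib-insync event type used above):

```python
import asyncio

async def wait_first_update(
    ready: asyncio.Event,
    timeout: float = 1,
    tries: int = 100,
) -> None:
    # retry a bounded number of times, remembering the last
    # timeout so the caller can decide whether it's fatal.
    timeouterr: Exception | None = None
    for _ in range(tries):
        try:
            await asyncio.wait_for(ready.wait(), timeout=timeout)
            return
        except asyncio.TimeoutError as err:
            timeouterr = err
            await asyncio.sleep(0.01)

    # never fired within the retry budget
    raise timeouterr
```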
```diff
@@ -867,8 +1024,12 @@ class Client:
                     outsideRth=True,
 
                     optOutSmartRouting=True,
+                    # TODO: need to understand this setting better as
+                    # it pertains to shit ass mms..
                     routeMarketableToBbo=True,
+
                     designatedLocation='SMART',
+
                     # TODO: make all orders GTC?
                     # https://interactivebrokers.github.io/tws-api/classIBApi_1_1Order.html#a95539081751afb9980f4c6bd1655a6ba
                     # goodTillDate=f"yyyyMMdd-HH:mm:ss",
```
```diff
@@ -981,7 +1142,9 @@ _scan_ignore: set[tuple[str, int]] = set()
 
 def get_config() -> dict[str, Any]:
 
-    conf, path = config.load('brokers')
+    conf, path = config.load(
+        conf_name='brokers',
+    )
     section = conf.get('ib')
 
     accounts = section.get('accounts')
@@ -994,8 +1157,8 @@ def get_config() -> dict[str, Any]:
     names = list(accounts.keys())
     accts = section['accounts'] = bidict(accounts)
     log.info(
-        f'brokers.toml defines {len(accts)} accounts: '
-        f'{pformat(names)}'
+        f'{path} defines {len(accts)} account aliases:\n'
+        f'{pformat(names)}\n'
     )
 
     if section is None:
```
```diff
@@ -1062,7 +1225,7 @@ async def load_aio_clients(
         try_ports = list(try_ports.values())
 
     _err = None
-    accounts_def = config.load_accounts(['ib'])
+    accounts_def: dict[str, str] = config.load_accounts(['ib'])
     ports = try_ports if port is None else [port]
     combos = list(itertools.product(hosts, ports))
     accounts_found: dict[str, Client] = {}
```
```diff
@@ -1087,6 +1250,12 @@ async def load_aio_clients(
 
         for i in range(connect_retries):
             try:
+                log.info(
+                    'Trying `ib_async` connect\n'
+                    f'{host}: {port}\n'
+                    f'clientId: {client_id}\n'
+                    f'timeout: {connect_timeout}\n'
+                )
                 await ib.connectAsync(
                     host,
                     port,
@@ -1101,7 +1270,9 @@ async def load_aio_clients(
                 client = Client(ib=ib, config=conf)
 
                 # update all actor-global caches
-                log.info(f"Caching client for {sockaddr}")
+                log.runtime(
+                    f'Connected and caching `Client` @ {sockaddr!r}'
+                )
                 _client_cache[sockaddr] = client
                 break
 
@@ -1116,32 +1287,54 @@ async def load_aio_clients(
                 OSError,
             ) as ce:
                 _err = ce
-                log.warning(
-                    f'Failed to connect on {host}:{port} for {i} time with,\n'
-                    f'{ib.client.apiError.value()}\n'
-                    'retrying with a new client id..')
+                message: str = (
+                    f'Failed to connect on {host}:{port} after {i} tries with\n'
+                    f'{ib.client.apiError.value()!r}\n\n'
+                    'Retrying with a new client id..\n'
+                )
+                log.runtime(message)
+        else:
+            # XXX report loudly if we never established after all
+            # re-tries
+            log.warning(message)
 
         # Pre-collect all accounts available for this
         # connection and map account names to this client
         # instance.
         for value in ib.accountValues():
-            acct_number = value.account
+            acct_number: str = value.account
 
-            entry = accounts_def.inverse.get(acct_number)
-            if not entry:
+            acnt_alias: str = accounts_def.inverse.get(acct_number)
+            if not acnt_alias:
+
+                # TODO: should we constuct the below reco-ex from
+                # the existing config content?
+                _, path = config.load(
+                    conf_name='brokers',
+                )
                 raise ValueError(
-                    'No section in brokers.toml for account:'
-                    f' {acct_number}\n'
-                    f'Please add entry to continue using this API client'
+                    'No alias in account section for account!\n'
+                    f'Please add an acnt alias entry to your {path}\n'
+                    'For example,\n\n'
+
+                    '[ib.accounts]\n'
+                    'margin = {accnt_number!r}\n'
+                    '^^^^^^ <- you need this part!\n\n'
+
+                    'This ensures `piker` will not leak private acnt info '
+                    'to console output by default!\n'
                )
 
             # surjection of account names to operating clients.
-            if acct_number not in accounts_found:
-                accounts_found[entry] = client
+            if acnt_alias not in accounts_found:
+                accounts_found[acnt_alias] = client
+                # client._acnt_names.add(acnt_alias)
+                client._acnt_names.append(acnt_alias)
 
+        if accounts_found:
             log.info(
-                f'Loaded accounts for client @ {host}:{port}\n'
-                f'{pformat(accounts_found)}'
+                f'Loaded accounts for api client\n\n'
+                f'{pformat(accounts_found)}\n'
            )
 
            # XXX: why aren't we just updating this directy above
```
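Account aliases hinge on `bidict`'s two-way lookup: `get_config()` builds the alias table and `load_aio_clients()` resolves account numbers reported by `ib.accountValues()` back through `.inverse`. A small demonstration (the account number is illustrative):

```python
from bidict import bidict

# alias -> account number, as loaded from the `[ib.accounts]`
# config section
accounts = bidict({'margin': 'DU1234567'})

assert accounts['margin'] == 'DU1234567'
# the inverse view recovers the alias from a reported acnt #
assert accounts.inverse['DU1234567'] == 'margin'
```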
```diff
@@ -1180,7 +1373,9 @@ async def load_clients_for_trio(
     a ``tractor.to_asyncio.open_channel_from()``.
 
     '''
-    async with load_aio_clients() as accts2clients:
+    async with load_aio_clients(
+        disconnect_on_exit=False,
+    ) as accts2clients:
 
         to_trio.send_nowait(accts2clients)
 
@@ -1306,7 +1501,7 @@ class MethodProxy:
         self,
         pattern: str,
 
-    ) -> Union[dict[str, Any], trio.Event]:
+    ) -> dict[str, Any] | trio.Event:
 
         ev = self.event_table.get(pattern)
 
@@ -1343,11 +1538,10 @@ async def open_aio_client_method_relay(
     # relay all method requests to ``asyncio``-side client and deliver
     # back results
     while not to_trio._closed:
-        msg = await from_trio.get()
-
+        msg: tuple[str, dict] | dict | None = await from_trio.get()
         match msg:
             case None:  # termination sentinel
-                print('asyncio PROXY-RELAY SHUTDOWN')
+                log.info('asyncio `Client` method-proxy SHUTDOWN!')
                 break
 
             case (meth_name, kwargs):
```
|  |  | ||||||
|  | @ -20,7 +20,7 @@ Order and trades endpoints for use with ``piker``'s EMS. | ||||||
| """ | """ | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
| from contextlib import ExitStack | from contextlib import ExitStack | ||||||
| from collections import ChainMap | # from collections import ChainMap | ||||||
| from functools import partial | from functools import partial | ||||||
| from pprint import pformat | from pprint import pformat | ||||||
| import time | import time | ||||||
|  | @ -846,6 +846,18 @@ async def emit_pp_update( | ||||||
| 
 | 
 | ||||||
|         # con: Contract = fill.contract |         # con: Contract = fill.contract | ||||||
| 
 | 
 | ||||||
|  |         # provide a backup fqme -> MktPair table in case the | ||||||
|  |         # symcache does not (yet) have an entry for the current mkt | ||||||
|  |         # txn. | ||||||
|  |         backup_table: dict[str, MktPair] = {} | ||||||
|  |         for tid, txn in trans.items(): | ||||||
|  |             fqme: str = txn.fqme | ||||||
|  |             if fqme not in ledger.symcache.mktmaps: | ||||||
|  |                 # bs_mktid: str = txn.bs_mktid | ||||||
|  |                 backup_table[fqme] = client._cons2mkts[ | ||||||
|  |                     client._contracts[fqme] | ||||||
|  |                 ] | ||||||
|  | 
 | ||||||
|         acnt.update_from_ledger( |         acnt.update_from_ledger( | ||||||
|             trans, |             trans, | ||||||
| 
 | 
 | ||||||
|  | @ -855,7 +867,7 @@ async def emit_pp_update( | ||||||
| 
 | 
 | ||||||
|             # TODO: remove this hack by attempting to symcache an |             # TODO: remove this hack by attempting to symcache an | ||||||
|             # incrementally updated table? |             # incrementally updated table? | ||||||
|             _mktmap_table=client._contracts |             _mktmap_table=backup_table, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # re-compute all positions that have changed state. |         # re-compute all positions that have changed state. | ||||||
|  | @ -1171,7 +1183,14 @@ async def deliver_trade_events( | ||||||
|                         pos |                         pos | ||||||
|                         and fill |                         and fill | ||||||
|                     ): |                     ): | ||||||
|                         assert fill.commissionReport == cr |                         now_cr: CommissionReport = fill.commissionReport | ||||||
|  |                         if (now_cr != cr): | ||||||
|  |                             log.warning( | ||||||
|  |                                 'UhhHh ib updated the commission report mid-fill..?\n' | ||||||
|  |                                 f'was: {pformat(cr)}\n' | ||||||
|  |                                 f'now: {pformat(now_cr)}\n' | ||||||
|  |                             ) | ||||||
|  | 
 | ||||||
|                         await emit_pp_update( |                         await emit_pp_update( | ||||||
|                             ems_stream, |                             ems_stream, | ||||||
|                             accounts_def, |                             accounts_def, | ||||||
@@ -1249,7 +1268,10 @@ async def deliver_trade_events(
                 if err['reqid'] == -1:
                     log.error(f'TWS external order error:\n{pformat(err)}')

-                flow: ChainMap = flows.get(reqid)
+                flow: dict = dict(
+                    flows.get(reqid)
+                    or {}
+                )

                 # TODO: we don't want to relay data feed / lookup errors
                 # so we need some further filtering logic here..
@@ -1260,7 +1282,7 @@ async def deliver_trade_events(
                     reason=reason,
                     broker_details={
                         'name': 'ib',
-                        'flow': dict(flow),
+                        'flow': flow,
                     },
                 )
                 flows.add_msg(reqid, err_msg.to_dict())
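The `dict(flows.get(reqid) or {})` rewrite above does two jobs at once: it tolerates an unknown `reqid` (where `.get()` returns `None`) and it flattens a layered `ChainMap` into a plain `dict` that can be embedded in a serializable msg. In isolation:

    from collections import ChainMap

    flows: dict[int, ChainMap] = {
        42: ChainMap(
            {'status': 'filled'},  # newest msg layer wins
            {'status': 'submitted', 'acct': 'paper'},
        ),
    }

    # unknown reqid: `.get()` -> None -> `or {}` -> empty dict
    assert dict(flows.get(999) or {}) == {}

    # known reqid: layers collapse with front-map precedence
    assert dict(flows.get(42) or {}) == {
        'status': 'filled',
        'acct': 'paper',
    }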
@@ -25,6 +25,7 @@ from contextlib import (
 from dataclasses import asdict
 from datetime import datetime
 from functools import partial
+from pprint import pformat
 from math import isnan
 import time
 from typing import (
@@ -36,7 +37,13 @@ from typing import (
 from async_generator import aclosing
 import ib_insync as ibis
 import numpy as np
-import pendulum
+from pendulum import (
+    now,
+    from_timestamp,
+    # DateTime,
+    Duration,
+    duration as mk_duration,
+)
 import tractor
 import trio
 from trio_typing import TaskStatus
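The `pendulum` names pulled in above feed the frame-size math added further down (`mk_duration(...)` for expected frame lengths, `Duration` for realized ones, `from_timestamp()`/`now()` for bar datetimes). A quick standalone sketch of the same calls (values illustrative):

    from pendulum import (
        Duration,
        duration as mk_duration,
        from_timestamp,
        now,
    )

    # expected frame length for 60s bars: two days
    frame: Duration = mk_duration(days=2)
    assert frame.in_seconds() == 2 * 24 * 60 * 60

    # realized frame length: bar count * sample period
    bars_dur = Duration(seconds=1_000 * 60)

    # the short-frame check as done in `get_bars()`
    assert bars_dur < frame

    # epoch seconds -> tz-aware datetime
    assert from_timestamp(0).year == 1970
    print(now() - from_timestamp(0))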
@@ -45,10 +52,9 @@ from piker.accounting import (
     MktPair,
 )
 from piker.data.validate import FeedInit
-from .._util import (
+from piker.brokers._util import (
     NoData,
     DataUnavailable,
-    SymbolNotFound,
 )
 from .api import (
     # _adhoc_futes_set,
@@ -159,13 +165,13 @@ async def open_history_client(
         head_dt: None | datetime = None
         if (
             # fx cons seem to not provide this endpoint?
+            # TODO: guard against all contract types which don't
+            # support it?
             'idealpro' not in fqme
         ):
-            try:
-                head_dt = await proxy.get_head_time(fqme=fqme)
-            except RequestError:
-                log.warning(f'Unable to get head time: {fqme} ?')
-                pass
+            head_dt: datetime | None = await proxy.maybe_get_head_time(
+                fqme=fqme
+            )

         async def get_hist(
             timeframe: float,
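The `maybe_get_head_time()` endpoint swapped in above presumably absorbs the `RequestError` handling that was previously inlined here, returning `None` when no head timestamp is available. A sketch of such a wrapper built from the removed lines (the real one lives on the client/proxy side):

    from datetime import datetime

    async def maybe_get_head_time(
        proxy,  # the ib method-proxy used throughout this module
        fqme: str,
    ) -> datetime | None:
        # absorb the broker error and signal "no head time
        # available" with `None` instead of raising
        try:
            return await proxy.get_head_time(fqme=fqme)
        except RequestError:  # ib's request error type, as before
            log.warning(f'Unable to get head time: {fqme} ?')
            return None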
@@ -173,8 +179,15 @@ async def open_history_client(
             start_dt: datetime | None = None,

         ) -> tuple[np.ndarray, str]:
+
             nonlocal max_timeout, mean, count

+            if (
+                start_dt
+                and start_dt.timestamp() == 0
+            ):
+                await tractor.pause()
+
             query_start = time.time()
             out, timedout = await get_bars(
                 proxy,
@@ -195,24 +208,48 @@ async def open_history_client(
                     f'mean: {mean}'
                 )

-            if (
-                out is None
-            ):
             # could be trying to retreive bars over weekend
+            if out is None:
                 log.error(f"Can't grab bars starting at {end_dt}!?!?")
-                raise NoData(
-                    f'{end_dt}',
-                    # frame_size=2000,
-                )

                 if (
                     end_dt
                     and head_dt
                     and end_dt <= head_dt
                 ):
-                raise DataUnavailable(f'First timestamp is {head_dt}')
+                    raise DataUnavailable(
+                        f'First timestamp is {head_dt}\n'
+                        f'But {end_dt} was requested..'
+                    )
+
+                else:
+                    raise NoData(
+                        info={
+                            'fqme': fqme,
+                            'head_dt': head_dt,
+                            'start_dt': start_dt,
+                            'end_dt': end_dt,
+                            'timedout': timedout,
+                        },
+                    )

-            bars, bars_array, first_dt, last_dt = out
+            # also see return type for `get_bars()`
+            bars: ibis.objects.BarDataList
+            bars_array: np.ndarray
+            first_dt: datetime
+            last_dt: datetime
+            (
+                bars,
+                bars_array,
+                first_dt,
+                last_dt,
+            ) = out
+
+            # TODO: audit the sampling period here as well?
+            # timestep should always be at least as large as the
+            # period step.
+            # tdiff: np.ndarray = np.diff(bars_array['time'])
+            # if (tdiff < timeframe).any():
+            #     await tractor.pause()

             # volume cleaning since there's -ve entries,
             # wood luv to know what crookery that is..
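The commented `tdiff` TODO above would catch frames whose bar spacing is tighter than the sample period; as a standalone check (timestamps illustrative):

    import numpy as np

    timeframe: float = 60.0  # sample period in seconds

    # epoch-second stamps for a few 1m bars, one bad 30s gap
    times = np.array([0.0, 60.0, 90.0, 150.0])

    tdiff: np.ndarray = np.diff(times)
    if (tdiff < timeframe).any():
        # 90.0 - 60.0 == 30s < 60s -> non-uniform sampling
        print(f'non-uniform bar spacing: {tdiff}')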
@@ -226,7 +263,18 @@ async def open_history_client(
         # quite sure why.. needs some tinkering and probably
         # a lookthrough of the ``ib_insync`` machinery, for eg. maybe
         # we have to do the batch queries on the `asyncio` side?
-        yield get_hist, {'erlangs': 1, 'rate': 3}
+        yield (
+            get_hist,
+            {
+                'erlangs': 1,  # max conc reqs
+                'rate': 3,  # max req rate
+                'frame_types': {  # expected frame sizes
+                    1: mk_duration(seconds=2e3),
+                    60: mk_duration(days=2),
+                }
+
+            },
+        )
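Downstream, the history layer presumably unpacks the yielded pair as `(getter, config)` and can consult the new `frame_types` entry for the expected per-timeframe frame duration; a hypothetical consumer fragment under that assumption:

    # hypothetical consumer of `open_history_client()`'s yielded pair
    async def consume_history(mkt) -> None:
        async with open_history_client(mkt) as (get_hist, config):
            assert config['erlangs'] == 1  # max concurrent requests
            assert config['rate'] == 3  # max request rate

            # expected frame sizes are pendulum `Duration`s keyed
            # by timeframe-seconds
            assert config['frame_types'][60].in_days() == 2

            # kwargs per the `get_hist()` signature above
            out = await get_hist(timeframe=60, end_dt=None)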
 _pacing: str = (

@@ -371,7 +419,11 @@ async def get_bars(

         while _failed_resets < max_failed_resets:
             try:
-                out = await proxy.bars(
+                (
+                    bars,
+                    bars_array,
+                    dt_duration,
+                ) = await proxy.bars(
                     fqme=fqme,
                     end_dt=end_dt,
                     sample_period_s=timeframe,
@@ -382,44 +434,58 @@ async def get_bars(
                     # current impl) to detect a cancel case.
                     # timeout=timeout,
                 )
-                if out is None:
-                    raise NoData(f'{end_dt}')
-
-                bars, bars_array, dt_duration = out
+                # usually either a request during a venue closure
+                # or into a large (weekend) closure gap.
+                if not bars:
+                    # no data returned?
+                    log.warning(
+                        'History frame is blank?\n'
+                        f'start_dt: {start_dt}\n'
+                        f'end_dt: {end_dt}\n'
+                        f'duration: {dt_duration}\n'
+                    )
+                    # NOTE: REQUIRED to pass back value..
+                    result = None
+                    return None

                 # not enough bars signal, likely due to venue
                 # operational gaps.
-                too_little: bool = False
-                if (
-                    end_dt
-                    and (
-                        not bars
-                        or (too_little :=
-                            start_dt
-                            and (len(bars) * timeframe)
-                                < dt_duration.in_seconds()
-                        )
-                    )
-                ):
-                    if (
-                        end_dt
-                        or too_little
-                    ):
+                if end_dt:
+                    dur_s: float = len(bars) * timeframe
+                    bars_dur = Duration(seconds=dur_s)
+                    dt_dur_s: float = dt_duration.in_seconds()
+                    if dur_s < dt_dur_s:
                         log.warning(
-                            f'History is blank for {dt_duration} from {end_dt}'
+                            'History frame is shorter then expected?\n'
+                            f'start_dt: {start_dt}\n'
+                            f'end_dt: {end_dt}\n'
+                            f'duration: {dt_dur_s}\n'
+                            f'frame duration seconds: {dur_s}\n'
+                            f'dur diff: {dt_duration - bars_dur}\n'
                         )
-                        end_dt -= dt_duration
-                        continue
-
-                    raise NoData(f'{end_dt}')
-
-                if bars_array is None:
-                    raise SymbolNotFound(fqme)
+                        # NOTE: we used to try to get a minimal
+                        # set of bars by recursing but this ran
+                        # into possible infinite query loops
+                        # when logic in the `Client.bars()` dt
+                        # diffing went bad. So instead for now
+                        # we just return the
+                        # shorter-then-expected history with
+                        # a warning.
+                        # TODO: in the future it prolly makes
+                        # the most send to do venue operating
+                        # hours lookup and
+                        # timestamp-in-operating-range set
+                        # checking to know for sure if we can
+                        # safely and quickly ignore non-uniform history
+                        # frame timestamp gaps..
+                        # end_dt -= dt_duration
+                        # continue
+                        # await tractor.pause()

-                first_dt = pendulum.from_timestamp(
+                first_dt = from_timestamp(
                     bars[0].date.timestamp())

-                last_dt = pendulum.from_timestamp(
+                last_dt = from_timestamp(
                     bars[-1].date.timestamp())

                 time = bars_array['time']
@@ -432,6 +498,7 @@ async def get_bars(
                 if data_cs:
                     data_cs.cancel()

+                # NOTE: setting this is critical!
                 result = (
                     bars,  # ib native
                     bars_array,  # numpy
@@ -442,6 +509,7 @@ async def get_bars(
                 # signal data reset loop parent task
                 result_ready.set()

+                # NOTE: this isn't getting collected anywhere!
                 return result

             except RequestError as err:
@@ -467,7 +535,7 @@ async def get_bars(
                     if end_dt is not None:
                         end_dt = end_dt.subtract(days=1)
                     elif end_dt is None:
-                        end_dt = pendulum.now().subtract(days=1)
+                        end_dt = now().subtract(days=1)

                     log.warning(
                         f'NO DATA found ending @ {end_dt}\n'
@@ -603,8 +671,8 @@ async def _setup_quote_stream(
         # making them mostly useless and explains why the scanner
         # is always slow XD
         # '293',  # Trade count for day
-        '294',  # Trade rate / minute
-        '295',  # Vlm rate / minute
+        # '294',  # Trade rate / minute
+        # '295',  # Vlm rate / minute
     ),
     contract: Contract | None = None,
@@ -815,7 +883,10 @@ async def stream_quotes(
     proxy: MethodProxy
     mkt: MktPair
     details: ibis.ContractDetails
-    async with open_data_client() as proxy:
+    async with (
+        open_data_client() as proxy,
+        # trio.open_nursery() as tn,
+    ):
         mkt, details = await get_mkt_info(
             sym,
             proxy=proxy,  # passed to avoid implicit client load
@@ -835,27 +906,50 @@ async def stream_quotes(
         init_msgs.append(init_msg)

         con: Contract = details.contract
-        first_ticker: Ticker = await proxy.get_quote(contract=con)
-        first_quote: dict = normalize(first_ticker)
+        first_ticker: Ticker | None = None
+        with trio.move_on_after(1):
+            first_ticker: Ticker = await proxy.get_quote(
+                contract=con,
+                raise_on_timeout=False,
+            )

-        log.warning(f'FIRST QUOTE: {first_quote}')
+        if first_ticker:
+            first_quote: dict = normalize(first_ticker)
+
+            # TODO: we need a stack-oriented log levels filters for
+            # this!
+            # log.info(message, filter={'stack': 'live_feed'}) ?
+            log.runtime(
+                'Rxed init quote:\n\n'
+                f'{pformat(first_quote)}\n'
+            )

-        # TODO: we should instead spawn a task that waits on a feed to start
-        # and let it wait indefinitely..instead of this hard coded stuff.
-        with trio.move_on_after(1):
-            first_ticker = await proxy.get_quote(contract=con)
-
-        # it might be outside regular trading hours so see if we can at
-        # least grab history.
+        # NOTE: it might be outside regular trading hours for
+        # assets with "standard venue operating hours" so we
+        # only "pretend the feed is live" when the dst asset
+        # type is NOT within the NON-NORMAL-venue set: aka not
+        # commodities, forex or crypto currencies which CAN
+        # always return a NaN on a snap quote request during
+        # normal venue hours. In the case of a closed venue
+        # (equitiies, futes, bonds etc.) we at least try to
+        # grab the OHLC history.
         if (
-            isnan(first_ticker.last)  # last quote price value is nan
+            first_ticker
+            and
+            isnan(first_ticker.last)
+            # SO, if the last quote price value is NaN we ONLY
+            # "pretend to do" `feed_is_live.set()` if it's a known
+            # dst asset venue with a lot of closed operating hours.
             and mkt.dst.atype not in {
                 'commodity',
                 'fiat',
                 'crypto',
             }
         ):
-            task_status.started((init_msgs, first_quote))
+            task_status.started((
+                init_msgs,
+                first_quote,
+            ))

             # it's not really live but this will unblock
             # the brokerd feed task to tell the ui to update?
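The venue-hours gate above reduces to: a NaN last price only implies a closed venue for asset types that actually have closing hours. As a condensed predicate (a sketch; the `atype` values are from the patch):

    from math import isnan

    # dst-asset types whose venues run (nearly) around the clock
    # yet can still return NaN snap quotes during normal hours
    ALWAYS_OPEN: set[str] = {'commodity', 'fiat', 'crypto'}

    def venue_likely_closed(last_price: float, atype: str) -> bool:
        # NaN last + an asset class with real closing hours
        # -> "pretend live" and fall back to OHLC history
        return isnan(last_price) and atype not in ALWAYS_OPEN

    assert venue_likely_closed(float('nan'), 'stock')
    assert not venue_likely_closed(float('nan'), 'crypto')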
@@ -865,6 +959,28 @@ async def stream_quotes(
             await trio.sleep_forever()
             return  # we never expect feed to come up?

+        # TODO: we should instead spawn a task that waits on a feed
+        # to start and let it wait indefinitely..instead of this
+        # hard coded stuff.
+        # async def wait_for_first_quote():
+        #     with trio.CancelScope() as cs:
+
+        # XXX: MUST acquire a ticker + first quote before starting
+        # the live quotes loop!
+        # with trio.move_on_after(1):
+        first_ticker = await proxy.get_quote(
+            contract=con,
+            raise_on_timeout=True,
+        )
+        first_quote: dict = normalize(first_ticker)
+
+        # TODO: we need a stack-oriented log levels filters for
+        # this!
+        # log.info(message, filter={'stack': 'live_feed'}) ?
+        log.runtime(
+            'Rxed init quote:\n'
+            f'{pformat(first_quote)}'
+        )
         cs: trio.CancelScope | None = None
         startup: bool = True
         while (
@@ -885,8 +1001,11 @@ async def stream_quotes(

                     # only on first entry at feed boot up
                     if startup:
-                        startup = False
-                        task_status.started((init_msgs, first_quote))
+                        startup: bool = False
+                        task_status.started((
+                            init_msgs,
+                            first_quote,
+                        ))

                     # start a stream restarter task which monitors the
                     # data feed event.
|                     # start a stream restarter task which monitors the |                     # start a stream restarter task which monitors the | ||||||
|                     # data feed event. |                     # data feed event. | ||||||
|  | @ -910,7 +1029,7 @@ async def stream_quotes( | ||||||
| 
 | 
 | ||||||
|                             # generally speaking these feeds don't |                             # generally speaking these feeds don't | ||||||
|                             # include vlm data. |                             # include vlm data. | ||||||
|                             atype = mkt.dst.atype |                             atype: str = mkt.dst.atype | ||||||
|                             log.info( |                             log.info( | ||||||
|                                 f'No-vlm {mkt.fqme}@{atype}, skipping quote poll' |                                 f'No-vlm {mkt.fqme}@{atype}, skipping quote poll' | ||||||
|                             ) |                             ) | ||||||
@@ -946,7 +1065,8 @@ async def stream_quotes(
                             quote = normalize(ticker)
                             log.debug(f"First ticker received {quote}")

-                        # tell caller quotes are now coming in live
+                        # tell data-layer spawner-caller that live
+                        # quotes are now streaming.
                         feed_is_live.set()

                         # last = time.time()
@@ -31,9 +31,14 @@ from typing import (
 )

 from bidict import bidict
-import pendulum
-from ib_insync.objects import (
+from pendulum import (
+    DateTime,
+    parse,
+    from_timestamp,
+)
+from ib_insync import (
     Contract,
+    Commodity,
     Fill,
     Execution,
     CommissionReport,
@@ -65,10 +70,11 @@ tx_sort: Callable = partial(
     iter_by_dt,
     parsers={
         'dateTime': parse_flex_dt,
-        'datetime': pendulum.parse,
-        # for some some fucking 2022 and
-        # back options records...fuck me.
-        'date': pendulum.parse,
+        'datetime': parse,
+
+        # XXX: for some some fucking 2022 and
+        # back options records.. f@#$ me..
+        'date': parse,
     }
 )
@@ -88,15 +94,38 @@ def norm_trade(

     conid: int = str(record.get('conId') or record['conid'])
     bs_mktid: str = str(conid)
-    comms = record.get('commission')
-    if comms is None:
-        comms = -1*record['ibCommission']

-    price = record.get('price') or record['tradePrice']
+    # NOTE: sometimes weird records (like BTTX?)
+    # have no field for this?
+    comms: float = -1 * (
+        record.get('commission')
+        or record.get('ibCommission')
+        or 0
+    )
+    if not comms:
+        log.warning(
+            'No commissions found for record?\n'
+            f'{pformat(record)}\n'
+        )
+
+    price: float = (
+        record.get('price')
+        or record.get('tradePrice')
+    )
+    if price is None:
+        log.warning(
+            'No `price` field found in record?\n'
+            'Skipping normalization..\n'
+            f'{pformat(record)}\n'
+        )
+        return None

     # the api doesn't do the -/+ on the quantity for you but flex
     # records do.. are you fucking serious ib...!?
-    size = record.get('quantity') or record['shares'] * {
+    size: float|int = (
+        record.get('quantity')
+        or record['shares']
+    ) * {
         'BOT': 1,
         'SLD': -1,
     }[record['side']]
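Note that the added parentheses around the size lookup also fix an operator-precedence bug: in the old form the `* sign` multiplier bound only to `record['shares']`, so API records carrying a `quantity` field were never sign-flipped on sells. A minimal demonstration (`.get('shares', 0)` used purely for illustration):

    record = {'quantity': 100, 'side': 'SLD'}
    sign = {'BOT': 1, 'SLD': -1}[record['side']]

    # old binding: `a or (b * sign)` -> quantity returned unsigned
    old = record.get('quantity') or record.get('shares', 0) * sign
    assert old == 100  # a sell that was never negated!

    # new binding: `(a or b) * sign`
    new = (record.get('quantity') or record.get('shares', 0)) * sign
    assert new == -100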
@@ -127,26 +156,31 @@ def norm_trade(
         # otype = tail[6]
         # strike = tail[7:]

-        print(f'skipping opts contract {symbol}')
+        log.warning(
+            f'Skipping option contract -> NO SUPPORT YET!\n'
+            f'{symbol}\n'
+        )
         return None

     # timestamping is way different in API records
-    dtstr = record.get('datetime')
-    date = record.get('date')
-    flex_dtstr = record.get('dateTime')
+    dtstr: str = record.get('datetime')
+    date: str = record.get('date')
+    flex_dtstr: str = record.get('dateTime')

     if dtstr or date:
-        dt = pendulum.parse(dtstr or date)
+        dt: DateTime = parse(dtstr or date)

     elif flex_dtstr:
         # probably a flex record with a wonky non-std timestamp..
-        dt = parse_flex_dt(record['dateTime'])
+        dt: DateTime = parse_flex_dt(record['dateTime'])

     # special handling of symbol extraction from
     # flex records using some ad-hoc schema parsing.
-    asset_type: str = record.get(
-        'assetCategory'
-    ) or record.get('secType', 'STK')
+    asset_type: str = (
+        record.get('assetCategory')
+        or record.get('secType')
+        or 'STK'
+    )

     if (expiry := (
             record.get('lastTradeDateOrContractMonth')
@@ -237,6 +271,21 @@ def norm_trade(
                 name=symbol.lower(),
                 atype='option',
                 tx_tick=Decimal('1'),
+
+                # TODO: we should probably always cast to the
+                # `Contract` instance then dict-serialize that for
+                # the `.info` field!
+                # info=asdict(Option()),
+            )
+
+        case 'CMDTY':
+            from .symbols import _adhoc_symbol_map
+            con_kwargs, _ = _adhoc_symbol_map[symbol.upper()]
+            dst = Asset(
+                name=symbol.lower(),
+                atype='commodity',
+                tx_tick=Decimal('1'),
+                info=asdict(Commodity(**con_kwargs)),
             )

     # try to build out piker fqme from record.
|     # try to build out piker fqme from record. |     # try to build out piker fqme from record. | ||||||
|  | @ -341,6 +390,7 @@ def norm_trade_records( | ||||||
|         if txn is None: |         if txn is None: | ||||||
|             continue |             continue | ||||||
| 
 | 
 | ||||||
|  |         # inject txns sorted by datetime | ||||||
|         insort( |         insort( | ||||||
|             records, |             records, | ||||||
|             txn, |             txn, | ||||||
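`insort` here is `bisect.insort`, which keeps the records list datetime-ordered on every insert instead of sorting after the fact; in isolation (the `key=` parameter needs Python 3.10+):

    from bisect import insort
    from datetime import datetime, timedelta

    txns: list[dict] = []
    t0 = datetime(2023, 1, 1)

    # out-of-order arrivals still land sorted by timestamp
    for offset in (5, 1, 3):
        insort(
            txns,
            {'dt': t0 + timedelta(days=offset)},
            key=lambda t: t['dt'],
        )

    assert [t['dt'].day for t in txns] == [2, 4, 6]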
@@ -389,7 +439,7 @@ def api_trades_to_ledger_entries(
                     txn_dict[attr_name] = val

         tid = str(txn_dict['execId'])
-        dt = pendulum.from_timestamp(txn_dict['time'])
+        dt = from_timestamp(txn_dict['time'])
         txn_dict['datetime'] = str(dt)
         acctid = accounts[txn_dict['acctNumber']]
@@ -29,7 +29,7 @@ from typing import (
     TYPE_CHECKING,
 )

-from fuzzywuzzy import process as fuzzy
+from rapidfuzz import process as fuzzy
 import ib_insync as ibis
 import tractor
 import trio
@@ -165,6 +165,7 @@ _exch_skip_list = {
     'MEXI',  # mexican stocks

     # no idea
+    'NSE',
     'VALUE',
     'FUNDSERV',
     'SWB2',
@@ -208,12 +209,15 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
                 break

             ib_client = proxy._aio_ns.ib
-            log.info(f'Using {ib_client} for symbol search')
+            log.info(
+                f'Using API client for symbol-search\n'
+                f'{ib_client}\n'
+            )

             last = time.time()
             async for pattern in stream:
                 log.info(f'received {pattern}')
-                now = time.time()
+                now: float = time.time()

                 # this causes tractor hang...
                 # assert 0
@@ -260,7 +264,9 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
                 # defined adhoc symbol set.
                 stock_results = []

-                async def stash_results(target: Awaitable[list]):
+                async def extend_results(
+                    target: Awaitable[list]
+                ) -> None:
                     try:
                         results = await target
                     except tractor.trionics.Lagged:
@@ -269,11 +275,11 @@ async def open_symbol_search(ctx: tractor.Context) -> None:

                     stock_results.extend(results)

-                for i in range(10):
+                for _ in range(10):
                     with trio.move_on_after(3) as cs:
                         async with trio.open_nursery() as sn:
                             sn.start_soon(
-                                stash_results,
+                                extend_results,
                                 proxy.search_symbols(
                                     pattern=pattern,
                                     upto=5,
@@ -288,11 +294,13 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
                             f'Search timeout? {proxy._aio_ns.ib.client}'
                         )
                         continue
-                    else:
+                    elif stock_results:
                         break
+                    # else:
+                    # await tractor.pause()

                     # # match against our ad-hoc set immediately
-                    # adhoc_matches = fuzzy.extractBests(
+                    # adhoc_matches = fuzzy.extract(
                     #     pattern,
                     #     list(_adhoc_futes_set),
                     #     score_cutoff=90,
@@ -305,7 +313,7 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
                     #     adhoc_matches}

                 log.debug(f'fuzzy matching stocks {stock_results}')
-                stock_matches = fuzzy.extractBests(
+                stock_matches = fuzzy.extract(
                     pattern,
                     stock_results,
                     score_cutoff=50,
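The `fuzzywuzzy` -> `rapidfuzz` swap leans on `rapidfuzz.process.extract()` being a near drop-in for `extractBests()`: same query/choices/`score_cutoff` shape, returning best-first `(choice, score, index)` triples. One behavioral difference worth noting: `rapidfuzz` applies no default lower-casing preprocessing, which is presumably why other call sites in this changeset upper-case their queries. A standalone check:

    from rapidfuzz import process as fuzzy

    choices = ['TSLA', 'TSM', 'AAPL', 'TM']
    matches = fuzzy.extract(
        'TSL',  # note: matched case-sensitively by default
        choices,
        score_cutoff=50,
    )
    best, score, idx = matches[0]
    assert best == 'TSLA' and idx == 0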
@@ -423,9 +431,9 @@ def con2fqme(
         except KeyError:
             pass

-    suffix = con.primaryExchange or con.exchange
-    symbol = con.symbol
-    expiry = con.lastTradeDateOrContractMonth or ''
+    suffix: str = con.primaryExchange or con.exchange
+    symbol: str = con.symbol
+    expiry: str = con.lastTradeDateOrContractMonth or ''

     match con:
         case ibis.Option():
@@ -517,7 +525,21 @@ async def get_mkt_info(
         venue = con.primaryExchange or con.exchange

     price_tick: Decimal = Decimal(str(details.minTick))
-    # price_tick: Decimal = Decimal('0.01')
+    ib_min_tick_gt_2: Decimal = Decimal('0.01')
+    if (
+        price_tick < ib_min_tick_gt_2
+    ):
+        # TODO: we need to add some kinda dynamic rounding sys
+        # to our MktPair i guess?
+        # not sure where the logic should sit, but likely inside
+        # the `.clearing._ems` i suppose...
+        log.warning(
+            'IB seems to disallow a min price tick < 0.01 '
+            'when the price is > 2.0..?\n'
+            f'Decreasing min tick precision for {fqme} to 0.01'
+        )
+        # price_tick = ib_min_tick
+        # await tractor.pause()

     if atype == 'stock':
         # XXX: GRRRR they don't support fractional share sizes for
@@ -27,9 +27,8 @@ from typing import (
 )
 import time

+import httpx
 import pendulum
-import asks
-from fuzzywuzzy import process as fuzzy
 import numpy as np
 import urllib.parse
 import hashlib
@@ -38,7 +37,10 @@ import base64
 import trio

 from piker import config
-from piker.data import def_iohlcv_fields
+from piker.data import (
+    def_iohlcv_fields,
+    match_from_pairs,
+)
 from piker.accounting._mktinfo import (
     Asset,
     digits_to_dec,
@@ -58,6 +60,11 @@ log = get_logger('piker.brokers.kraken')

 # <uri>/<version>/
 _url = 'https://api.kraken.com/0'
+
+_headers: dict[str, str] = {
+    'User-Agent': 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
+}
+
 # TODO: this is the only backend providing this right?
 # in which case we should drop it from the defaults and
 # instead make a custom fields descr in this module!
@@ -68,12 +75,18 @@ _symbol_info_translation: dict[str, str] = {


 def get_config() -> dict[str, Any]:
+    '''
+    Load our section from `piker/brokers.toml`.

-    conf, path = config.load()
-    section = conf.get('kraken')
-
-    if section is None:
-        log.warning(f'No config section found for kraken in {path}')
+    '''
+    conf, path = config.load(
+        conf_name='brokers',
+        touch_if_dne=True,
+    )
+    if (section := conf.get('kraken')) is None:
+        log.warning(
+            f'No config section found for kraken in {path}'
+        )
         return {}

     return section
@@ -106,16 +119,19 @@ class InvalidKey(ValueError):

 class Client:

-    # symbol mapping from all names to the altname
-    _altnames: dict[str, str] = {}
-
-    # key-ed by kraken's own bs_mktids (like fricking "XXMRZEUR")
-    # with said keys used directly from EP responses so that ledger
-    # parsing can be easily accomplished from both trade-event-msgs
-    # and offline toml files
+    # assets and mkt pairs are key-ed by kraken's ReST response
+    # symbol-bs_mktids (we call them "X-keys" like fricking
+    # "XXMRZEUR"). these keys used directly since ledger endpoints
+    # return transaction sets keyed with the same set!
     _Assets: dict[str, Asset] = {}
     _AssetPairs: dict[str, Pair] = {}

+    # offer lookup tables for all .altname and .wsname
+    # to the equivalent .xname so that various symbol-schemas
+    # can be mapped to `Pair`s in the tables above.
+    _altnames: dict[str, str] = {}
+    _wsnames: dict[str, str] = {}
+
     # key-ed by `Pair.bs_fqme: str`, and thus used for search
     # allowing for lookup using piker's own FQME symbology sys.
     _pairs: dict[str, Pair] = {}
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         config: dict[str, str], |         config: dict[str, str], | ||||||
|  |         httpx_client: httpx.AsyncClient, | ||||||
|  | 
 | ||||||
|         name: str = '', |         name: str = '', | ||||||
|         api_key: str = '', |         api_key: str = '', | ||||||
|         secret: str = '' |         secret: str = '' | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         self._sesh = asks.Session(connections=4) | 
 | ||||||
|         self._sesh.base_location = _url |         self._sesh: httpx.AsyncClient = httpx_client | ||||||
|         self._sesh.headers.update({ | 
 | ||||||
|             'User-Agent': |  | ||||||
|                 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)' |  | ||||||
|         }) |  | ||||||
|         self._name = name |         self._name = name | ||||||
|         self._api_key = api_key |         self._api_key = api_key | ||||||
|         self._secret = secret |         self._secret = secret | ||||||
|  | @ -155,10 +170,9 @@ class Client: | ||||||
|         method: str, |         method: str, | ||||||
|         data: dict, |         data: dict, | ||||||
|     ) -> dict[str, Any]: |     ) -> dict[str, Any]: | ||||||
|         resp = await self._sesh.post( |         resp: httpx.Response = await self._sesh.post( | ||||||
|             path=f'/public/{method}', |             url=f'/public/{method}', | ||||||
|             json=data, |             json=data, | ||||||
|             timeout=float('inf') |  | ||||||
|         ) |         ) | ||||||
|         return resproc(resp, log) |         return resproc(resp, log) | ||||||
| 
 | 
 | ||||||
@@ -169,18 +183,18 @@ class Client:
         uri_path: str
     ) -> dict[str, Any]:
         headers = {
-            'Content-Type':
-                'application/x-www-form-urlencoded',
-            'API-Key':
-                self._api_key,
-            'API-Sign':
-                get_kraken_signature(uri_path, data, self._secret)
+            'Content-Type': 'application/x-www-form-urlencoded',
+            'API-Key': self._api_key,
+            'API-Sign': get_kraken_signature(
+                uri_path,
+                data,
+                self._secret,
+            ),
         }
-        resp = await self._sesh.post(
-            path=f'/private/{method}',
+        resp: httpx.Response = await self._sesh.post(
+            url=f'/private/{method}',
             data=data,
             headers=headers,
-            timeout=float('inf')
         )
         return resproc(resp, log)
@@ -209,8 +223,8 @@ class Client:
         by_bsmktid: dict[str, dict] = resp['result']

         balances: dict = {}
-        for respname, bal in by_bsmktid.items():
-            asset: Asset = self._Assets[respname]
+        for xname, bal in by_bsmktid.items():
+            asset: Asset = self._Assets[xname]

             # TODO: which KEY should we use? it's used to index
             # the `Account.pps: dict` ..
@@ -367,7 +381,6 @@ class Client:
             asset_key: str = entry['asset']
             asset: Asset = self._Assets[asset_key]
             asset_key: str = asset.name.lower()
-            # asset_key: str = self._altnames[asset_key].lower()

             # XXX: this is in the asset units (likely) so it isn't
             # quite the same as a commisions cost necessarily..)
@@ -473,25 +486,31 @@ class Client:
             if err:
                 raise SymbolNotFound(pair_patt)

-            # NOTE: we key pairs by our custom defined `.bs_fqme`
-            # field since we want to offer search over this key
-            # set, callers should fill out lookup tables for
-            # kraken's bs_mktid keys to map to these keys!
-            for key, data in resp['result'].items():
-                pair = Pair(respname=key, **data)
+            # NOTE: we try to key pairs by our custom defined
+            # `.bs_fqme` field since we want to offer search over
+            # this pattern set, callers should fill out lookup
+            # tables for kraken's bs_mktid keys to map to these
+            # keys!
+            # XXX: FURTHER kraken's data eng team decided to offer
+            # 3 frickin market-pair-symbol key sets depending on
+            # which frickin API is being used.
+            # Example for the trading pair 'LTC<EUR'
+            # - the "X-key" from rest eps 'XLTCZEUR'
+            # - the "websocket key" from ws msgs is 'LTC/EUR'
+            # - the "altname key" also delivered in pair info is 'LTCEUR'
+            for xkey, data in resp['result'].items():

-                # always cache so we can possibly do faster lookup
-                self._AssetPairs[key] = pair
+                # NOTE: always cache in pairs tables for faster lookup
+                pair = Pair(xname=xkey, **data)

-                bs_fqme: str = pair.bs_fqme
-
-                self._pairs[bs_fqme] = pair
-
-                # register the piker pair under all monikers, a giant flat
-                # surjection of all possible (and stupid) kraken names to
-                # the FMQE style piker key.
-                self._altnames[pair.altname] = bs_fqme
-                self._altnames[pair.wsname] = bs_fqme
+                # register the above `Pair` structs for all
+                # key-sets/monikers: a set of 4 (frickin) tables
+                # acting as a combined surjection of all possible
+                # (and stupid) kraken names to their `Pair` obj.
+                self._AssetPairs[xkey] = pair
+                self._pairs[pair.bs_fqme] = pair
+                self._altnames[pair.altname] = pair
+                self._wsnames[pair.wsname] = pair

         if pair_patt is not None:
             return next(iter(self._pairs.items()))[1]
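To make the multi-table scheme concrete: after `get_mkt_pairs()` runs, the same pair struct is reachable under every kraken moniker. A toy sketch with plain dicts standing in for `Pair` (the `bs_fqme` value shape is assumed):

    # one struct, registered 4 ways (e.g. for the LTC/EUR pair)
    pair = {
        'xname': 'XLTCZEUR',  # ReST "X-key"
        'altname': 'LTCEUR',
        'wsname': 'LTC/EUR',
        'bs_fqme': 'LTCEUR',  # assumed piker-native key shape
    }

    _AssetPairs = {pair['xname']: pair}
    _pairs = {pair['bs_fqme']: pair}
    _altnames = {pair['altname']: pair}
    _wsnames = {pair['wsname']: pair}

    # any moniker from any kraken API resolves to the same struct
    assert (
        _AssetPairs['XLTCZEUR']
        is _pairs['LTCEUR']
        is _altnames['LTCEUR']
        is _wsnames['LTC/EUR']
    )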
@@ -506,12 +525,13 @@ class Client:
         Load all market pair info build and cache it for downstream
         use.

-        An ``._altnames: dict[str, str]`` is available for looking
-        up the piker-native FQME style `Pair.bs_fqme: str` for any
-        input of the three (yes, it's that idiotic) available
-        key-sets that kraken frickin offers depending on the API
-        including the .altname, .wsname and the weird ass default
-        set they return in rest responses..
+        Multiple pair info lookup tables (like ``._altnames:
+        dict[str, str]``) are created for looking up the
+        piker-native `Pair`-struct from any input of the three
+        (yes, it's that idiotic..) available symbol/pair-key-sets
+        that kraken frickin offers depending on the API including
+        the .altname, .wsname and the weird ass default set they
+        return in ReST responses .xname..

         '''
         if (
|  | @ -539,13 +559,17 @@ class Client: | ||||||
|             await self.get_mkt_pairs() |             await self.get_mkt_pairs() | ||||||
|             assert self._pairs, '`Client.get_mkt_pairs()` was never called!?' |             assert self._pairs, '`Client.get_mkt_pairs()` was never called!?' | ||||||
| 
 | 
 | ||||||
|         matches = fuzzy.extractBests( |         matches: dict[str, Pair] = match_from_pairs( | ||||||
|             pattern, |             pairs=self._pairs, | ||||||
|             self._pairs, |             query=pattern.upper(), | ||||||
|             score_cutoff=50, |             score_cutoff=50, | ||||||
|         ) |         ) | ||||||
|         # repack in dict form | 
 | ||||||
|         return {item[0].altname: item[0] for item in matches} |         # repack in .altname-keyed output table | ||||||
|  |         return { | ||||||
|  |             pair.altname: pair | ||||||
|  |             for pair in matches.values() | ||||||
|  |         } | ||||||
| 
 | 
 | ||||||
     async def bars(
         self,
@@ -628,7 +652,7 @@
     def to_bs_fqme(
         cls,
         pair_str: str
-    ) -> tuple[str, Pair]:
+    ) -> str:
         '''
         Normalize symbol names to to a 3x3 pair from the global
         definition map which we build out from the data retreived from
@@ -636,7 +660,7 @@

         '''
         try:
-            return cls._altnames[pair_str.upper()]
+            return cls._altnames[pair_str.upper()].bs_fqme
         except KeyError as ke:
             raise SymbolNotFound(f'kraken has no {ke.args[0]}')
@@ -644,10 +668,19 @@ async def get_client() -> Client:
 @acm
 async def get_client() -> Client:

-    conf = get_config()
-    if conf:
-        client = Client(
-            conf,
+    conf: dict[str, Any] = get_config()
+    async with httpx.AsyncClient(
+        base_url=_url,
+        headers=_headers,
+
+        # TODO: is there a way to numerate this?
+        # https://www.python-httpx.org/advanced/clients/#why-use-a-client
+        # connections=4
+    ) as trio_client:
+        if conf:
+            client = Client(
+                conf,
+                httpx_client=trio_client,

                 # TODO: don't break these up and just do internal
                 # conf lookups instead..
@@ -656,7 +689,10 @@ async def get_client() -> Client:
-            secret=conf['secret']
-        )
-    else:
-        client = Client({})
+                secret=conf['secret']
+            )
+        else:
+            client = Client(
+                conf={},
+                httpx_client=trio_client,
+            )

         # at startup, load all symbols, and asset info in
         # batch requests.

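The point of threading a single `httpx.AsyncClient` through `Client` is connection pooling: every REST call reuses one TCP/TLS session instead of re-handshaking per request (httpx runs happily under `trio` via `anyio`). A minimal standalone demo of the same pattern; the base URL and endpoint are kraken's public ones, used here purely for illustration:

    import httpx
    import trio

    async def main() -> None:
        # one pooled session shared by all requests in this scope
        async with httpx.AsyncClient(
            base_url='https://api.kraken.com/0',
        ) as http:
            res = await http.get('/public/Time')
            res.raise_for_status()
            print(res.json())

    trio.run(main)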
@@ -407,7 +407,7 @@ def trades2pps(
                 # included?
                 account='kraken.' + acctid,
                 symbol=p.mkt.fqme,
-                size=p.size,
+                size=p.cumsize,
                 avg_price=p.ppu,
                 currency='',
             )
@@ -513,6 +513,7 @@ async def open_trade_dialog(
             ledger_trans: dict[str, Transaction] = await norm_trade_records(
                 ledger,
                 client,
+                api_name_set='xname',
             )

             if not acnt.pps:
@@ -534,6 +535,7 @@ async def open_trade_dialog(
             api_trans: dict[str, Transaction] = await norm_trade_records(
                 tids2trades,
                 client,
+                api_name_set='xname',
             )

             # retrieve kraken reported balances
@@ -610,18 +612,18 @@ async def open_trade_dialog(

                 # enter relay loop
                 await handle_order_updates(
-                    client,
-                    ws,
-                    stream,
-                    ems_stream,
-                    apiflows,
-                    ids,
-                    reqids2txids,
-                    acnt,
-                    api_trans,
-                    acctid,
-                    acc_name,
-                    token,
+                    client=client,
+                    ws=ws,
+                    ws_stream=stream,
+                    ems_stream=ems_stream,
+                    apiflows=apiflows,
+                    ids=ids,
+                    reqids2txids=reqids2txids,
+                    acnt=acnt,
+                    ledger=ledger,
+                    acctid=acctid,
+                    acc_name=acc_name,
+                    token=token,
                 )


@@ -637,7 +639,8 @@ async def handle_order_updates(

     # transaction records which will be updated
     # on new trade clearing events (aka order "fills")
-    ledger_trans: dict[str, Transaction],
+    ledger: TransactionLedger,
+    # ledger_trans: dict[str, Transaction],
     acctid: str,
     acc_name: str,
     token: str,
@@ -697,7 +700,8 @@ async def handle_order_updates(
                     # if tid not in ledger_trans
                 }
                 for tid, trade in trades.items():
-                    assert tid not in ledger_trans
+                    # assert tid not in ledger_trans
+                    assert tid not in ledger
                     txid = trade['ordertxid']
                     reqid = trade.get('userref')

@@ -743,12 +747,19 @@ async def handle_order_updates(
                 new_trans = await norm_trade_records(
                     trades,
                     client,
+                    api_name_set='wsname',
                 )
-                ppmsgs = trades2pps(
-                    acnt,
-                    acctid,
-                    new_trans,
+                ppmsgs: list[BrokerdPosition] = trades2pps(
+                    acnt=acnt,
+                    ledger=ledger,
+                    acctid=acctid,
+                    new_trans=new_trans,
                 )
+                # ppmsgs = trades2pps(
+                #     acnt,
+                #     acctid,
+                #     new_trans,
+                # )
                 for pp_msg in ppmsgs:
                     await ems_stream.send(pp_msg)

@@ -64,9 +64,19 @@ def norm_trade(
         'sell': -1,
     }[record['type']]

-    rest_pair_key: str = record['pair']
-    pair: Pair = pairs[rest_pair_key]
+    # NOTE: this value may be either the websocket OR the rest schema
+    # so we need to detect the key format and then choose the
+    # correct symbol lookup table to eventually get a ``Pair``..
+    # See internals of `Client.asset_pairs()` for deats!
+    src_pair_key: str = record['pair']
+
+    # XXX: kraken's data engineering is soo bad they require THREE
+    # different pair schemas (more or less seemingly tied to
+    # transport-APIs).. LITERALLY they return different market id
+    # pairs in the ledger endpoints vs. the websocket event subs..
+    # lookup the pair using the appropriate table depending on the
+    # API-key-schema..
+    pair: Pair = pairs[src_pair_key]
     fqme: str = pair.bs_fqme.lower() + '.kraken'

     return Transaction(
@@ -83,6 +93,7 @@ def norm_trade(
 async def norm_trade_records(
     ledger: dict[str, Any],
     client: Client,
+    api_name_set: str = 'xname',

 ) -> dict[str, Transaction]:
     '''
@@ -97,11 +108,16 @@ async def norm_trade_records(
         # mkt: MktPair = (await get_mkt_info(manual_fqme))[0]
         # fqme: str = mkt.fqme
         # assert fqme == manual_fqme
+        pairs: dict[str, Pair] = {
+            'xname': client._AssetPairs,
+            'wsname': client._wsnames,
+            'altname': client._altnames,
+        }[api_name_set]

         records[tid] = norm_trade(
             tid,
             record,
-            pairs=client._AssetPairs,
+            pairs=pairs,
         )

     return records

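The practical upshot of the new `api_name_set` switch: callers pick the lookup table matching wherever their records came from, exactly as the trade-dialog changes earlier in this diff do. Schematically (inside the broker-dialog task):

    # ReST ledger/trade-history records carry default 'xname' keys
    ledger_trans = await norm_trade_records(
        ledger,
        client,
        api_name_set='xname',
    )

    # live fills off the websocket carry 'wsname' keys instead
    ws_trans = await norm_trade_records(
        trades,
        client,
        api_name_set='wsname',
    )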
@@ -21,7 +21,7 @@ Symbology defs and search.
 from decimal import Decimal

 import tractor
-from fuzzywuzzy import process as fuzzy
+from rapidfuzz import process as fuzzy

 from piker._cacheables import (
     async_lifo_cache,
@@ -43,7 +43,7 @@ from piker.accounting._mktinfo import (

 # https://www.kraken.com/features/api#get-tradable-pairs
 class Pair(Struct):
-    respname: str  # idiotic bs_mktid equiv i guess?
+    xname: str  # idiotic bs_mktid equiv i guess?
     altname: str  # alternate pair name
     wsname: str  # WebSocket pair name (if available)
     aclass_base: str  # asset class of base component
@@ -94,7 +94,7 @@ class Pair(Struct):
         make up their minds on a better key set XD

         '''
-        return self.respname
+        return self.xname

     @property
     def price_tick(self) -> Decimal:
@@ -136,19 +136,10 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
         await ctx.started(cache)

         async with ctx.open_stream() as stream:
-
             async for pattern in stream:
-
-                matches = fuzzy.extractBests(
-                    pattern,
-                    client._pairs,
-                    score_cutoff=50,
-                )
-                # repack in dict form
-                await stream.send({
-                    pair[0].altname: pair[0]
-                    for pair in matches
-                })
+                await stream.send(
+                    await client.search_symbols(pattern)
+                )


 @async_lifo_cache()

@@ -16,10 +16,9 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 '''
-Kucoin broker backend
+Kucoin cex API backend.

 '''
-
 from contextlib import (
     asynccontextmanager as acm,
     aclosing,
@@ -41,9 +40,8 @@ from typing import (
 import wsproto
 from uuid import uuid4

-from fuzzywuzzy import process as fuzzy
 from trio_typing import TaskStatus
-import asks
+import httpx
 from bidict import bidict
 import numpy as np
 import pendulum
@@ -64,8 +62,11 @@ from piker._cacheables import (
 )
 from piker.log import get_logger
 from piker.data.validate import FeedInit
-from piker.types import Struct
-from piker.data import def_iohlcv_fields
+from piker.types import Struct  # NOTE, this is already a `tractor.msg.Struct`
+from piker.data import (
+    def_iohlcv_fields,
+    match_from_pairs,
+)
 from piker.data._web_bs import (
     open_autorecon_ws,
     NoBsWs,
@@ -97,9 +98,18 @@ class KucoinMktPair(Struct, frozen=True):
     def size_tick(self) -> Decimal:
         return Decimal(str(self.quoteMinSize))

+    callauctionFirstStageStartTime: None|float
+    callauctionIsEnabled: bool
+    callauctionPriceCeiling: float|None
+    callauctionPriceFloor: float|None
+    callauctionSecondStageStartTime: float|None
+    callauctionThirdStageStartTime: float|None
+
     enableTrading: bool
+    feeCategory: int
     feeCurrency: str
     isMarginEnabled: bool
+    makerFeeCoefficient: float
     market: str
     minFunds: float
     name: str
@@ -109,7 +119,10 @@ class KucoinMktPair(Struct, frozen=True):
     quoteIncrement: float
     quoteMaxSize: float
     quoteMinSize: float
+    st: bool
     symbol: str  # our bs_mktid, kucoin's internal id
+    takerFeeCoefficient: float
+    tradingStartTime: float|None


 class AccountTrade(Struct, frozen=True):
@@ -210,7 +223,11 @@ def get_config() -> BrokerConfig | None:

 class Client:

-    def __init__(self) -> None:
+    def __init__(
+        self,
+        httpx_client: httpx.AsyncClient,
+    ) -> None:
+        self._http: httpx.AsyncClient = httpx_client
         self._config: BrokerConfig|None = get_config()
         self._pairs: dict[str, KucoinMktPair] = {}
         self._fqmes2mktids: bidict[str, str] = bidict()
@@ -225,18 +242,24 @@ class Client:

     ) -> dict[str, str | bytes]:
         '''
-        Generate authenticated request headers
+        Generate authenticated request headers:
+
         https://docs.kucoin.com/#authentication
+        https://www.kucoin.com/docs/basic-info/connection-method/authentication/creating-a-request
+        https://www.kucoin.com/docs/basic-info/connection-method/authentication/signing-a-message

         '''
-
         if not self._config:
             raise ValueError(
-                'No config found when trying to send authenticated request')
+                'No config found when trying to send authenticated request'
+            )

         str_to_sign = (
             str(int(time.time() * 1000))
-            + action + f'/api/{api}/{endpoint.lstrip("/")}'
+            +
+            action
+            +
+            f'/api/{api}/{endpoint.lstrip("/")}'
         )

         signature = base64.b64encode(
@@ -247,6 +270,7 @@ class Client:
             ).digest()
         )

+        # TODO: can we cache this between calls?
         passphrase = base64.b64encode(
             hmac.new(
                 self._config.key_secret.encode('utf-8'),
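The hunk above is cut mid-expression by the diff context window; for reference, a self-contained sketch of kucoin's signing recipe as used here: HMAC-SHA256 over `timestamp + method + path`, base64-encoded (a POST would also append the JSON body to the signed string, and the passphrase gets the same HMAC treatment under key-version 2):

    import base64
    import hashlib
    import hmac
    import time

    def sign_headers(
        key_secret: str,
        action: str,    # 'GET' | 'POST'
        path: str,      # e.g. '/api/v2/symbols'
    ) -> dict[str, str]:
        ts: str = str(int(time.time() * 1000))
        sig = base64.b64encode(
            hmac.new(
                key_secret.encode('utf-8'),
                (ts + action + path).encode('utf-8'),
                hashlib.sha256,
            ).digest()
        )
        return {
            'KC-API-TIMESTAMP': ts,
            'KC-API-SIGN': sig.decode('ascii'),
        }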
@@ -268,8 +292,10 @@ class Client:
         self,
         action: Literal['POST', 'GET'],
         endpoint: str,
+
         api: str = 'v2',
         headers: dict = {},
+
     ) -> Any:
         '''
         Generic request wrapper for Kucoin API
@@ -282,14 +308,19 @@ class Client:
                 api,
             )

-        api_url = f'https://api.kucoin.com/api/{api}/{endpoint}'
-
-        res = await asks.request(action, api_url, headers=headers)
-
-        json = res.json()
-        if 'data' in json:
-            return json['data']
+        req_meth: Callable = getattr(
+            self._http,
+            action.lower(),
+        )
+        res = await req_meth(
+            url=f'/{api}/{endpoint}',
+            headers=headers,
+        )
+        json: dict = res.json()
+        if (data := json.get('data')) is not None:
+            return data
         else:
+            api_url: str = self._http.base_url
             log.error(
                 f'Error making request to {api_url} ->\n'
                 f'{pformat(res)}'
@@ -347,8 +378,8 @@ class Client:
             currencies: dict[str, Currency] = {}
             entries: list[dict] = await self._request(
                 'GET',
-                api='v1',
                 endpoint='currencies',
+                api='v1',
             )
             for entry in entries:
                 curr = Currency(**entry).copy()
@@ -364,20 +395,29 @@ class Client:
         dict[str, KucoinMktPair],
         bidict[str, KucoinMktPair],
     ]:
-        entries = await self._request('GET', 'symbols')
+        entries = await self._request(
+            'GET',
+            endpoint='symbols',
+        )
         log.info(f' {len(entries)} Kucoin market pairs fetched')

         pairs: dict[str, KucoinMktPair] = {}
         fqmes2mktids: bidict[str, str] = bidict()
         for item in entries:
-            pair = pairs[item['name']] = KucoinMktPair(**item)
+            try:
+                pair = pairs[item['name']] = KucoinMktPair(**item)
+            except TypeError as te:
+                raise TypeError(
+                    '`KucoinMktPair` and response fields do not match ??\n'
+                    f'{KucoinMktPair.fields_diff(item)}\n'
+                ) from te
             fqmes2mktids[
                 item['name'].lower().replace('-', '')
             ] = pair.name

         return pairs, fqmes2mktids

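`Struct.fields_diff()` isn't shown in this diff; presumably it reports schema drift between the struct's declared fields and a raw response payload. A hypothetical reconstruction, not piker's actual implementation:

    def fields_diff(cls, item: dict) -> set[str]:
        # names declared on the struct but missing from the response,
        # plus response keys the struct doesn't declare
        declared: set[str] = set(cls.__annotations__)
        received: set[str] = set(item)
        return declared.symmetric_difference(received)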
-    async def cache_pairs(
+    async def get_mkt_pairs(
         self,
         update: bool = False,

@@ -405,16 +445,27 @@ class Client:

     ) -> dict[str, KucoinMktPair]:
         '''
-        Use fuzzy search to match against all market names.
+        Use fuzzy search engine to match against pairs, deliver
+        matching ones.

         '''
-        data = await self.cache_pairs()
+        if not len(self._pairs):
+            await self.get_mkt_pairs()
+            assert self._pairs, '`Client.get_mkt_pairs()` was never called!?'

-        matches = fuzzy.extractBests(
-            pattern, data, score_cutoff=35, limit=limit
+        matches: dict[str, KucoinMktPair] = match_from_pairs(
+            pairs=self._pairs,
+            query=pattern.upper(),
+            score_cutoff=35,
+            limit=limit,
         )

         # repack in dict form
-        return {item[0].name: item[0] for item in matches}
+        return {
+            pair.name: pair
+            for pair in matches.values()
+        }

     async def last_trades(self, sym: str) -> list[AccountTrade]:
         trades = await self._request(
@@ -554,10 +605,18 @@ def fqme_to_kucoin_sym(

 @acm
 async def get_client() -> AsyncGenerator[Client, None]:
-    client = Client()
-
-    async with trio.open_nursery() as n:
-        n.start_soon(client.cache_pairs)
-        await client.get_currencies()
+    '''
+    Load an API `Client` preconfigured from user settings
+
+    '''
+    async with (
+        httpx.AsyncClient(
+            base_url='https://api.kucoin.com/api',
+        ) as trio_client,
+    ):
+        client = Client(httpx_client=trio_client)
+        async with trio.open_nursery() as tn:
+            tn.start_soon(client.get_mkt_pairs)
+            await client.get_currencies()

-    yield client
+        yield client
@@ -569,7 +628,7 @@ async def open_symbol_search(
 ) -> None:
     async with open_cached_client('kucoin') as client:
         # load all symbols locally for fast search
-        await client.cache_pairs()
+        await client.get_mkt_pairs()
         await ctx.started()

         async with ctx.open_stream() as stream:
@@ -596,7 +655,7 @@ async def open_ping_task(
                 await trio.sleep((ping_interval - 1000) / 1000)
                 await ws.send_msg({'id': connect_id, 'type': 'ping'})

-        log.info('Starting ping task for kucoin ws connection')
+        log.warning('Starting ping task for kucoin ws connection')
         n.start_soon(ping_server)

         yield
@@ -608,16 +667,21 @@ async def open_ping_task(
 async def get_mkt_info(
     fqme: str,

-) -> tuple[MktPair, KucoinMktPair]:
+) -> tuple[
+    MktPair,
+    KucoinMktPair,
+]:
     '''
-    Query for and return a `MktPair` and `KucoinMktPair`.
+    Query for and return both a `piker.accounting.MktPair` and
+    `KucoinMktPair` from provided `fqme: str`
+    (fully-qualified-market-endpoint).

     '''
     async with open_cached_client('kucoin') as client:
         # split off any fqme broker part
         bs_fqme, _, broker = fqme.partition('.')

-        pairs: dict[str, KucoinMktPair] = await client.cache_pairs()
+        pairs: dict[str, KucoinMktPair] = await client.get_mkt_pairs()

         try:
             # likely search result key which is already in native mkt symbol form
@@ -685,6 +749,8 @@ async def stream_quotes(

         log.info(f'Starting up quote stream(s) for {symbols}')
         for sym_str in symbols:
+            mkt: MktPair
+            pair: KucoinMktPair
             mkt, pair = await get_mkt_info(sym_str)
             init_msgs.append(
                 FeedInit(mkt_info=mkt)
@@ -692,7 +758,11 @@ async def stream_quotes(

         ws: NoBsWs
         token, ping_interval = await client._get_ws_token()
-        connect_id = str(uuid4())
+        log.info(f'API reported ping_interval: {ping_interval}\n')
+
+        connect_id: str = str(uuid4())
+        typ: str
+        quote: dict
         async with (
             open_autorecon_ws(
                 (
@@ -706,20 +776,37 @@ async def stream_quotes(
                 ),
             ) as ws,
             open_ping_task(ws, ping_interval, connect_id),
-            aclosing(stream_messages(ws, sym_str)) as msg_gen,
+            aclosing(
+                iter_normed_quotes(
+                    ws, sym_str
+                )
+            ) as iter_quotes,
         ):
-            typ, quote = await anext(msg_gen)
+            typ, quote = await anext(iter_quotes)

-            while typ != 'trade':
-                # take care to not unblock here until we get a real
-                # trade quote
-                typ, quote = await anext(msg_gen)
+            # take care to not unblock here until we get a real
+            # trade quote?
+            # ^TODO, remove this right?
+            # -[ ] what often blocks chart boot/new-feed switching
+            #   since we're waiting for a live quote instead of just
+            #   loading history afap..
+            #  |_ XXX, not sure if we require a bit of rework to core
+            #    feed init logic or if backends just gotta be
+            #    changed up.. feel like there was some causality
+            #    dilemma prolly only seen with IB too..
+            # while typ != 'trade':
+            #     typ, quote = await anext(iter_quotes)

             task_status.started((init_msgs, quote))
             feed_is_live.set()

-            async for typ, msg in msg_gen:
-                await send_chan.send({sym_str: msg})
+            # XXX NOTE, DO NOT include the `.<backend>` suffix!
+            # OW the sampling loop will not broadcast correctly..
+            # since `bus._subscribers.setdefault(bs_fqme, set())`
+            # is used inside `.data.open_feed_bus()` !!!
+            topic: str = mkt.bs_fqme
+            async for typ, quote in iter_quotes:
+                await send_chan.send({topic: quote})

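To make that topic-keying note concrete: feed-bus subscribers are registered under the broker-native `bs_fqme` only, so publishing under the backend-suffixed name silently reaches nobody. Schematically (inside the quote-stream task):

    topic: str = mkt.bs_fqme   # e.g. 'btcusdt' -> matches subscribers
    wrong: str = mkt.fqme      # e.g. 'btcusdt.kucoin' -> no match!
    await send_chan.send({topic: quote})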
 @acm
@@ -774,7 +861,7 @@ async def subscribe(
             )


-async def stream_messages(
+async def iter_normed_quotes(
     ws: NoBsWs,
     sym: str,

@@ -805,6 +892,9 @@ async def iter_normed_quotes(

                 yield 'trade', {
                     'symbol': sym,
+                    # TODO, is 'last' even used elsewhere/a-good
+                    # semantic? can't we just read the ticks with our
+                    # `.data.ticktools.frame_ticks()`?
                     'last': trade_data.price,
                     'brokerd_ts': last_trade_ts,
                     'ticks': [
@@ -897,7 +987,7 @@ async def open_history_client(
             if end_dt is None:
                 inow = round(time.time())

-                print(
+                log.debug(
                     f'difference in time between load and processing'
                     f'{inow - times[-1]}'
                 )

@@ -0,0 +1,49 @@
+piker.clearing
+______________
+trade execution-n-control subsys for both live and paper trading as
+well as algo-trading manual override/interaction across any backend
+broker and data provider.
+
+avail UIs
+*********
+
+order ctl
+---------
+the `piker.clearing` subsys is exposed mainly through
+the `piker chart` GUI as a "chart trader" style UX and
+is automatically enabled whenever a chart is opened.
+
+.. ^TODO, more prose here!
+
+the "manual" order control features are exposed via the
+`piker.ui.order_mode` API and can pretty much always be
+used (at least) in simulated-trading mode, aka "paper"-mode, and
+the micro-manual is as follows:
+
+``order_mode`` (
+    edge triggered activation by any of the following keys,
+    ``mouse-click`` on y-level to submit at that price
+    ):
+
+    - ``f``/ ``ctl-f`` to stage buy
+    - ``d``/ ``ctl-d`` to stage sell
+    - ``a`` to stage alert
+
+
+``search_mode`` (
+    ``ctl-l`` or ``ctl-space`` to open,
+    ``ctl-c`` or ``ctl-space`` to close
+    ):
+
+    - begin typing to have symbol search automatically lookup
+      symbols from all loaded backend (broker) providers
+    - arrow keys and mouse click to navigate selection
+    - vi-like ``ctl-[hjkl]`` for navigation
+
+
+position (pp) mgmt
+------------------
+you can also configure your position allocation limits from the
+sidepane.
+
+.. ^TODO, explain and provide tut once more refined!

@@ -913,8 +913,17 @@ async def translate_and_relay_brokerd_events(
             }:
                 if (
                     not oid
+                    # try to lookup any order dialog by
+                    # brokerd-side id..
+                    and not (
+                        oid := book._ems2brokerd_ids.inverse.get(reqid)
+                    )
                 ):
-                    oid: str = book._ems2brokerd_ids.inverse[reqid]
+                    log.warning(
+                        f'Rxed unusable error-msg:\n'
+                        f'{brokerd_msg}'
+                    )
+                    continue

                 msg = BrokerdError(**brokerd_msg)

@@ -949,7 +958,10 @@ async def translate_and_relay_brokerd_events(
                     fqme: str = (
                         bdmsg.symbol  # might be None
                         or
-                        bdmsg.broker_details['flow']['symbol']
+                        bdmsg.broker_details['flow']
+                        # NOTE: what happens in empty case in the
+                        # broadcast below? it's a problem?
+                        .get('symbol', '')
                     )

                 await router.client_broadcast(

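The walrus-guarded reverse lookup above leans on `bidict`'s `.inverse` view, which stays in sync with the forward mapping; a tiny self-contained demo:

    from bidict import bidict

    # ems-oid <-> brokerd-reqid mapping (values illustrative)
    ems2brokerd = bidict({'oid-abc123': 42})

    # forward: our oid -> brokerd reqid
    assert ems2brokerd['oid-abc123'] == 42

    # reverse: a brokerd reqid -> our oid, `None` if unknown
    assert ems2brokerd.inverse.get(42) == 'oid-abc123'
    assert ems2brokerd.inverse.get(1337) is None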
@@ -26,6 +26,7 @@ from contextlib import asynccontextmanager as acm
 from datetime import datetime
 from operator import itemgetter
 import itertools
+from pprint import pformat
 import time
 from typing import (
     Callable,
@@ -39,6 +40,7 @@ import trio
 import tractor

 from piker.brokers import get_brokermod
+from piker.service import find_service
 from piker.accounting import (
     Account,
     MktPair,
@@ -696,7 +698,12 @@ async def open_trade_dialog(
                 # sanity check all the mkt infos
                 for fqme, flume in feed.flumes.items():
                     mkt: MktPair = symcache.mktmaps.get(fqme) or mkt_by_fqme[fqme]
-                    assert mkt == flume.mkt
+                    if mkt != flume.mkt:
+                        diff: tuple = mkt - flume.mkt
+                        log.warning(
+                            'MktPair sig mismatch?\n'
+                            f'{pformat(diff)}'
+                        )

                 get_cost: Callable = getattr(
                     brokermod,
@@ -754,7 +761,7 @@ async def open_paperboi(
     service_name = f'paperboi.{broker}'

     async with (
-        tractor.find_actor(service_name) as portal,
+        find_service(service_name) as portal,
         tractor.open_nursery() as an,
     ):
         # NOTE: only spawn if no paperboi already is up since we likely
@@ -777,8 +784,10 @@ async def open_paperboi(
         ) as (ctx, first):
             yield ctx, first

-            # tear down connection and any spawned actor on exit
+            # ALWAYS tear down connection AND any newly spawned
+            # paperboi actor on exit!
             await ctx.cancel()
+
             if we_spawned:
                 await portal.cancel_actor()

@@ -1,30 +1,33 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers)
+# Copyright (C) 2018-present Tyler Goodlet
+# (in stewardship for pikers, everywhere.)

-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
+# This program is free software: you can redistribute it and/or
+# modify it under the terms of the GNU Affero General Public
+# License as published by the Free Software Foundation, either
+# version 3 of the License, or (at your option) any later version.

 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Affero General Public License for more details.

-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+# You should have received a copy of the GNU Affero General Public
+# License along with this program.  If not, see
+# <https://www.gnu.org/licenses/>.

 '''
 CLI commons.

 '''
 import os
-from contextlib import AsyncExitStack
+# from contextlib import AsyncExitStack
 from types import ModuleType

 import click
 import trio
 import tractor
+from tractor._multiaddr import parse_maddr

 from ..log import (
     get_console_log,
@@ -42,35 +45,97 @@ from .. import config
 log = get_logger('piker.cli')

+def load_trans_eps(
+    network: dict | None = None,
+    maddrs: list[tuple] | None = None,
+
+) -> dict[str, dict[str, dict]]:
+
+    # transport-oriented endpoint multi-addresses
+    eps: dict[
+        str,  # service name, eg. `pikerd`, `emsd`..
+
+        # libp2p style multi-addresses parsed into prot layers
+        list[dict[str, str | int]]
+    ] = {}
+
+    if (
+        network
+        and not maddrs
+    ):
+        # load network section and (attempt to) connect all endpoints
+        # which are reachable B)
+        for key, maddrs in network.items():
+            match key:
+
+                # TODO: resolve table across multiple discov
+                # prots Bo
+                case 'resolv':
+                    pass
+
+                case 'pikerd':
+                    dname: str = key
+                    for maddr in maddrs:
+                        layers: dict = parse_maddr(maddr)
+                        eps.setdefault(
+                            dname,
+                            [],
+                        ).append(layers)
+
+    elif maddrs:
+        # presume user is manually specifying the root actor ep(s).
+        eps['pikerd'] = [parse_maddr(m) for m in maddrs]
+
+    return eps

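`parse_maddr()` comes from `tractor`'s internals and isn't shown in this diff; based on how `layers` gets indexed in `pikerd` below, its output presumably nests one table per protocol layer. An assumed example (address and port illustrative):

    from tractor._multiaddr import parse_maddr

    # libp2p-style multiaddr -> per-protocol-layer tables
    layers: dict = parse_maddr('/ipv4/127.0.0.1/tcp/6116')
    # assumed shape: {'ipv4': {'addr': '127.0.0.1'}, 'tcp': {'port': 6116}}
    host: str = layers['ipv4']['addr']
    port: int = layers['tcp']['port']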
 @click.command()
-@click.option('--loglevel', '-l', default='warning', help='Logging level')
-@click.option('--tl', is_flag=True, help='Enable tractor logging')
-@click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
-@click.option('--host', '-h', default=None, help='Host addr to bind')
-@click.option('--port', '-p', default=None, help='Port number to bind')
-@click.option(
-    '--tsdb',
-    is_flag=True,
-    help='Enable local ``marketstore`` instance'
-)
-@click.option(
-    '--es',
-    is_flag=True,
-    help='Enable local ``elasticsearch`` instance'
-)
+@click.option(
+    '--loglevel',
+    '-l',
+    default='warning',
+    help='Logging level',
+)
+@click.option(
+    '--tl',
+    is_flag=True,
+    help='Enable tractor-runtime logs',
+)
+@click.option(
+    '--pdb',
+    is_flag=True,
+    help='Enable tractor debug mode',
+)
+@click.option(
+    '--maddr',
+    '-m',
+    default=None,
+    help='Multiaddrs to bind or contact',
+)
+# @click.option(
+#     '--tsdb',
+#     is_flag=True,
+#     help='Enable local ``marketstore`` instance'
+# )
+# @click.option(
+#     '--es',
+#     is_flag=True,
+#     help='Enable local ``elasticsearch`` instance'
+# )
 def pikerd(
+    maddr: list[str] | None,
     loglevel: str,
-    host: str,
-    port: int,
     tl: bool,
     pdb: bool,
-    tsdb: bool,
-    es: bool,
+    # tsdb: bool,
+    # es: bool,
 ):
     '''
     Spawn the piker broker-daemon.

     '''
+    from tractor.devx import maybe_open_crash_handler
+    with maybe_open_crash_handler(pdb=pdb):
         log = get_console_log(loglevel, name='cli')

         if pdb:
@@ -82,12 +147,32 @@ def pikerd(
                 "\n"
             ))

-    reg_addr: None | tuple[str, int] = None
-    if host or port:
-        reg_addr = (
-            host or _default_registry_host,
-            int(port) or _default_registry_port,
-        )
+        # service-actor registry endpoint socket-address set
+        regaddrs: list[tuple[str, int]] = []
+
+        conf, _ = config.load(
+            conf_name='conf',
+        )
+        network: dict = conf.get('network')
+        if (
+            network is None
+            and not maddr
+        ):
+            regaddrs = [(
+                _default_registry_host,
+                _default_registry_port,
+            )]
+
+        else:
+            eps: dict = load_trans_eps(
+                network,
+                maddr,
+            )
+            for layers in eps['pikerd']:
+                regaddrs.append((
+                    layers['ipv4']['addr'],
+                    layers['tcp']['port'],
+                ))

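Correspondingly, the `[network]` table consumed above presumably parses out of the user's `conf.toml` into a per-service list of multiaddr strings; a hypothetical parsed shape consistent with the logic above (values illustrative):

    # what `conf.get('network')` is expected to yield
    network: dict = {
        'pikerd': [
            '/ipv4/127.0.0.1/tcp/6116',
        ],
    }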
         from .. import service

@@ -96,31 +181,35 @@ def pikerd(

             async with (
                 service.open_pikerd(
+                    registry_addrs=regaddrs,
                     loglevel=loglevel,
                     debug_mode=pdb,
-                registry_addr=reg_addr,

                 ) as service_mngr,  # normally delivers a ``Services`` handle

-            AsyncExitStack() as stack,
+                # AsyncExitStack() as stack,
             ):
-            if tsdb:
-                dname, conf = await stack.enter_async_context(
-                    service.marketstore.start_ahab_daemon(
-                        service_mngr,
-                        loglevel=loglevel,
-                    )
-                )
-                log.info(f'TSDB `{dname}` up with conf:\n{conf}')
+                # TODO: spawn all other sub-actor daemons according to
+                # multiaddress endpoint spec defined by user config
+                assert service_mngr

-            if es:
-                dname, conf = await stack.enter_async_context(
-                    service.elastic.start_ahab_daemon(
-                        service_mngr,
-                        loglevel=loglevel,
-                    )
-                )
-                log.info(f'DB `{dname}` up with conf:\n{conf}')
+                # if tsdb:
+                #     dname, conf = await stack.enter_async_context(
+                #         service.marketstore.start_ahab_daemon(
+                #             service_mngr,
+                #             loglevel=loglevel,
+                #         )
+                #     )
+                #     log.info(f'TSDB `{dname}` up with conf:\n{conf}')
+
+                # if es:
+                #     dname, conf = await stack.enter_async_context(
+                #         service.elastic.start_ahab_daemon(
+                #             service_mngr,
+                #             loglevel=loglevel,
+                #         )
+                #     )
+                #     log.info(f'DB `{dname}` up with conf:\n{conf}')

                 await trio.sleep_forever()

| @click.option('--loglevel', '-l', default='warning', help='Logging level') | @click.option('--loglevel', '-l', default='warning', help='Logging level') | ||||||
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--configdir', '-c', help='Configuration directory')
-@click.option('--host', '-h', default=None, help='Host addr to bind')
-@click.option('--port', '-p', default=None, help='Port number to bind')
+@click.option(
+    '--pdb',
+    is_flag=True,
+    help='Enable runtime debug mode ',
+)
+@click.option(
+    '--maddr',
+    '-m',
+    default=None,
+    multiple=True,
+    help='Multiaddr to bind',
+)
+@click.option(
+    '--regaddr',
+    '-r',
+    default=None,
+    help='Registrar addr to contact',
+)
 @click.pass_context
 def cli(
     ctx: click.Context,
@@ -146,8 +251,11 @@ def cli(
     loglevel: str,
     tl: bool,
     configdir: str,
-    host: str,
-    port: int,
+    pdb: bool,
+
+    # TODO: make these list[str] with multiple -m maddr0 -m maddr1
+    maddr: list[str],
+    regaddr: str,

 ) -> None:
     if configdir is not None:
@@ -168,12 +276,20 @@ def cli(
     }
     assert brokermods

-    reg_addr: None | tuple[str, int] = None
-    if host or port:
-        reg_addr = (
-            host or _default_registry_host,
-            int(port) or _default_registry_port,
-        )
+    # TODO: load endpoints from `conf::[network].pikerd`
+    # - pikerd vs. regd, separate registry daemon?
+    # - expose datad vs. brokerd?
+    # - bind emsd with certain perms on public iface?
+    regaddrs: list[tuple[str, int]] = regaddr or [(
+        _default_registry_host,
+        _default_registry_port,
+    )]
+
+    # TODO: factor [network] section parsing out from pikerd
+    # above and call it here as well.
+    # if maddr:
+    #     for addr in maddr:
+    #         layers: dict = parse_maddr(addr)

     ctx.obj.update({
         'brokers': brokers,
@@ -183,7 +299,12 @@ def cli(
         'log': get_console_log(loglevel),
         'confdir': config._config_dir,
         'wl_path': config._watchlists_data_path,
-        'registry_addr': reg_addr,
+        'registry_addrs': regaddrs,
+        'pdb': pdb,  # debug mode flag
+
+        # TODO: endpoint parsing, pinging and binding
+        # on no existing server.
+        # 'maddrs': maddr,
     })

     # allow enabling same loglevel in ``tractor`` machinery
@@ -230,8 +351,7 @@ def services(config, tl, ports):


 def _load_clis() -> None:
-    from ..service import marketstore  # noqa
-    from ..service import elastic  # noqa
+    # from ..service import elastic  # noqa
     from ..brokers import cli  # noqa
     from ..ui import cli  # noqa
     from ..watchlists import cli  # noqa

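Editor's note: the new `--maddr`/`-m` flag relies on click's `multiple=True` semantics, where each repeated `-m` is collected into an ordered tuple on the parameter. A minimal standalone sketch of that behavior (the `demo` command and echo output below are hypothetical, not part of this changeset):

    import click

    @click.command()
    @click.option('--maddr', '-m', multiple=True, help='Multiaddr to bind')
    @click.option('--regaddr', '-r', default=None, help='Registrar addr to contact')
    def demo(
        maddr: tuple[str, ...],  # click packs repeated flags into a tuple
        regaddr: str | None,
    ) -> None:
        # e.g. `demo -m /ipv4/127.0.0.1/tcp/6116 -m /ipv4/0.0.0.0/tcp/6117`
        for addr in maddr:
            click.echo(f'binding multiaddr: {addr}')
        click.echo(f'registrar: {regaddr or "<default>"}')

    if __name__ == '__main__':
        demo()
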
@@ -104,14 +104,15 @@ def get_app_dir(
     # `tractor`) with the testing dir and check for it whenever we
     # detect `pytest` is being used (which it isn't under normal
     # operation).
-    if "pytest" in sys.modules:
-        import tractor
-        actor = tractor.current_actor(err_on_no_runtime=False)
-        if actor:  # runtime is up
-            rvs = tractor._state._runtime_vars
-            testdirpath = Path(rvs['piker_vars']['piker_test_dir'])
-            assert testdirpath.exists(), 'piker test harness might be borked!?'
-            app_name = str(testdirpath)
+    # if "pytest" in sys.modules:
+    #     import tractor
+    #     actor = tractor.current_actor(err_on_no_runtime=False)
+    #     if actor:  # runtime is up
+    #         rvs = tractor._state._runtime_vars
+    #         import pdbp; pdbp.set_trace()
+    #         testdirpath = Path(rvs['piker_vars']['piker_test_dir'])
+    #         assert testdirpath.exists(), 'piker test harness might be borked!?'
+    #         app_name = str(testdirpath)

     if platform.system() == 'Windows':
         key = "APPDATA" if roaming else "LOCALAPPDATA"
@@ -134,14 +135,19 @@ def get_app_dir(

 _click_config_dir: Path = Path(get_app_dir('piker'))
 _config_dir: Path = _click_config_dir
-_parent_user: str = os.environ.get('SUDO_USER')

-if _parent_user:
+# NOTE: when using `sudo` we attempt to determine the non-root user
+# and still use their normal config dir.
+if (
+    (_parent_user := os.environ.get('SUDO_USER'))
+    and
+    _parent_user != 'root'
+):
     non_root_user_dir = Path(
         os.path.expanduser(f'~{_parent_user}')
     )
     root: str = 'root'
-    _ccds: str = str(_click_config_dir)  # click config dir string
+    _ccds: str = str(_click_config_dir)  # click config dir as string
     i_tail: int = int(_ccds.rfind(root) + len(root))
     _config_dir = (
         non_root_user_dir
@@ -246,7 +252,8 @@ def repodir() -> Path:


 def load(
-    conf_name: str = 'brokers',  # appended with .toml suffix
+    # NOTE: always appended with .toml suffix
+    conf_name: str = 'conf',
    path: Path | None = None,

     decode: Callable[
@@ -357,7 +364,9 @@ def load_accounts(

 ) -> bidict[str, str | None]:

-    conf, path = load()
+    conf, path = load(
+        conf_name='brokers',
+    )
     accounts = bidict()
     for provider_name, section in conf.items():
         accounts_section = section.get('accounts')

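Editor's note: the walrus-gated `SUDO_USER` branch above splices the path tail that follows 'root' onto the invoking user's home dir, so a `sudo`-run piker still reads the real user's config. A self-contained sketch of that string surgery (the example paths and the simplified `/home/...` expansion are hypothetical; the real code uses `os.path.expanduser(f'~{user}')`):

    from pathlib import Path

    def remap_sudo_config_dir(
        click_config_dir: Path,   # e.g. Path('/root/.config/piker')
        parent_user: str,         # value of the SUDO_USER env var
    ) -> Path:
        ccds: str = str(click_config_dir)
        # index just past the trailing 'root' path component
        i_tail: int = ccds.rfind('root') + len('root')
        non_root_user_dir = Path(f'/home/{parent_user}')
        # graft the '.config/piker' tail onto the real user's home
        return non_root_user_dir / ccds[i_tail + 1:]

    assert remap_sudo_config_dir(
        Path('/root/.config/piker'),
        'pikeruser',
    ) == Path('/home/pikeruser/.config/piker')
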
@@ -43,8 +43,10 @@ from ._symcache import (
     SymbologyCache,
     open_symcache,
     get_symcache,
+    match_from_pairs,
 )
 from ._sampling import open_sample_stream
+from ..types import Struct


 __all__: list[str] = [
@@ -54,6 +56,7 @@ __all__: list[str] = [
     'ShmArray',
     'iterticks',
     'maybe_open_shm_array',
+    'match_from_pairs',
     'attach_shm_array',
     'open_shm_array',
     'get_shm_token',
@@ -62,6 +65,7 @@ __all__: list[str] = [
     'open_symcache',
     'open_sample_stream',
     'get_symcache',
+    'Struct',
     'SymbologyCache',
     'types',
 ]

@@ -41,6 +41,11 @@ if TYPE_CHECKING:
     )
     from piker.toolz import Profiler

+# default gap between bars: "bar gap multiplier"
+# - 0.5 is no overlap between OC arms,
+# - 1.0 is full overlap on each neighbor sample
+BGM: float = 0.16
+

 class IncrementalFormatter(msgspec.Struct):
     '''
@@ -513,6 +518,7 @@ class IncrementalFormatter(msgspec.Struct):


 class OHLCBarsFmtr(IncrementalFormatter):
+
     x_offset: np.ndarray = np.array([
         -0.5,
         0,
@@ -604,8 +610,9 @@ class OHLCBarsFmtr(IncrementalFormatter):
         vr: tuple[int, int],

         start: int = 0,  # XXX: do we need this?
+
         # 0.5 is no overlap between arms, 1.0 is full overlap
-        w: float = 0.16,
+        gap: float = BGM,

     ) -> tuple[
         np.ndarray,
@@ -622,7 +629,7 @@ class OHLCBarsFmtr(IncrementalFormatter):
             array[:-1],
             start,
             bar_w=self.index_step_size,
-            bar_gap=w * self.index_step_size,
+            bar_gap=gap * self.index_step_size,

             # XXX: don't ask, due to a ``numba`` bug..
             use_time_index=(self.index_field == 'time'),

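Editor's note: the new module-level `BGM` constant just centralizes the magic `0.16` that the OHLC formatting method previously took as its `w` default; the rendered gap scales with the per-sample index step. A quick arithmetic check (the unit step size is an assumption for illustration):

    BGM: float = 0.16
    index_step_size: float = 1.0  # assumed: one index unit per sample

    bar_w: float = index_step_size          # full bar slot width
    bar_gap: float = BGM * index_step_size  # whitespace between bars

    # per the comment above: 0.5 would mean no overlap between the
    # open/close arms of neighboring bars, 1.0 full overlap; 0.16
    # keeps bars dense while leaving the arms clearly separated.
    assert bar_gap == 0.16
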
@@ -33,6 +33,11 @@ from typing import (
 )

 import tractor
+from tractor import (
+    Context,
+    MsgStream,
+    Channel,
+)
 from tractor.trionics import (
     maybe_open_nursery,
 )
@@ -53,7 +58,10 @@ if TYPE_CHECKING:
     from ._sharedmem import (
         ShmArray,
     )
-    from .feed import _FeedsBus
+    from .feed import (
+        _FeedsBus,
+        Sub,
+    )


 # highest frequency sample step is 1 second by default, though in
@@ -94,7 +102,7 @@ class Sampler:
         float,
         list[
             float,
-            set[tractor.MsgStream]
+            set[MsgStream]
         ],
     ] = defaultdict(
         lambda: [
@@ -258,8 +266,8 @@ class Sampler:
             f'broadcasting {period_s} -> {last_ts}\n'
             # f'consumers: {subs}'
         )
-        borked: set[tractor.MsgStream] = set()
-        sent: set[tractor.MsgStream] = set()
+        borked: set[MsgStream] = set()
+        sent: set[MsgStream] = set()
         while True:
             try:
                 for stream in (subs - sent):
@@ -314,7 +322,7 @@ class Sampler:

 @tractor.context
 async def register_with_sampler(
-    ctx: tractor.Context,
+    ctx: Context,
     period_s: float,
     shms_by_period: dict[float, dict] | None = None,

@@ -649,12 +657,7 @@ async def sample_and_broadcast(
             # eventually block this producer end of the feed and
             # thus other consumers still attached.
             sub_key: str = broker_symbol.lower()
-            subs: list[
-                tuple[
-                    tractor.MsgStream | trio.MemorySendChannel,
-                    float | None,  # tick throttle in Hz
-                ]
-            ] = bus.get_subs(sub_key)
+            subs: set[Sub] = bus.get_subs(sub_key)

             # NOTE: by default the broker backend doesn't append
             # it's own "name" into the fqme schema (but maybe it
@@ -663,34 +666,40 @@ async def sample_and_broadcast(
             fqme: str = f'{broker_symbol}.{brokername}'
             lags: int = 0

-            # TODO: speed up this loop in an AOT compiled lang (like
-            # rust or nim or zig) and/or instead of doing a fan out to
-            # TCP sockets here, we add a shm-style tick queue which
-            # readers can pull from instead of placing the burden of
-            # broadcast on solely on this `brokerd` actor. see issues:
+            # XXX TODO XXX: speed up this loop in an AOT compiled
+            # lang (like rust or nim or zig)!
+            # AND/OR instead of doing a fan out to TCP sockets
+            # here, we add a shm-style tick queue which readers can
+            # pull from instead of placing the burden of broadcast
+            # on solely on this `brokerd` actor. see issues:
             # - https://github.com/pikers/piker/issues/98
             # - https://github.com/pikers/piker/issues/107

-            for (stream, tick_throttle) in subs.copy():
+            # for (stream, tick_throttle) in subs.copy():
+            for sub in subs.copy():
+                ipc: MsgStream = sub.ipc
+                throttle: float = sub.throttle_rate
                 try:
                     with trio.move_on_after(0.2) as cs:
-                        if tick_throttle:
+                        if throttle:
+                            send_chan: trio.abc.SendChannel = sub.send_chan
+
                             # this is a send mem chan that likely
                             # pushes to the ``uniform_rate_send()`` below.
                             try:
-                                stream.send_nowait(
+                                send_chan.send_nowait(
                                     (fqme, quote)
                                 )
                             except trio.WouldBlock:
                                 overruns[sub_key] += 1
-                                ctx = stream._ctx
-                                chan = ctx.chan
+                                ctx: Context = ipc._ctx
+                                chan: Channel = ctx.chan

                                 log.warning(
                                     f'Feed OVERRUN {sub_key}'
                                     '@{bus.brokername} -> \n'
                                     f'feed @ {chan.uid}\n'
-                                    f'throttle = {tick_throttle} Hz'
+                                    f'throttle = {throttle} Hz'
                                 )

                                 if overruns[sub_key] > 6:
@@ -707,10 +716,10 @@ async def sample_and_broadcast(
                                             f'{sub_key}:'
                                             f'{ctx.cid}@{chan.uid}'
                                         )
-                                        await stream.aclose()
+                                        await ipc.aclose()
                                         raise trio.BrokenResourceError
                         else:
-                            await stream.send(
+                            await ipc.send(
                                 {fqme: quote}
                             )

@@ -724,16 +733,16 @@ async def sample_and_broadcast(
                     trio.ClosedResourceError,
                     trio.EndOfChannel,
                 ):
-                    ctx = stream._ctx
-                    chan = ctx.chan
+                    ctx: Context = ipc._ctx
+                    chan: Channel = ctx.chan
                     if ctx:
                         log.warning(
                             'Dropped `brokerd`-quotes-feed connection:\n'
                             f'{broker_symbol}:'
                             f'{ctx.cid}@{chan.uid}'
                         )
-                    if tick_throttle:
-                        assert stream._closed
+                    if sub.throttle_rate:
+                        assert ipc._closed

                     # XXX: do we need to deregister here
                     # if it's done in the fee bus code?
@@ -742,7 +751,7 @@ async def sample_and_broadcast(
                     # since there seems to be some kinda race..
                     bus.remove_subs(
                         sub_key,
-                        {(stream, tick_throttle)},
+                        {sub},
                     )


@@ -750,7 +759,7 @@ async def uniform_rate_send(

     rate: float,
     quote_stream: trio.abc.ReceiveChannel,
-    stream: tractor.MsgStream,
+    stream: MsgStream,

     task_status: TaskStatus = trio.TASK_STATUS_IGNORED,

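Editor's note: the `Sub`-based rewrite keeps the core fan-out rule intact: throttled subscribers get a non-blocking `send_nowait()` into a bounded memory channel (overruns counted, producer never blocked) while unthrottled ones get a direct IPC send. A trio-only sketch of that backpressure pattern, with `FakeSub` and `fanout()` as hypothetical stand-ins for the real `Sub`/broadcast loop:

    from collections import defaultdict
    from dataclasses import dataclass

    import trio

    @dataclass(frozen=True, eq=False)  # identity-hashed so it can live in a set
    class FakeSub:
        send_chan: trio.MemorySendChannel
        throttle_rate: float | None = None

    async def fanout(
        quote: dict,
        subs: set[FakeSub],
        overruns: defaultdict[str, int],
        sub_key: str,
    ) -> None:
        for sub in subs.copy():
            if sub.throttle_rate:
                try:
                    # never block the producer: drop-and-count on overrun
                    sub.send_chan.send_nowait((sub_key, quote))
                except trio.WouldBlock:
                    overruns[sub_key] += 1

    async def main() -> None:
        send, _recv = trio.open_memory_channel(2)  # tiny buffer on purpose
        overruns: defaultdict[str, int] = defaultdict(int)
        sub = FakeSub(send_chan=send, throttle_rate=4.0)
        for i in range(5):  # overfill the 2-slot buffer
            await fanout({'last': 100 + i}, {sub}, overruns, 'btcusdt')
        print(f'overruns: {overruns["btcusdt"]}')  # -> 3

    trio.run(main)
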
@@ -31,11 +31,13 @@ from pathlib import Path
 from pprint import pformat
 from typing import (
     Any,
+    Sequence,
+    Hashable,
     TYPE_CHECKING,
 )
 from types import ModuleType

-from fuzzywuzzy import process as fuzzy
+from rapidfuzz import process as fuzzy
 import tomli_w  # for fast symbol cache writing
 import tractor
 import trio
@@ -128,8 +130,8 @@ class SymbologyCache(Struct):
           - `.get_mkt_pairs()`: returning a table of pair-`Struct`
             types, custom defined by the particular backend.

-        AND, the required `.get_mkt_info()` module-level endpoint which
-        maps `fqme: str` -> `MktPair`s.
+        AND, the required `.get_mkt_info()` module-level endpoint
+        which maps `fqme: str` -> `MktPair`s.

         These tables are then used to fill out the `.assets`, `.pairs` and
         `.mktmaps` tables on this cache instance, respectively.
@@ -308,7 +310,7 @@ class SymbologyCache(Struct):
         matches in a `dict` including the `MktPair` values.

         '''
-        matches = fuzzy.extractBests(
+        matches = fuzzy.extract(
             pattern,
             getattr(self, table),
             score_cutoff=50,
@@ -466,3 +468,43 @@ def get_symcache(
         pdbp.xpm()

     return symcache
+
+
+def match_from_pairs(
+    pairs: dict[str, Struct],
+    query: str,
+    score_cutoff: int = 50,
+    **extract_kwargs,
+
+) -> dict[str, Struct]:
+    '''
+    Fuzzy search over a "pairs table" maintained by most backends
+    as part of their symbology-info caching internals.
+
+    Scan the native symbol key set and return best ranked
+    matches back in a new `dict`.
+
+    '''
+
+    # TODO: somehow cache this list (per call) like we were in
+    # `open_symbol_search()`?
+    keys: list[str] = list(pairs)
+    matches: list[tuple[
+        Sequence[Hashable],  # matching input key
+        Any,  # scores
+        Any,
+    ]] = fuzzy.extract(
+        # NOTE: most backends provide keys uppercased
+        query=query,
+        choices=keys,
+        score_cutoff=score_cutoff,
+        **extract_kwargs,
+    )
+
+    # pop and repack pairs in output dict
+    matched_pairs: dict[str, Struct] = {}
+    for item in matches:
+        pair_key: str = item[0]
+        matched_pairs[pair_key] = pairs[pair_key]
+
+    return matched_pairs

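Editor's note on the `fuzzywuzzy` -> `rapidfuzz` swap: `rapidfuzz`'s `process.extract()` yields `(choice, score, index)` triples, which is why `match_from_pairs()` above keys on `item[0]`. A usage sketch with a hypothetical (uppercased, exchange-style) pairs table standing in for real backend `Struct` values:

    from rapidfuzz import process as fuzzy

    pairs: dict[str, dict] = {
        'BTCUSDT': {'bs_mktid': 'BTCUSDT'},
        'BTCUSDC': {'bs_mktid': 'BTCUSDC'},
        'ETHUSDT': {'bs_mktid': 'ETHUSDT'},
    }

    # each entry is a (choice, score, index) triple, hence the
    # `item[0]` key lookup in `match_from_pairs()`.
    matches = fuzzy.extract(
        'BTCUSD',  # NOTE: unlike fuzzywuzzy, rapidfuzz does not
                   # lowercase inputs for you; match the table casing
        list(pairs),
        score_cutoff=50,
    )
    matched = {key: pairs[key] for key, _score, _idx in matches}
    print(matched)  # the BTC pairs rank highest
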
@@ -1,336 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-'''
-Financial time series processing utilities usually
-pertaining to OHLCV style sampled data.
-
-Routines are generally implemented in either ``numpy`` or
-``polars`` B)
-
-'''
-from __future__ import annotations
-from typing import Literal
-from math import (
-    ceil,
-    floor,
-)
-
-import numpy as np
-import polars as pl
-
-from ._sharedmem import ShmArray
-from ..toolz.profile import (
-    Profiler,
-    pg_profile_enabled,
-    ms_slower_then,
-)
-
-
-def slice_from_time(
-    arr: np.ndarray,
-    start_t: float,
-    stop_t: float,
-    step: float,  # sampler period step-diff
-
-) -> slice:
-    '''
-    Calculate array indices mapped from a time range and return them in
-    a slice.
-
-    Given an input array with an epoch `'time'` series entry, calculate
-    the indices which span the time range and return in a slice. Presume
-    each `'time'` step increment is uniform and when the time stamp
-    series contains gaps (the uniform presumption is untrue) use
-    ``np.searchsorted()`` binary search to look up the appropriate
-    index.
-
-    '''
-    profiler = Profiler(
-        msg='slice_from_time()',
-        disabled=not pg_profile_enabled(),
-        ms_threshold=ms_slower_then,
-    )
-
-    times = arr['time']
-    t_first = floor(times[0])
-    t_last = ceil(times[-1])
-
-    # the greatest index we can return which slices to the
-    # end of the input array.
-    read_i_max = arr.shape[0]
-
-    # compute (presumed) uniform-time-step index offsets
-    i_start_t = floor(start_t)
-    read_i_start = floor(((i_start_t - t_first) // step)) - 1
-
-    i_stop_t = ceil(stop_t)
-
-    # XXX: edge case -> always set stop index to last in array whenever
-    # the input stop time is detected to be greater then the equiv time
-    # stamp at that last entry.
-    if i_stop_t >= t_last:
-        read_i_stop = read_i_max
-    else:
-        read_i_stop = ceil((i_stop_t - t_first) // step) + 1
-
-    # always clip outputs to array support
-    # for read start:
-    # - never allow a start < the 0 index
-    # - never allow an end index > the read array len
-    read_i_start = min(
-        max(0, read_i_start),
-        read_i_max - 1,
-    )
-    read_i_stop = max(
-        0,
-        min(read_i_stop, read_i_max),
-    )
-
-    # check for larger-then-latest calculated index for given start
-    # time, in which case we do a binary search for the correct index.
-    # NOTE: this is usually the result of a time series with time gaps
-    # where it is expected that each index step maps to a uniform step
-    # in the time stamp series.
-    t_iv_start = times[read_i_start]
-    if (
-        t_iv_start > i_start_t
-    ):
-        # do a binary search for the best index mapping to ``start_t``
-        # given we measured an overshoot using the uniform-time-step
-        # calculation from above.
-
-        # TODO: once we start caching these per source-array,
-        # we can just overwrite ``read_i_start`` directly.
-        new_read_i_start = np.searchsorted(
-            times,
-            i_start_t,
-            side='left',
-        )
-
-        # TODO: minimize binary search work as much as possible:
-        # - cache these remap values which compensate for gaps in the
-        #   uniform time step basis where we calc a later start
-        #   index for the given input ``start_t``.
-        # - can we shorten the input search sequence by heuristic?
-        #   up_to_arith_start = index[:read_i_start]
-
-        if (
-            new_read_i_start <= read_i_start
-        ):
-            # t_diff = t_iv_start - start_t
-            # print(
-            #     f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
-            #     f'start_t:{start_t} -> 0index start_t:{t_iv_start}\n'
-            #     f'diff: {t_diff}\n'
-            #     f'REMAPPED START i: {read_i_start} -> {new_read_i_start}\n'
-            # )
-            read_i_start = new_read_i_start
-
-    t_iv_stop = times[read_i_stop - 1]
-    if (
-        t_iv_stop > i_stop_t
-    ):
-        # t_diff = stop_t - t_iv_stop
-        # print(
-        #     f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
-        #     f'calced iv stop:{t_iv_stop} -> stop_t:{stop_t}\n'
-        #     f'diff: {t_diff}\n'
-        #     # f'SHOULD REMAP STOP: {read_i_start} -> {new_read_i_start}\n'
-        # )
-        new_read_i_stop = np.searchsorted(
-            times[read_i_start:],
-            # times,
-            i_stop_t,
-            side='right',
-        )
-
-        if (
-            new_read_i_stop <= read_i_stop
-        ):
-            read_i_stop = read_i_start + new_read_i_stop + 1
-
-    # sanity checks for range size
-    # samples = (i_stop_t - i_start_t) // step
-    # index_diff = read_i_stop - read_i_start + 1
-    # if index_diff > (samples + 3):
-    #     breakpoint()
-
-    # read-relative indexes: gives a slice where `shm.array[read_slc]`
-    # will be the data spanning the input time range `start_t` ->
-    # `stop_t`
-    read_slc = slice(
-        int(read_i_start),
-        int(read_i_stop),
-    )
-
-    profiler(
-        'slicing complete'
-        # f'{start_t} -> {abs_slc.start} | {read_slc.start}\n'
-        # f'{stop_t} -> {abs_slc.stop} | {read_slc.stop}\n'
-    )
-
-    # NOTE: if caller needs absolute buffer indices they can
-    # slice the buffer abs index like so:
-    # index = arr['index']
-    # abs_indx = index[read_slc]
-    # abs_slc = slice(
-    #     int(abs_indx[0]),
-    #     int(abs_indx[-1]),
-    # )
-
-    return read_slc
-
-
-def detect_null_time_gap(
-    shm: ShmArray,
-    imargin: int = 1,
-
-) -> tuple[float, float] | None:
-    '''
-    Detect if there are any zero-epoch stamped rows in
-    the presumed 'time' field-column.
-
-    Filter to the gap and return a surrounding index range.
-
-    NOTE: for now presumes only ONE gap XD
-
-    '''
-    # ensure we read buffer state only once so that ShmArray rt
-    # circular-buffer updates don't cause a indexing/size mismatch.
-    array: np.ndarray = shm.array
-
-    zero_pred: np.ndarray = array['time'] == 0
-    zero_t: np.ndarray = array[zero_pred]
-
-    if zero_t.size:
-        istart, iend = zero_t['index'][[0, -1]]
-        start, end = shm._array['time'][
-            [istart - imargin, iend + imargin]
-        ]
-        return (
-            istart - imargin,
-            start,
-            end,
-            iend + imargin,
-        )
-
-    return None
-
-
-t_unit: Literal = Literal[
-    'days',
-    'hours',
-    'minutes',
-    'seconds',
-    'miliseconds',
-    'microseconds',
-    'nanoseconds',
-]
-
-
-def with_dts(
-    df: pl.DataFrame,
-    time_col: str = 'time',
-) -> pl.DataFrame:
-    '''
-    Insert datetime (casted) columns to a (presumably) OHLC sampled
-    time series with an epoch-time column keyed by ``time_col``.
-
-    '''
-    return df.with_columns([
-        pl.col(time_col).shift(1).suffix('_prev'),
-        pl.col(time_col).diff().alias('s_diff'),
-        pl.from_epoch(pl.col(time_col)).alias('dt'),
-    ]).with_columns([
-        pl.from_epoch(pl.col(f'{time_col}_prev')).alias('dt_prev'),
-        pl.col('dt').diff().alias('dt_diff'),
-    ]) #.with_columns(
-        # pl.col('dt').diff().dt.days().alias('days_dt_diff'),
-    # )
-
-
-def detect_time_gaps(
-    df: pl.DataFrame,
-
-    time_col: str = 'time',
-    # epoch sampling step diff
-    expect_period: float = 60,
-
-    # datetime diff unit and gap value
-    # crypto mkts
-    # gap_dt_unit: t_unit = 'minutes',
-    # gap_thresh: int = 1,
-
-    # NOTE: legacy stock mkts have venue operating hours
-    # and thus gaps normally no more then 1-2 days at
-    # a time.
-    # XXX -> must be valid ``polars.Expr.dt.<name>``
-    # TODO: allow passing in a frame of operating hours
-    # durations/ranges for faster legit gap checks.
-    gap_dt_unit: t_unit = 'days',
-    gap_thresh: int = 1,
-
-) -> pl.DataFrame:
-    '''
-    Filter to OHLC datums which contain sample step gaps.
-
-    For eg. legacy markets which have venue close gaps and/or
-    actual missing data segments.
-
-    '''
-    return (
-        with_dts(df)
-        .filter(
-            pl.col('s_diff').abs() > expect_period
-        )
-        .filter(
-            getattr(
-                pl.col('dt_diff').dt,
-                gap_dt_unit,
-            )().abs() > gap_thresh
-        )
-    )
-
-
-def detect_price_gaps(
-    df: pl.DataFrame,
-    gt_multiplier: float = 2.,
-    price_fields: list[str] = ['high', 'low'],
-
-) -> pl.DataFrame:
-    '''
-    Detect gaps in clearing price over an OHLC series.
-
-    2 types of gaps generally exist; up gaps and down gaps:
-
-    - UP gap: when any next sample's lo price is strictly greater
-      then the current sample's hi price.
-
-    - DOWN gap: when any next sample's hi price is strictly
-      less then the current samples lo price.
-
-    '''
-    # return df.filter(
-    #     pl.col('high') - ) > expect_period,
-    # ).select([
-    #     pl.dt.datetime(pl.col(time_col).shift(1)).suffix('_previous'),
-    #     pl.all(),
-    # ]).select([
-    #     pl.all(),
-    #     (pl.col(time_col) - pl.col(f'{time_col}_previous')).alias('diff'),
-    # ])
-    ...

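Editor's note before this module goes: the removed `slice_from_time()` guessed indices arithmetically from a uniform step and only fell back to `np.searchsorted()` binary search when the guess overshot due to gaps in the stamp series. A minimal numpy sketch of that fallback (the sample series with a venue-closure style gap is hypothetical):

    import numpy as np

    step: float = 60.0  # 1m sample period
    times = np.array([0., 60., 120., 3600., 3660., 3720.])  # gap after 120s

    start_t: float = 3600.0
    t_first: float = times[0]

    # uniform-step guess, valid only when there are no gaps..
    guess: int = int((start_t - t_first) // step)  # -> 60, way out of range

    # ..so clip to the array support, check the actual stamp, and
    # binary search on overshoot just like the original routine.
    guess = min(guess, len(times) - 1)
    if times[guess] > start_t:
        idx = int(np.searchsorted(times, start_t, side='left'))
    else:
        idx = guess

    assert times[idx] == 3600.0
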
@@ -234,10 +234,13 @@ async def _reconnect_forever(
             f'{url} trying (RE)CONNECT'
         )

-        async with trio.open_nursery() as n:
-            cs = nobsws._cs = n.cancel_scope
-            ws: WebSocketConnection
-            async with open_websocket_url(url) as ws:
+        ws: WebSocketConnection
+        try:
+            async with (
+                trio.open_nursery() as n,
+                open_websocket_url(url) as ws,
+            ):
+                cs = nobsws._cs = n.cancel_scope
                 nobsws._ws = ws
                 log.info(
                     f'{src_mod}\n'
@@ -269,9 +272,11 @@ async def _reconnect_forever(
                 # to let tasks run **inside** the ws open block above.
                 nobsws._connected.set()
                 await trio.sleep_forever()
+        except HandshakeError:
+            log.exception('Retrying connection')

-            # ws open block end
-        # nursery block end
+        # ws & nursery block ends
+
         nobsws._connected = trio.Event()
         if cs.cancelled_caught:
             log.cancel(
@@ -284,7 +289,8 @@ async def _reconnect_forever(
                 and not nobsws._connected.is_set()
             )

-        # -> from here, move to next reconnect attempt
+        # -> from here, move to next reconnect attempt iteration
+        # in the while loop above Bp

     else:
         log.exception(
@@ -353,8 +359,8 @@ async def open_autorecon_ws(


 '''
-JSONRPC response-request style machinery for transparent multiplexing of msgs
-over a NoBsWs.
+JSONRPC response-request style machinery for transparent multiplexing
+of msgs over a `NoBsWs`.

 '''

@@ -371,44 +377,83 @@ async def open_jsonrpc_session(
     url: str,
     start_id: int = 0,
     response_type: type = JSONRPCResult,
-    request_type: Optional[type] = None,
-    request_hook: Optional[Callable] = None,
-    error_hook: Optional[Callable] = None,
+    msg_recv_timeout: float = float('inf'),
+    # ^NOTE, since only `deribit` is using this jsonrpc stuff atm
+    # and options mkts are generally "slow moving"..
+    #
+    # FURTHER if we break the underlying ws connection then since we
+    # don't pass a `fixture` to the task that manages `NoBsWs`, i.e.
+    # `_reconnect_forever()`, the jsonrpc "transport pipe" get's
+    # broken and never restored with wtv init sequence is required to
+    # re-establish a working req-resp session.

 ) -> Callable[[str, dict], dict]:
+    '''
+    Init a json-RPC-over-websocket connection to the provided `url`.
+
+    A `json_rpc: Callable[[str, dict], dict` is delivered to the
+    caller for sending requests and a bg-`trio.Task` handles
+    processing of response msgs including error reporting/raising in
+    the parent/caller task.
+
+    '''
+    # NOTE, store all request msgs so we can raise errors on the
+    # caller side!
+    req_msgs: dict[int, dict] = {}

     async with (
-        trio.open_nursery() as n,
-        open_autorecon_ws(url) as ws
+        trio.open_nursery() as tn,
+        open_autorecon_ws(
+            url=url,
+            msg_recv_timeout=msg_recv_timeout,
+        ) as ws
     ):
-        rpc_id: Iterable = count(start_id)
+        rpc_id: Iterable[int] = count(start_id)
         rpc_results: dict[int, dict] = {}

-        async def json_rpc(method: str, params: dict) -> dict:
+        async def json_rpc(
+            method: str,
+            params: dict,
+        ) -> dict:
             '''
             perform a json rpc call and wait for the result, raise exception in
             case of error field present on response
             '''
+            nonlocal req_msgs
+
+            req_id: int = next(rpc_id)
             msg = {
                 'jsonrpc': '2.0',
-                'id': next(rpc_id),
+                'id': req_id,
                 'method': method,
                 'params': params
             }
             _id = msg['id']

-            rpc_results[_id] = {
+            result = rpc_results[_id] = {
                 'result': None,
-                'event': trio.Event()
+                'error': None,
+                'event': trio.Event(),  # signal caller resp arrived
             }
+            req_msgs[_id] = msg

             await ws.send_msg(msg)

+            # wait for reponse before unblocking requester code
             await rpc_results[_id]['event'].wait()

-            ret = rpc_results[_id]['result']
-
-            del rpc_results[_id]
+            if (maybe_result := result['result']):
+                ret = maybe_result
+                del rpc_results[_id]
+
+            else:
+                err = result['error']
+                raise Exception(
+                    f'JSONRPC request failed\n'
+                    f'req: {msg}\n'
+                    f'resp: {err}\n'
+                )

             if ret.error is not None:
                 raise Exception(json.dumps(ret.error, indent=4))

@@ -422,6 +467,7 @@ async def open_jsonrpc_session(
             the server side.

             '''
+            nonlocal req_msgs
             async for msg in ws:
                 match msg:
                     case {
@@ -445,19 +491,28 @@ async def open_jsonrpc_session(
                         'params': _,
                     }:
                         log.debug(f'Recieved\n{msg}')
-                        if request_hook:
-                            await request_hook(request_type(**msg))

                     case {
                         'error': error
                     }:
-                        log.warning(f'Recieved\n{error}')
-                        if error_hook:
-                            await error_hook(response_type(**msg))
+                        # retreive orig request msg, set error
+                        # response in original "result" msg,
+                        # THEN FINALLY set the event to signal caller
+                        # to raise the error in the parent task.
+                        req_id: int = error['id']
+                        req_msg: dict = req_msgs[req_id]
+                        result: dict = rpc_results[req_id]
+                        result['error'] = error
+                        result['event'].set()
+                        log.error(
+                            f'JSONRPC request failed\n'
+                            f'req: {req_msg}\n'
+                            f'resp: {error}\n'
+                        )

                     case _:
                         log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')

-        n.start_soon(recv_task)
+        tn.start_soon(recv_task)
         yield json_rpc
-        n.cancel_scope.cancel()
+        tn.cancel_scope.cancel()

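Editor's note: with the `request_hook`/`error_hook` params gone, a failed request now raises directly in the requesting task. A hypothetical usage sketch, assuming `open_jsonrpc_session()` is entered as an async context manager like the neighboring `open_autorecon_ws()` (the deribit-style url and method are placeholders):

    import trio

    async def main() -> None:
        async with open_jsonrpc_session(
            'wss://test.deribit.com/ws/api/v2',
            msg_recv_timeout=60.,
        ) as json_rpc:
            try:
                res = await json_rpc(
                    'public/get_time',
                    params={},
                )
                print(res)
            except Exception as err:
                # errors now surface here, in the caller task, with
                # the original request msg included in the report.
                print(f'rpc failed: {err}')

    # trio.run(main)  # requires a live jsonrpc endpoint
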
@@ -28,6 +28,7 @@ module.
 from __future__ import annotations
 from collections import (
     defaultdict,
+    abc,
 )
 from contextlib import asynccontextmanager as acm
 from functools import partial
@@ -36,7 +37,6 @@ from types import ModuleType
 from typing import (
     Any,
     AsyncContextManager,
-    Optional,
     Awaitable,
     Sequence,
 )
@@ -45,10 +45,7 @@ import trio
 from trio.abc import ReceiveChannel
 from trio_typing import TaskStatus
 import tractor
-from tractor.trionics import (
-    maybe_open_context,
-    gather_contexts,
-)
+from tractor import trionics

 from piker.accounting import (
     MktPair,
@@ -59,7 +56,6 @@ from piker.brokers import get_brokermod
 from piker.service import (
     maybe_spawn_brokerd,
 )
-from piker.ui import _search
 from piker.calc import humanize
 from ._util import (
     log,
@@ -70,7 +66,7 @@ from .validate import (
     FeedInit,
     validate_backend,
 )
-from .history import (
+from ..tsp import (
     manage_history,
 )
 from .ingest import get_ingestormod
@@ -80,6 +76,31 @@ from ._sampling import (
 )


+class Sub(Struct, frozen=True):
+    '''
+    A live feed subscription entry.
+
+    Contains meta-data on the remote-actor type (in functionality
+    terms) as well as refs to IPC streams and sampler runtime
+    params.
+
+    '''
+    ipc: tractor.MsgStream
+    send_chan: trio.abc.SendChannel | None = None
+
+    # tick throttle rate in Hz; determines how live
+    # quotes/ticks should be downsampled before relay
+    # to the receiving remote consumer (process).
+    throttle_rate: float | None = None
+    _throttle_cs: trio.CancelScope | None = None
+
+    # TODO: actually stash comms info for the far end to allow
+    # `.tsp`, `.fsp` and `.data._sampling` sub-systems to re-render
+    # the data view as needed via msging with the `._remote_ctl`
+    # ipc ctx.
+    rc_ui: bool = False
+
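Editor's note: because `Sub` is declared `frozen=True` its instances are hashable (msgspec frozen structs support hashing), which is what lets the bus keep per-key `set[Sub]` tables below. A sketch of wiring up a throttled subscriber entry (the `ipc_stream` arg stands in for a live `tractor.MsgStream` and the helper itself is hypothetical):

    import trio

    def register_throttled_sub(
        bus: _FeedsBus,
        bs_fqme: str,
        ipc_stream,  # a live tractor.MsgStream in the real code
        rate_hz: float,
    ) -> tuple[Sub, trio.MemoryReceiveChannel]:
        # bounded mem-chan feeding a `uniform_rate_send()` style task
        send, recv = trio.open_memory_channel(2 ** 10)
        sub = Sub(
            ipc=ipc_stream,
            send_chan=send,
            throttle_rate=rate_hz,
        )
        bus.add_subs(bs_fqme, {sub})
        return sub, recv
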
 class _FeedsBus(Struct):
     '''
     Data feeds broadcaster and persistence management.
@@ -104,13 +125,7 @@ class _FeedsBus(Struct):

     _subscribers: defaultdict[
         str,
-        set[
-            tuple[
-                tractor.MsgStream | trio.MemorySendChannel,
-                # tractor.Context,
-                float | None,  # tick throttle in Hz
-            ]
-        ]
+        set[Sub]
     ] = defaultdict(set)

     async def start_task(
@@ -125,6 +140,8 @@ class _FeedsBus(Struct):
                 trio.CancelScope] = trio.TASK_STATUS_IGNORED,
         ) -> None:
             with trio.CancelScope() as cs:
+                # TODO: shouldn't this be a direct await to avoid
+                # cancellation contagion to the bus nursery!?!?!
                 await self.nursery.start(
                     target,
                     *args,
@@ -142,31 +159,28 @@ class _FeedsBus(Struct):
     def get_subs(
         self,
         key: str,
-    ) -> set[
-        tuple[
-            tractor.MsgStream | trio.MemorySendChannel,
-            float | None,  # tick throttle in Hz
-        ]
-    ]:
+
+    ) -> set[Sub]:
         '''
         Get the ``set`` of consumer subscription entries for the given key.

         '''
         return self._subscribers[key]

+    def subs_items(self) -> abc.ItemsView[str, set[Sub]]:
+        return self._subscribers.items()
+
     def add_subs(
         self,
         key: str,
-        subs: set[tuple[
-            tractor.MsgStream | trio.MemorySendChannel,
-            float | None,  # tick throttle in Hz
-        ]],
-    ) -> set[tuple]:
+        subs: set[Sub],
+
+    ) -> set[Sub]:
         '''
         Add a ``set`` of consumer subscription entries for the given key.

         '''
-        _subs: set[tuple] = self._subscribers[key]
+        _subs: set[Sub] = self._subscribers.setdefault(key, set())
         _subs.update(subs)
         return _subs

@@ -331,7 +345,6 @@ async def allocate_persistent_feed(
     ) = await bus.nursery.start(
         manage_history,
         mod,
-        bus,
         mkt,
         some_data_ready,
         feed_is_live,
@@ -408,6 +421,12 @@ async def allocate_persistent_feed(
         rt_shm.array['time'][1] = ts + 1

     elif hist_shm.array.size == 0:
+        for i in range(100):
+            await trio.sleep(0.1)
+            if hist_shm.array.size > 0:
+                break
+        else:
+            await tractor.pause()
             raise RuntimeError(f'History (1m) Shm for {fqme} is empty!?')

@@ -438,8 +457,9 @@ async def open_feed_bus(
     symbols: list[str],  # normally expected to the broker-specific fqme

     loglevel: str = 'error',
-    tick_throttle: Optional[float] = None,
+    tick_throttle: float | None = None,
     start_stream: bool = True,
+    allow_remote_ctl_ui: bool = False,

 ) -> dict[
     str,  # fqme
@@ -454,8 +474,12 @@ async def open_feed_bus(
     if loglevel is None:
         loglevel = tractor.current_actor().loglevel

-    # XXX: required to propagate ``tractor`` loglevel to piker logging
-    get_console_log(loglevel or tractor.current_actor().loglevel)
+    # XXX: required to propagate ``tractor`` loglevel to piker
+    # logging
+    get_console_log(
+        loglevel
+        or tractor.current_actor().loglevel
+    )

     # local state sanity checks
     # TODO: check for any stale shm entries for this symbol
@@ -465,7 +489,7 @@ async def open_feed_bus(
     assert 'brokerd' in servicename
     assert brokername in servicename

-    bus = get_feed_bus(brokername)
+    bus: _FeedsBus = get_feed_bus(brokername)
     sub_registered = trio.Event()

     flumes: dict[str, Flume] = {}
@@ -512,10 +536,10 @@ async def open_feed_bus(
         # pack for ``.started()`` sync msg
         flumes[fqme] = flume

-        # we use the broker-specific fqme (bs_fqme) for the
-        # sampler subscription since the backend isn't (yet) expected to
-        # append it's own name to the fqme, so we filter on keys which
-        # *do not* include that name (e.g .ib) .
+        # we use the broker-specific fqme (bs_fqme) for the sampler
+        # subscription since the backend isn't (yet) expected to
+        # append it's own name to the fqme, so we filter on keys
+        # which *do not* include that name (e.g .ib) .
         bus._subscribers.setdefault(bs_fqme, set())

     # sync feed subscribers with flume handles
@@ -554,49 +578,60 @@ async def open_feed_bus(
             # that the ``sample_and_broadcast()`` task (spawned inside
             # ``allocate_persistent_feed()``) will push real-time quote
             # (ticks) to this new consumer.
-
+            cs: trio.CancelScope | None = None
+            send: trio.MemorySendChannel | None = None
             if tick_throttle:
                 flume.throttle_rate = tick_throttle
 | 
 | ||||||
|                 # open a bg task which receives quotes over a mem chan |                 # open a bg task which receives quotes over a mem | ||||||
|                 # and only pushes them to the target actor-consumer at |                 # chan and only pushes them to the target | ||||||
|                 # a max ``tick_throttle`` instantaneous rate. |                 # actor-consumer at a max ``tick_throttle`` | ||||||
|  |                 # (instantaneous) rate. | ||||||
|                 send, recv = trio.open_memory_channel(2**10) |                 send, recv = trio.open_memory_channel(2**10) | ||||||
| 
 | 
 | ||||||
|                 cs = await bus.start_task( |                 # NOTE: the ``.send`` channel here is a swapped-in | ||||||
|  |                 # trio mem chan which gets `.send()`-ed by the normal | ||||||
|  |                 # sampler task but instead of being sent directly | ||||||
|  |                 # over the IPC msg stream it's the throttle task | ||||||
|  |                 # does the work of incrementally forwarding to the | ||||||
|  |                 # IPC stream at the throttle rate. | ||||||
|  |                 cs: trio.CancelScope = await bus.start_task( | ||||||
|                     uniform_rate_send, |                     uniform_rate_send, | ||||||
|                     tick_throttle, |                     tick_throttle, | ||||||
|                     recv, |                     recv, | ||||||
|                     stream, |                     stream, | ||||||
|                 ) |                 ) | ||||||
|                 # NOTE: so the ``send`` channel here is actually a swapped |  | ||||||
|                 # in trio mem chan which gets pushed by the normal sampler |  | ||||||
|                 # task but instead of being sent directly over the IPC msg |  | ||||||
|                 # stream it's the throttle task does the work of |  | ||||||
|                 # incrementally forwarding to the IPC stream at the throttle |  | ||||||
|                 # rate. |  | ||||||
|                 send._ctx = ctx  # mock internal ``tractor.MsgStream`` ref |  | ||||||
|                 sub = (send, tick_throttle) |  | ||||||
| 
 | 
 | ||||||
|             else: |             sub = Sub( | ||||||
|                 sub = (stream, tick_throttle) |                 ipc=stream, | ||||||
|  |                 send_chan=send, | ||||||
|  |                 throttle_rate=tick_throttle, | ||||||
|  |                 _throttle_cs=cs, | ||||||
|  |                 rc_ui=allow_remote_ctl_ui, | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|             # TODO: add an api for this on the bus? |             # TODO: add an api for this on the bus? | ||||||
|             # maybe use the current task-id to key the sub list that's |             # maybe use the current task-id to key the sub list that's | ||||||
|             # added / removed? Or maybe we can add a general |             # added / removed? Or maybe we can add a general | ||||||
|             # pause-resume by sub-key api? |             # pause-resume by sub-key api? | ||||||
|             bs_fqme = fqme.removesuffix(f'.{brokername}') |             bs_fqme = fqme.removesuffix(f'.{brokername}') | ||||||
|             local_subs.setdefault(bs_fqme, set()).add(sub) |             local_subs.setdefault( | ||||||
|             bus.add_subs(bs_fqme, {sub}) |                 bs_fqme, | ||||||
|  |                 set() | ||||||
|  |             ).add(sub) | ||||||
|  |             bus.add_subs( | ||||||
|  |                 bs_fqme, | ||||||
|  |                 {sub} | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|         # sync caller with all subs registered state |         # sync caller with all subs registered state | ||||||
|         sub_registered.set() |         sub_registered.set() | ||||||
| 
 | 
 | ||||||
|         uid = ctx.chan.uid |         uid: tuple[str, str] = ctx.chan.uid | ||||||
|         try: |         try: | ||||||
|             # ctrl protocol for start/stop of quote streams based on UI |             # ctrl protocol for start/stop of live quote streams | ||||||
|             # state (eg. don't need a stream when a symbol isn't being |             # based on UI state (eg. don't need a stream when | ||||||
|             # displayed). |             # a symbol isn't being displayed). | ||||||
|             async for msg in stream: |             async for msg in stream: | ||||||
| 
 | 
 | ||||||
|                 if msg == 'pause': |                 if msg == 'pause': | ||||||
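For reference, the throttling idea behind `uniform_rate_send` can be sketched roughly as below. This is a simplified stand-in (latest-wins backlog collapse at a fixed pace), not piker's actual task:

```python
import trio

async def uniform_rate_send(
    rate: float,                       # max msgs/sec forwarded downstream
    recv: trio.MemoryReceiveChannel,
    stream,                            # anything with an async `.send()`
) -> None:
    period = 1 / rate
    async for msg in recv:
        # collapse any backlog so only the most recent quote is pushed
        try:
            while True:
                msg = recv.receive_nowait()
        except trio.WouldBlock:
            pass
        await stream.send(msg)
        await trio.sleep(period)  # pace the next forward
```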
@@ -734,6 +769,7 @@ async def install_brokerd_search(
                 except trio.EndOfChannel:
                     return {}

+            from piker.ui import _search
             async with _search.register_symbol_search(

                 provider_name=brokermod.name,
@@ -752,7 +788,7 @@ async def install_brokerd_search(
 async def maybe_open_feed(

     fqmes: list[str],
-    loglevel: Optional[str] = None,
+    loglevel: str | None = None,

     **kwargs,

@@ -768,7 +804,7 @@ async def maybe_open_feed(
     '''
     fqme = fqmes[0]

-    async with maybe_open_context(
+    async with trionics.maybe_open_context(
         acm_func=open_feed,
         kwargs={
             'fqmes': fqmes,
@@ -788,7 +824,7 @@ async def maybe_open_feed(
             # add a new broadcast subscription for the quote stream
             # if this feed is likely already in use

-            async with gather_contexts(
+            async with trionics.gather_contexts(
                 mngrs=[stream.subscribe() for stream in feed.streams.values()]
             ) as bstreams:
                 for bstream, flume in zip(bstreams, feed.flumes.values()):
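The `gather_contexts()` helper (re-namespaced here under `trionics`, which this diff imports from `tractor`) enters a sequence of async context managers and yields all their results together. A rough sequential analogue using only the stdlib, for intuition; the real helper additionally enters them concurrently:

```python
from contextlib import AsyncExitStack, asynccontextmanager

@asynccontextmanager
async def gather_contexts_seq(mngrs):
    # enter each acm in order, unwinding all of them on exit
    async with AsyncExitStack() as stack:
        yield tuple([
            await stack.enter_async_context(mngr)
            for mngr in mngrs
        ])
```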
@@ -812,6 +848,8 @@ async def open_feed(
     start_stream: bool = True,
     tick_throttle: float | None = None,  # Hz

+    allow_remote_ctl_ui: bool = False,
+
 ) -> Feed:
     '''
     Open a "data feed" which provides streamed real-time quotes.
@@ -848,7 +886,7 @@ async def open_feed(
         )

     portals: tuple[tractor.Portal]
-    async with gather_contexts(
+    async with trionics.gather_contexts(
         brokerd_ctxs,
     ) as portals:

@@ -894,13 +932,19 @@ async def open_feed(
                     # of these stream open sequences sequentially per
                     # backend? .. need some thought!
                     allow_overruns=True,
+
+                    # NOTE: UI actors (like charts) can allow
+                    # remote control of certain graphics rendering
+                    # capabilities via the
+                    # `.ui._remote_ctl.remote_annotate()` msg loop.
+                    allow_remote_ctl_ui=allow_remote_ctl_ui,
                 )
             )

         assert len(feed.mods) == len(feed.portals)

         async with (
-            gather_contexts(bus_ctxs) as ctxs,
+            trionics.gather_contexts(bus_ctxs) as ctxs,
         ):
             stream_ctxs: list[tractor.MsgStream] = []
             for (
@@ -942,7 +986,7 @@ async def open_feed(
             brokermod: ModuleType
             fqmes: list[str]
             async with (
-                gather_contexts(stream_ctxs) as streams,
+                trionics.gather_contexts(stream_ctxs) as streams,
             ):
                 for (
                     stream,
@@ -958,6 +1002,12 @@ async def open_feed(
                         if brokermod.name == flume.mkt.broker:
                             flume.stream = stream

-                assert len(feed.mods) == len(feed.portals) == len(feed.streams)
+                assert (
+                    len(feed.mods)
+                    ==
+                    len(feed.portals)
+                    ==
+                    len(feed.streams)
+                )

                 yield feed

@@ -42,35 +42,15 @@ if TYPE_CHECKING:
     from .feed import Feed


-# TODO: ideas for further abstractions as per
-# https://github.com/pikers/piker/issues/216 and
-# https://github.com/pikers/piker/issues/270:
-# - a ``Cascade`` would be the minimal "connection" of 2 ``Flumes``
-#   as per circuit parlance:
-#   https://en.wikipedia.org/wiki/Two-port_network#Cascade_connection
-#     - could cover the combination of our `FspAdmin` and the
-#       backend `.fsp._engine` related machinery to "connect" one flume
-#       to another?
-# - a (financial signal) ``Flow`` would be the a "collection" of such
-#    minmial cascades. Some engineering based jargon concepts:
-#     - https://en.wikipedia.org/wiki/Signal_chain
-#     - https://en.wikipedia.org/wiki/Daisy_chain_(electrical_engineering)
-#     - https://en.wikipedia.org/wiki/Audio_signal_flow
-#     - https://en.wikipedia.org/wiki/Digital_signal_processing#Implementation
-#     - https://en.wikipedia.org/wiki/Dataflow_programming
-#     - https://en.wikipedia.org/wiki/Signal_programming
-#     - https://en.wikipedia.org/wiki/Incremental_computing
-
-
 class Flume(Struct):
     '''
-    Composite reference type which points to all the addressing handles
-    and other meta-data necessary for the read, measure and management
-    of a set of real-time updated data flows.
+    Composite reference type which points to all the addressing
+    handles and other meta-data necessary for the read, measure and
+    management of a set of real-time updated data flows.

     Can be thought of as a "flow descriptor" or "flow frame" which
-    describes the high level properties of a set of data flows that can
-    be used seamlessly across process-memory boundaries.
+    describes the high level properties of a set of data flows that
+    can be used seamlessly across process-memory boundaries.

     Each instance's sub-components normally include:
      - a msg oriented quote stream provided via an IPC transport
@@ -93,6 +73,7 @@ class Flume(Struct):
     # private shm refs loaded dynamically from tokens
     _hist_shm: ShmArray | None = None
     _rt_shm: ShmArray | None = None
+    _readonly: bool = True

     stream: tractor.MsgStream | None = None
     izero_hist: int = 0
@@ -109,7 +90,7 @@ class Flume(Struct):
         if self._rt_shm is None:
             self._rt_shm = attach_shm_array(
                 token=self._rt_shm_token,
-                readonly=True,
+                readonly=self._readonly,
             )

         return self._rt_shm
@@ -122,12 +103,10 @@ class Flume(Struct):
                 'No shm token has been set for the history buffer?'
             )

-        if (
-            self._hist_shm is None
-        ):
+        if self._hist_shm is None:
             self._hist_shm = attach_shm_array(
                 token=self._hist_shm_token,
-                readonly=True,
+                readonly=self._readonly,
             )

         return self._hist_shm
@@ -146,10 +125,10 @@ class Flume(Struct):
         period and ratio between them.

         '''
-        times = self.hist_shm.array['time']
-        end = pendulum.from_timestamp(times[-1])
-        start = pendulum.from_timestamp(times[times != times[-1]][-1])
-        hist_step_size_s = (end - start).seconds
+        times: np.ndarray = self.hist_shm.array['time']
+        end: pendulum.DateTime = pendulum.from_timestamp(times[-1])
+        start: pendulum.DateTime = pendulum.from_timestamp(times[times != times[-1]][-1])
+        hist_step_size_s: float = (end - start).seconds

         times = self.rt_shm.array['time']
         end = pendulum.from_timestamp(times[-1])
@@ -169,17 +148,25 @@ class Flume(Struct):
         msg = self.to_dict()
         msg['mkt'] = self.mkt.to_dict()

-        # can't serialize the stream or feed objects, it's expected
-        # you'll have a ref to it since this msg should be rxed on
-        # a stream on whatever far end IPC..
+        # NOTE: pop all un-msg-serializable fields:
+        # - `tractor.MsgStream`
+        # - `Feed`
+        # - `ShmArray`
+        # it's expected the `.from_msg()` on the other side
+        # will get instead some kind of msg-compat version
+        # that it can load.
         msg.pop('stream')
         msg.pop('feed')
+        msg.pop('_rt_shm')
+        msg.pop('_hist_shm')
+
         return msg

     @classmethod
     def from_msg(
         cls,
         msg: dict,
+        readonly: bool = True,

     ) -> dict:
         '''
@@ -190,7 +177,11 @@ class Flume(Struct):
         mkt_msg = msg.pop('mkt')
         from ..accounting import MktPair  # cycle otherwise..
         mkt = MktPair.from_msg(mkt_msg)
-        return cls(mkt=mkt, **msg)
+        msg |= {'_readonly': readonly}
+        return cls(
+            mkt=mkt,
+            **msg,
+        )

     def get_index(
         self,
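The `Flume` changes above thread a `_readonly` flag through shm attachment and harden msg serialization by popping live handles before IPC transit. A toy round-trip sketch of that pop-then-rebuild pattern, using a hypothetical `FlowLike` stand-in rather than piker's actual `Flume`:

```python
from dataclasses import dataclass, asdict

@dataclass
class FlowLike:
    name: str
    _readonly: bool = True
    stream: object | None = None   # live handle: never serialized

    def to_msg(self) -> dict:
        msg = asdict(self)
        msg.pop('stream')  # un-msg-serializable; re-attached far side
        return msg

    @classmethod
    def from_msg(cls, msg: dict, readonly: bool = True) -> 'FlowLike':
        # the receiver chooses its own shm access policy
        msg |= {'_readonly': readonly}
        return cls(**msg)

flow = FlowLike(name='btcusdt.rt')
assert FlowLike.from_msg(flow.to_msg(), readonly=False)._readonly is False
```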
@@ -1,982 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-'''
-Historical data business logic for load, backfill and tsdb storage.
-
-'''
-from __future__ import annotations
-# from collections import (
-#     Counter,
-# )
-from datetime import datetime
-from functools import partial
-# import time
-from types import ModuleType
-from typing import (
-    Callable,
-    TYPE_CHECKING,
-)
-
-import trio
-from trio_typing import TaskStatus
-import tractor
-from pendulum import (
-    Duration,
-    from_timestamp,
-)
-import numpy as np
-
-from ..accounting import (
-    MktPair,
-)
-from ._util import (
-    log,
-)
-from ._sharedmem import (
-    maybe_open_shm_array,
-    ShmArray,
-)
-from ._source import def_iohlcv_fields
-from ._sampling import (
-    open_sample_stream,
-)
-from ..brokers._util import (
-    DataUnavailable,
-)
-
-if TYPE_CHECKING:
-    from bidict import bidict
-    from ..service.marketstore import StorageClient
-    from .feed import _FeedsBus
-
-
-# `ShmArray` buffer sizing configuration:
-_mins_in_day = int(60 * 24)
-# how much is probably dependent on lifestyle
-# but we reco a buncha times (but only on a
-# run-every-other-day kinda week).
-_secs_in_day = int(60 * _mins_in_day)
-_days_in_week: int = 7
-
-_days_worth: int = 3
-_default_hist_size: int = 6 * 365 * _mins_in_day
-_hist_buffer_start = int(
-    _default_hist_size - round(7 * _mins_in_day)
-)
-
-_default_rt_size: int = _days_worth * _secs_in_day
-# NOTE: start the append index in rt buffer such that 1 day's worth
-# can be appended before overrun.
-_rt_buffer_start = int((_days_worth - 1) * _secs_in_day)
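For concreteness, the sizing constants in this (now removed) module work out as follows; a quick sanity check of the arithmetic, not code from the commit:

```python
_mins_in_day = 60 * 24            # 1440
_secs_in_day = 60 * _mins_in_day  # 86_400

_default_hist_size = 6 * 365 * _mins_in_day   # 3_153_600 1m rows (~6y)
_hist_buffer_start = _default_hist_size - round(7 * _mins_in_day)
# -> 3_143_520: leaves ~7 days of 1m rows free for live appends

_days_worth = 3
_default_rt_size = _days_worth * _secs_in_day        # 259_200 1s rows
_rt_buffer_start = (_days_worth - 1) * _secs_in_day  # 172_800
# -> exactly 1 day (86_400 rows) of append headroom before overrun
```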
-
-
-def diff_history(
-    array: np.ndarray,
-    append_until_dt: datetime | None = None,
-    prepend_until_dt: datetime | None = None,
-
-) -> np.ndarray:
-
-    # no diffing with tsdb dt index possible..
-    if (
-        prepend_until_dt is None
-        and append_until_dt is None
-    ):
-        return array
-
-    times = array['time']
-
-    if append_until_dt:
-        return array[times < append_until_dt.timestamp()]
-    else:
-        return array[times >= prepend_until_dt.timestamp()]
-
-
-async def shm_push_in_between(
-    shm: ShmArray,
-    to_push: np.ndarray,
-    prepend_index: int,
-
-    update_start_on_prepend: bool = False,
-
-) -> int:
-    shm.push(
-        to_push,
-        prepend=True,
-
-        # XXX: only update the ._first index if no tsdb
-        # segment was previously prepended by the
-        # parent task.
-        update_first=update_start_on_prepend,
-
-        # XXX: only prepend from a manually calculated shm
-        # index if there was already a tsdb history
-        # segment prepended (since then the
-        # ._first.value is going to be wayyy in the
-        # past!)
-        start=(
-            prepend_index
-            if not update_start_on_prepend
-            else None
-        ),
-    )
-    # XXX: extremely important, there can be no checkpoints
-    # in the block above to avoid entering new ``frames``
-    # values while we're pipelining the current ones to
-    # memory...
-    array = shm.array
-    zeros = array[array['low'] == 0]
-
-    # always backfill gaps with the earliest (price) datum's
-    # value to avoid the y-ranger including zeros and completely
-    # stretching the y-axis..
-    if 0 < zeros.size:
-        zeros[[
-            'open',
-            'high',
-            'low',
-            'close',
-        ]] = shm._array[zeros['index'][0] - 1]['close']
-        # await tractor.pause()
-
-
-async def start_backfill(
-    get_hist,
-    mod: ModuleType,
-    mkt: MktPair,
-    shm: ShmArray,
-    timeframe: float,
-
-    backfill_from_shm_index: int,
-    backfill_from_dt: datetime,
-
-    sampler_stream: tractor.MsgStream,
-
-    backfill_until_dt: datetime | None = None,
-    storage: StorageClient | None = None,
-
-    write_tsdb: bool = True,
-
-    task_status: TaskStatus[tuple] = trio.TASK_STATUS_IGNORED,
-
-) -> int:
-
-        # let caller unblock and deliver latest history frame
-        # and use to signal that backfilling the shm gap until
-        # the tsdb end is complete!
-        bf_done = trio.Event()
-        task_status.started(bf_done)
-
-        # based on the sample step size, maybe load a certain amount history
-        update_start_on_prepend: bool = False
-        if backfill_until_dt is None:
-
-            # TODO: drop this right and just expose the backfill
-            # limits inside a [storage] section in conf.toml?
-            # when no tsdb "last datum" is provided, we just load
-            # some near-term history.
-            # periods = {
-            #     1: {'days': 1},
-            #     60: {'days': 14},
-            # }
-
-            # do a decently sized backfill and load it into storage.
-            periods = {
-                1: {'days': 6},
-                60: {'years': 6},
-            }
-            period_duration: int = periods[timeframe]
-
-            update_start_on_prepend = True
-
-            # NOTE: manually set the "latest" datetime which we intend to
-            # backfill history "until" so as to adhere to the history
-            # settings above when the tsdb is detected as being empty.
-            backfill_until_dt = backfill_from_dt.subtract(**period_duration)
-
-
-        # TODO: can we drop this? without conc i don't think this
-        # is necessary any more?
-        # configure async query throttling
-        # rate = config.get('rate', 1)
-        # XXX: legacy from ``trimeter`` code but unsupported now.
-        # erlangs = config.get('erlangs', 1)
-        # avoid duplicate history frames with a set of datetime frame
-        # starts and associated counts of how many duplicates we see
-        # per time stamp.
-        # starts: Counter[datetime] = Counter()
-
-        # conduct "backward history gap filling" where we push to
-        # the shm buffer until we have history back until the
-        # latest entry loaded from the tsdb's table B)
-        last_start_dt: datetime = backfill_from_dt
-        next_prepend_index: int = backfill_from_shm_index
-
-        while last_start_dt > backfill_until_dt:
-
-            log.debug(
-                f'Requesting {timeframe}s frame ending in {last_start_dt}'
-            )
-
-            try:
-                (
-                    array,
-                    next_start_dt,
-                    next_end_dt,
-                ) = await get_hist(
-                    timeframe,
-                    end_dt=last_start_dt,
-                )
-
-            # broker says there never was or is no more history to pull
-            except DataUnavailable:
-                log.warning(
-                    f'NO-MORE-DATA: backend {mod.name} halted history!?'
-                )
-
-                # ugh, what's a better way?
-                # TODO: fwiw, we probably want a way to signal a throttle
-                # condition (eg. with ib) so that we can halt the
-                # request loop until the condition is resolved?
-                return
-
-            # TODO: drop this? see todo above..
-            # if (
-            #     next_start_dt in starts
-            #     and starts[next_start_dt] <= 6
-            # ):
-            #     start_dt = min(starts)
-            #     log.warning(
-            #         f"{mkt.fqme}: skipping duplicate frame @ {next_start_dt}"
-            #     )
-            #     starts[start_dt] += 1
-            #     await tractor.pause()
-            #     continue
-
-            # elif starts[next_start_dt] > 6:
-            #     log.warning(
-            #         f'NO-MORE-DATA: backend {mod.name} before {next_start_dt}?'
-            #     )
-            #     return
-
-            # # only update new start point if not-yet-seen
-            # starts[next_start_dt] += 1
-
-            assert array['time'][0] == next_start_dt.timestamp()
-
-            diff = last_start_dt - next_start_dt
-            frame_time_diff_s = diff.seconds
-
-            # frame's worth of sample-period-steps, in seconds
-            frame_size_s = len(array) * timeframe
-            expected_frame_size_s = frame_size_s + timeframe
-            if frame_time_diff_s > expected_frame_size_s:
-
-                # XXX: query result includes a start point prior to our
-                # expected "frame size" and thus is likely some kind of
-                # history gap (eg. market closed period, outage, etc.)
-                # so just report it to console for now.
-                log.warning(
-                    f'History frame ending @ {last_start_dt} appears to have a gap:\n'
-                    f'{diff} ~= {frame_time_diff_s} seconds'
-                )
-
-            to_push = diff_history(
-                array,
-                prepend_until_dt=backfill_until_dt,
-            )
-            ln = len(to_push)
-            if ln:
-                log.info(f'{ln} bars for {next_start_dt} -> {last_start_dt}')
-
-            else:
-                log.warning(
-                    '0 BARS TO PUSH after diff!?\n'
-                    f'{next_start_dt} -> {last_start_dt}'
-                )
-
-            # bail gracefully on shm allocation overrun/full
-            # condition
-            try:
-                await shm_push_in_between(
-                    shm,
-                    to_push,
-                    prepend_index=next_prepend_index,
-                    update_start_on_prepend=update_start_on_prepend,
-                )
-                await sampler_stream.send({
-                    'broadcast_all': {
-                        'backfilling': (mkt.fqme, timeframe),
-                    },
-                })
-
-                # decrement next prepend point
-                next_prepend_index = next_prepend_index - ln
-                last_start_dt = next_start_dt
-
-            except ValueError as ve:
-                _ve = ve
-                log.error(
-                    f'Shm prepend OVERRUN on: {next_start_dt} -> {last_start_dt}?'
-                )
-
-                if next_prepend_index < ln:
-                    log.warning(
-                        f'Shm buffer can only hold {next_prepend_index} more rows..\n'
-                        f'Appending those from recent {ln}-sized frame, no more!'
-                    )
-
-                to_push = to_push[-next_prepend_index + 1:]
-                await shm_push_in_between(
-                    shm,
-                    to_push,
-                    prepend_index=next_prepend_index,
-                    update_start_on_prepend=update_start_on_prepend,
-                )
-                await sampler_stream.send({
-                    'broadcast_all': {
-                        'backfilling': (mkt.fqme, timeframe),
-                    },
-                })
-
-                # can't push the entire frame? so
-                # push only the amount that can fit..
-                break
-
-            log.info(
-                f'Shm pushed {ln} frame:\n'
-                f'{next_start_dt} -> {last_start_dt}'
-            )
-
-            # FINALLY, maybe write immediately to the tsdb backend for
-            # long-term storage.
-            if (
-                storage is not None
-                and write_tsdb
-            ):
-                log.info(
-                    f'Writing {ln} frame to storage:\n'
-                    f'{next_start_dt} -> {last_start_dt}'
-                )
-
-                # always drop the src asset token for
-                # non-currency-pair like market types (for now)
-                if mkt.dst.atype not in {
-                    'crypto',
-                    'crypto_currency',
-                    'fiat',  # a "forex pair"
-                }:
-                    # for now, our table key schema is not including
-                    # the dst[/src] source asset token.
-                    col_sym_key: str = mkt.get_fqme(
-                        delim_char='',
-                        without_src=True,
-                    )
-                else:
-                    col_sym_key: str = mkt.get_fqme(delim_char='')
-
-                # TODO: implement parquet append!?
-                await storage.write_ohlcv(
-                    col_sym_key,
-                    shm.array,
-                    timeframe,
-                )
-        else:
-            # finally filled gap
-            log.info(
-                f'Finished filling gap to tsdb start @ {backfill_until_dt}!'
-            )
-            # conduct tsdb timestamp gap detection and backfill any
-            # seemingly missing sequence segments..
-            # TODO: ideally these never exist but somehow it seems
-            # sometimes we're writing zero-ed segments on certain
-            # (teardown) cases?
-            from ._timeseries import detect_null_time_gap
-
-            gap_indices: tuple | None = detect_null_time_gap(shm)
-            while gap_indices:
-                (
-                    istart,
-                    start,
-                    end,
-                    iend,
-                ) = gap_indices
-
-                start_dt = from_timestamp(start)
-                end_dt = from_timestamp(end)
-                (
-                    array,
-                    next_start_dt,
-                    next_end_dt,
-                ) = await get_hist(
-                    timeframe,
-                    start_dt=start_dt,
-                    end_dt=end_dt,
-                )
-
-                # XXX TODO: pretty sure if i plot tsla, btcusdt.binance
-                # and mnq.cme.ib this causes a Qt crash XXDDD
-
-                # make sure we don't overrun the buffer start
-                len_to_push: int = min(iend, array.size)
-                to_push: np.ndarray = array[-len_to_push:]
-                await shm_push_in_between(
-                    shm,
-                    to_push,
-                    prepend_index=iend,
-                    update_start_on_prepend=False,
-                )
-
-                # TODO: UI side needs IPC event to update..
-                # - make sure the UI actually always handles
-                #  this update!
-                # - remember that in the display side, only refresh this
-                #   if the respective history is actually "in view".
-                #   loop
-                await sampler_stream.send({
-                    'broadcast_all': {
-                        'backfilling': (mkt.fqme, timeframe),
-                    },
-                })
-                gap_indices: tuple | None = detect_null_time_gap(shm)
-
-        # XXX: extremely important, there can be no checkpoints
-        # in the block above to avoid entering new ``frames``
-        # values while we're pipelining the current ones to
-        # memory...
-        # await sampler_stream.send('broadcast_all')
-
-        # short-circuit (for now)
-        bf_done.set()
-
-
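The gap heuristic inside `start_backfill()`'s request loop compares the wall-clock span of a returned frame against the span implied by its row count, with one sample period of slack. A worked example with assumed numbers:

```python
timeframe = 60          # 1m bars
n_rows = 100            # rows returned by the backend query
frame_size_s = n_rows * timeframe                 # 6000s implied by row count
expected_frame_size_s = frame_size_s + timeframe  # +1 step of slack -> 6060s

# if the requested dt range actually spanned e.g. an overnight close:
frame_time_diff_s = 8 * 60 * 60  # 28_800s between first and last row
assert frame_time_diff_s > expected_frame_size_s  # -> log a gap warning
```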
-async def back_load_from_tsdb(
-    storemod: ModuleType,
-    storage: StorageClient,
-
-    fqme: str,
-
-    tsdb_history: np.ndarray,
-
-    last_tsdb_dt: datetime,
-    latest_start_dt: datetime,
-    latest_end_dt: datetime,
-
-    bf_done: trio.Event,
-
-    timeframe: int,
-    shm: ShmArray,
-):
-    assert len(tsdb_history)
-
-    # sync to backend history task's query/load completion
-    # if bf_done:
-    #     await bf_done.wait()
-
-    # TODO: eventually it'd be nice to not require a shm array/buffer
-    # to accomplish this.. maybe we can do some kind of tsdb direct to
-    # graphics format eventually in a child-actor?
-    if storemod.name == 'nativedb':
-        return
-
-        await tractor.pause()
-        assert shm._first.value == 0
-
-    array = shm.array
-
-    # if timeframe == 1:
-    #     times = shm.array['time']
-    #     assert (times[1] - times[0]) == 1
-
-    if len(array):
-        shm_last_dt = from_timestamp(
-            shm.array[0]['time']
-        )
-    else:
-        shm_last_dt = None
-
-    if last_tsdb_dt:
-        assert shm_last_dt >= last_tsdb_dt
-
-    # do diff against start index of last frame of history and only
-    # fill in an amount of datums from tsdb allows for most recent
-    # to be loaded into mem *before* tsdb data.
-    if (
-        last_tsdb_dt
-        and latest_start_dt
-    ):
-        backfilled_size_s = (
-            latest_start_dt - last_tsdb_dt
-        ).seconds
-        # if the shm buffer len is not large enough to contain
-        # all missing data between the most recent backend-queried frame
-        # and the most recent dt-index in the db we warn that we only
-        # want to load a portion of the next tsdb query to fill that
-        # space.
-        log.info(
-            f'{backfilled_size_s} seconds worth of {timeframe}s loaded'
-        )
-
-    # Load TSDB history into shm buffer (for display) if there is
-    # remaining buffer space.
-
-    time_key: str = 'time'
-    if getattr(storemod, 'ohlc_key_map', False):
-        keymap: bidict = storemod.ohlc_key_map
-        time_key: str = keymap.inverse['time']
-
-    # if (
-    #     not len(tsdb_history)
-    # ):
-    #     return
-
-    tsdb_last_frame_start: datetime = last_tsdb_dt
-    # load as much from storage into shm possible (depends on
-    # user's shm size settings).
-    while shm._first.value > 0:
-
-        tsdb_history = await storage.read_ohlcv(
-            fqme,
-            timeframe=timeframe,
-            end=tsdb_last_frame_start,
-        )
-
-        # # empty query
-        # if not len(tsdb_history):
-        #     break
-
-        next_start = tsdb_history[time_key][0]
-        if next_start >= tsdb_last_frame_start:
-            # no earlier data detected
-            break
-
-        else:
-            tsdb_last_frame_start = next_start
-
-        # TODO: see if there's faster multi-field reads:
-        # https://numpy.org/doc/stable/user/basics.rec.html#accessing-multiple-fields
-        # re-index with a `time` and index field
-        prepend_start = shm._first.value
-
-        to_push = tsdb_history[-prepend_start:]
-        shm.push(
-            to_push,
-
-            # insert the history pre a "days worth" of samples
-            # to leave some real-time buffer space at the end.
-            prepend=True,
-            # update_first=False,
-            # start=prepend_start,
-            field_map=storemod.ohlc_key_map,
-        )
-
-        log.info(f'Loaded {to_push.shape} datums from storage')
-        tsdb_last_frame_start = tsdb_history[time_key][0]
-
-        # manually trigger step update to update charts/fsps
-        # which need an incremental update.
-        # NOTE: the way this works is super duper
-        # un-intuitive right now:
-        # - the broadcaster fires a msg to the fsp subsystem.
-        # - fsp subsys then checks for a sample step diff and
-        #   possibly recomputes prepended history.
-        # - the fsp then sends back to the parent actor
-        #   (usually a chart showing graphics for said fsp)
-        #   which tells the chart to conduct a manual full
-        #   graphics loop cycle.
-        # await sampler_stream.send('broadcast_all')
-
-
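Boiled down, `back_load_from_tsdb()`'s main loop is a cursor walk backwards through storage: keep reading earlier frames and prepending them until the shm buffer's first index hits 0 or the store has nothing earlier. A rough sketch with hypothetical stand-in callables, assuming an epoch-seconds cursor:

```python
async def back_load(shm_first_index, read_frame_ending_at, last_ts):
    frame_start = last_ts
    while shm_first_index() > 0:
        history = await read_frame_ending_at(frame_start)
        next_start = history['time'][0]
        if next_start >= frame_start:
            break  # no earlier data detected
        frame_start = next_start
        # ...prepend `history` into the shm buffer here...
```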
| async def tsdb_backfill( |  | ||||||
|     mod: ModuleType, |  | ||||||
|     storemod: ModuleType, |  | ||||||
|     tn: trio.Nursery, |  | ||||||
| 
 |  | ||||||
|     storage: StorageClient, |  | ||||||
|     mkt: MktPair, |  | ||||||
|     shm: ShmArray, |  | ||||||
|     timeframe: float, |  | ||||||
| 
 |  | ||||||
|     sampler_stream: tractor.MsgStream, |  | ||||||
| 
 |  | ||||||
|     task_status: TaskStatus[ |  | ||||||
|         tuple[ShmArray, ShmArray] |  | ||||||
|     ] = trio.TASK_STATUS_IGNORED, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
| 
 |  | ||||||
|     get_hist: Callable[ |  | ||||||
|         [int, datetime, datetime], |  | ||||||
|         tuple[np.ndarray, str] |  | ||||||
|     ] |  | ||||||
|     config: dict[str, int] |  | ||||||
|     async with mod.open_history_client( |  | ||||||
|         mkt, |  | ||||||
|     ) as (get_hist, config): |  | ||||||
|         log.info(f'{mod} history client returned backfill config: {config}') |  | ||||||
| 
 |  | ||||||
|         # get latest query's worth of history all the way |  | ||||||
|         # back to what is recorded in the tsdb |  | ||||||
|         try: |  | ||||||
|             array, mr_start_dt, mr_end_dt = await get_hist( |  | ||||||
|                 timeframe, |  | ||||||
|                 end_dt=None, |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|         # XXX: timeframe not supported for backend (since |  | ||||||
|         # above exception type), terminate immediately since |  | ||||||
|         # there's no backfilling possible. |  | ||||||
|         except DataUnavailable: |  | ||||||
|             task_status.started() |  | ||||||
|             return |  | ||||||
| 
 |  | ||||||
|         # TODO: fill in non-zero epoch time values ALWAYS! |  | ||||||
|         # hist_shm._array['time'] = np.arange( |  | ||||||
|         #     start= |  | ||||||
| 
 |  | ||||||
|         # NOTE: removed for now since it'll always break |  | ||||||
|         # on the first 60s of the venue open.. |  | ||||||
|         # times: np.ndarray = array['time'] |  | ||||||
|         # # sample period step size in seconds |  | ||||||
|         # step_size_s = ( |  | ||||||
|         #     from_timestamp(times[-1]) |  | ||||||
|         #     - from_timestamp(times[-2]) |  | ||||||
|         # ).seconds |  | ||||||
| 
 |  | ||||||
|         # if step_size_s not in (1, 60): |  | ||||||
|         #     log.error(f'Last 2 sample period is off!? -> {step_size_s}') |  | ||||||
|         #     step_size_s = ( |  | ||||||
|         #         from_timestamp(times[-2]) |  | ||||||
|         #         - from_timestamp(times[-3]) |  | ||||||
|         #     ).seconds |  | ||||||
| 
 |  | ||||||
|         # NOTE: on the first history, most recent history |  | ||||||
|         # frame we PREPEND from the current shm ._last index |  | ||||||
|         # and thus a gap between the earliest datum loaded here |  | ||||||
|         # and the latest loaded from the tsdb may exist! |  | ||||||
|         log.info(f'Pushing {array.size} to shm!') |  | ||||||
|         shm.push( |  | ||||||
|             array, |  | ||||||
|             prepend=True,  # append on first frame |  | ||||||
|         ) |  | ||||||
|         backfill_gap_from_shm_index: int = shm._first.value + 1 |  | ||||||
| 
 |  | ||||||
|         # tell parent task to continue |  | ||||||
|         task_status.started() |  | ||||||
| 
 |  | ||||||
|         # loads a (large) frame of data from the tsdb depending |  | ||||||
|         # on the db's query size limit; our "nativedb" (using |  | ||||||
|         # parquet) generally can load the entire history into mem |  | ||||||
|         # but if not then below the remaining history can be lazy |  | ||||||
|         # loaded? |  | ||||||
|         fqme: str = mkt.fqme |  | ||||||
|         tsdb_entry: tuple | None =  await storage.load( |  | ||||||
|             fqme, |  | ||||||
|             timeframe=timeframe, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         last_tsdb_dt: datetime | None = None |  | ||||||
|         if tsdb_entry: |  | ||||||
|             ( |  | ||||||
|                 tsdb_history, |  | ||||||
|                 first_tsdb_dt, |  | ||||||
|                 last_tsdb_dt, |  | ||||||
|             ) = tsdb_entry |  | ||||||
| 
 |  | ||||||
|             # calc the index from which the tsdb data should be |  | ||||||
|             # prepended, presuming there is a gap between the |  | ||||||
|             # latest frame (loaded/read above) and the latest |  | ||||||
|             # sample loaded from the tsdb. |  | ||||||
|             backfill_diff: Duration =  mr_start_dt - last_tsdb_dt |  | ||||||
|             offset_s: float = backfill_diff.in_seconds() |  | ||||||
|             offset_samples: int = round(offset_s / timeframe) |  | ||||||
| 
 |  | ||||||
|             # TODO: see if there's faster multi-field reads: |  | ||||||
|             # https://numpy.org/doc/stable/user/basics.rec.html#accessing-multiple-fields |  | ||||||
|             # re-index  with a `time` and index field |  | ||||||
|             prepend_start = shm._first.value - offset_samples + 1 |  | ||||||
| 
 |  | ||||||
|             # tsdb history is so far in the past we can't fit it in |  | ||||||
|             # shm buffer space so simply don't load it! |  | ||||||
|             if prepend_start > 0: |  | ||||||
|                 to_push = tsdb_history[-prepend_start:] |  | ||||||
|                 shm.push( |  | ||||||
|                     to_push, |  | ||||||
| 
 |  | ||||||
|                     # insert the history pre a "days worth" of samples |  | ||||||
|                     # to leave some real-time buffer space at the end. |  | ||||||
|                     prepend=True, |  | ||||||
|                     # update_first=False, |  | ||||||
|                     start=prepend_start, |  | ||||||
|                     field_map=storemod.ohlc_key_map, |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
|                 log.info(f'Loaded {to_push.shape} datums from storage') |  | ||||||
| 
 |  | ||||||
|         # TODO: maybe start history anal and load missing "history |  | ||||||
|         # gaps" via backend.. |  | ||||||
| 
 |  | ||||||
|         if timeframe not in (1, 60): |  | ||||||
|             raise ValueError( |  | ||||||
|                 '`piker` only needs to support 1m and 1s sampling ' |  | ||||||
|                 'but ur api is trying to deliver a longer ' |  | ||||||
|                 f'timeframe of {timeframe} seconds..\n' |  | ||||||
|                 'So yuh.. dun do dat brudder.' |  | ||||||
|             ) |  | ||||||
|         # if there is a gap to backfill from the first |  | ||||||
|         # history frame until the last datum loaded from the tsdb |  | ||||||
|         # continue that now in the background |  | ||||||
|         bf_done = await tn.start( |  | ||||||
|             partial( |  | ||||||
|                 start_backfill, |  | ||||||
|                 get_hist, |  | ||||||
|                 mod, |  | ||||||
|                 mkt, |  | ||||||
|                 shm, |  | ||||||
|                 timeframe, |  | ||||||
| 
 |  | ||||||
|                 backfill_from_shm_index=backfill_gap_from_shm_index, |  | ||||||
|                 backfill_from_dt=mr_start_dt, |  | ||||||
| 
 |  | ||||||
|                 sampler_stream=sampler_stream, |  | ||||||
| 
 |  | ||||||
|                 backfill_until_dt=last_tsdb_dt, |  | ||||||
|                 storage=storage, |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # if len(hist_shm.array) < 2: |  | ||||||
|         # TODO: there's an edge case here to solve where if the last |  | ||||||
|         # frame before market close (at least on ib) was pushed and |  | ||||||
|         # there was only "1 new" row pushed from the first backfill |  | ||||||
|         # query-iteration, then the sample step sizing calcs will |  | ||||||
|         # break upstream from here since you can't diff on at least |  | ||||||
|         # 2 steps... probably should also add logic to compute from |  | ||||||
|         # the tsdb series and stash that somewhere as meta data on |  | ||||||
|         # the shm buffer?.. not sure. |  | ||||||
| 
 |  | ||||||
|         # backload any further data from tsdb (concurrently per |  | ||||||
|         # timeframe) if not all data was able to be loaded (in memory) |  | ||||||
|         # from the ``StorageClient.load()`` call above. |  | ||||||
|         # XXX: the incremental tsdb backload below is currently |  | ||||||
|         # unreachable (disabled); this task idles here until cancelled. |  | ||||||
|         try: |  | ||||||
|             await trio.sleep_forever() |  | ||||||
|         finally: |  | ||||||
|             return |  | ||||||
| 
 |  | ||||||
|         # IF we need to continue backloading incrementally from the |  | ||||||
|         # tsdb client.. |  | ||||||
|         tn.start_soon( |  | ||||||
|             back_load_from_tsdb, |  | ||||||
| 
 |  | ||||||
|             storemod, |  | ||||||
|             storage, |  | ||||||
|             fqme, |  | ||||||
| 
 |  | ||||||
|             tsdb_history, |  | ||||||
|             last_tsdb_dt, |  | ||||||
|             mr_start_dt, |  | ||||||
|             mr_end_dt, |  | ||||||
|             bf_done, |  | ||||||
| 
 |  | ||||||
|             timeframe, |  | ||||||
|             shm, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def manage_history( |  | ||||||
|     mod: ModuleType, |  | ||||||
|     bus: _FeedsBus, |  | ||||||
|     mkt: MktPair, |  | ||||||
|     some_data_ready: trio.Event, |  | ||||||
|     feed_is_live: trio.Event, |  | ||||||
|     timeframe: float = 60,  # in seconds |  | ||||||
| 
 |  | ||||||
|     task_status: TaskStatus[ |  | ||||||
|         tuple[ShmArray, ShmArray] |  | ||||||
|     ] = trio.TASK_STATUS_IGNORED, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     ''' |  | ||||||
|     Load and manage historical data including the loading of any |  | ||||||
|     available series from any connected tsdb as well as conduct |  | ||||||
|     real-time update of both that existing db and the allocated |  | ||||||
|     shared memory buffer. |  | ||||||
| 
 |  | ||||||
|     Init sequence: |  | ||||||
|     - allocate shm (numpy array) buffers for 60s & 1s sample rates |  | ||||||
|     - configure "zero index" for each buffer: the index where |  | ||||||
|       history will be prepended *to* and new live data will be |  | ||||||
|       appended *from*. |  | ||||||
|     - open a ``.storage.StorageClient`` and load any existing tsdb |  | ||||||
|       history as well as (async) start a backfill task which loads |  | ||||||
|       missing (newer) history from the data provider backend: |  | ||||||
|       - tsdb history is loaded first and pushed to shm ASAP. |  | ||||||
|       - the backfill task loads the most recent history before |  | ||||||
|         unblocking its parent task, so that the `ShmArray._last` is |  | ||||||
|         up to date to allow the OHLC sampler to begin writing new |  | ||||||
|         samples at the correct buffer index once the provider feed |  | ||||||
|         engages. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     # TODO: is there a way to make each shm file key |  | ||||||
|     # actor-tree-discovery-addr unique so we avoid collisions |  | ||||||
|     # when doing tests which also allocate shms for certain instruments |  | ||||||
|     # that may be in use on the system by some other running daemons? |  | ||||||
|     # from tractor._state import _runtime_vars |  | ||||||
|     # port = _runtime_vars['_root_mailbox'][1] |  | ||||||
| 
 |  | ||||||
|     uid: tuple = tractor.current_actor().uid |  | ||||||
|     name, uuid = uid |  | ||||||
|     # NOTE: use `.removesuffix()` here (not `.rstrip()`, which strips |  | ||||||
|     # a *character set* rather than a suffix) to drop the mod name. |  | ||||||
|     service: str = name.removesuffix(f'.{mod.name}') |  | ||||||
|     fqme: str = mkt.get_fqme(delim_char='') |  | ||||||
| 
 |  | ||||||
|     # (maybe) allocate shm array for this broker/symbol which will |  | ||||||
|     # be used for fast near-term history capture and processing. |  | ||||||
|     hist_shm, opened = maybe_open_shm_array( |  | ||||||
|         size=_default_hist_size, |  | ||||||
|         append_start_index=_hist_buffer_start, |  | ||||||
| 
 |  | ||||||
|         key=f'piker.{service}[{uuid[:16]}].{fqme}.hist', |  | ||||||
| 
 |  | ||||||
|         # use any broker defined ohlc dtype: |  | ||||||
|         dtype=getattr(mod, '_ohlc_dtype', def_iohlcv_fields), |  | ||||||
| 
 |  | ||||||
|         # we expect the sub-actor to write |  | ||||||
|         readonly=False, |  | ||||||
|     ) |  | ||||||
|     hist_zero_index = hist_shm.index - 1 |  | ||||||
| 
 |  | ||||||
|     # TODO: history validation |  | ||||||
|     if not opened: |  | ||||||
|         raise RuntimeError( |  | ||||||
|             "Persistent shm for sym was already open?!" |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|     rt_shm, opened = maybe_open_shm_array( |  | ||||||
|         size=_default_rt_size, |  | ||||||
|         append_start_index=_rt_buffer_start, |  | ||||||
|         key=f'piker.{service}[{uuid[:16]}].{fqme}.rt', |  | ||||||
| 
 |  | ||||||
|         # use any broker defined ohlc dtype: |  | ||||||
|         dtype=getattr(mod, '_ohlc_dtype', def_iohlcv_fields), |  | ||||||
| 
 |  | ||||||
|         # we expect the sub-actor to write |  | ||||||
|         readonly=False, |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     # (for now) set the rt (hft) shm array with space to prepend |  | ||||||
|     # only a few days worth of 1s history. |  | ||||||
|     days: int = 2 |  | ||||||
|     start_index: int = days*_secs_in_day |  | ||||||
|     rt_shm._first.value = start_index |  | ||||||
|     rt_shm._last.value = start_index |  | ||||||
|     rt_zero_index = rt_shm.index - 1 |  | ||||||
| 
 |  | ||||||
|     if not opened: |  | ||||||
|         raise RuntimeError( |  | ||||||
|             "Persistent shm for sym was already open?!" |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
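For context, the 1s buffer sizing above is just seconds-per-day scaling; a tiny sketch assuming `_secs_in_day` is the usual `60 * 60 * 24` constant:

_secs_in_day: int = 60 * 60 * 24  # 86400

days: int = 2
start_index: int = days * _secs_in_day
assert start_index == 172_800  # slots reserved for prepended 1s history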
|     open_history_client = getattr( |  | ||||||
|         mod, |  | ||||||
|         'open_history_client', |  | ||||||
|     ) |  | ||||||
|     assert open_history_client |  | ||||||
| 
 |  | ||||||
|     # TODO: maybe it should be a subpkg of `.data`? |  | ||||||
|     from piker import storage |  | ||||||
| 
 |  | ||||||
|     async with ( |  | ||||||
|         storage.open_storage_client() as (storemod, client), |  | ||||||
|         trio.open_nursery() as tn, |  | ||||||
|     ): |  | ||||||
|         log.info( |  | ||||||
|             f'Connecting to storage backend `{storemod.name}`:\n' |  | ||||||
|             f'location: {client.address}\n' |  | ||||||
|             f'db cardinality: {client.cardinality}\n' |  | ||||||
|             # TODO: show backend config, eg: |  | ||||||
|             # - network settings |  | ||||||
|             # - storage size with compression |  | ||||||
|             # - number of loaded time series? |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         # NOTE: this call ONLY UNBLOCKS once the latest-most frame |  | ||||||
|         # (i.e. history just before the live feed latest datum) of |  | ||||||
|         # history has been loaded and written to the shm buffer: |  | ||||||
|         # - the backfiller task can write in reverse chronological |  | ||||||
|         #   to the shm and tsdb |  | ||||||
|         # - the tsdb data can be loaded immediately and the |  | ||||||
|         #   backfiller can do a single append from its end datum and |  | ||||||
|         #   then prepends backward to that from the current time |  | ||||||
|         #   step. |  | ||||||
|         tf2mem: dict = { |  | ||||||
|             1: rt_shm, |  | ||||||
|             60: hist_shm, |  | ||||||
|         } |  | ||||||
|         async with open_sample_stream( |  | ||||||
|             period_s=1., |  | ||||||
|             shms_by_period={ |  | ||||||
|                 1.: rt_shm.token, |  | ||||||
|                 60.: hist_shm.token, |  | ||||||
|             }, |  | ||||||
| 
 |  | ||||||
|             # NOTE: we open the sample index-stream only so we can |  | ||||||
|             # send broadcasts on backfill operations; we don't |  | ||||||
|             # subscribe to receive them (no code in this data feed |  | ||||||
|             # layer consumes the index-stream). |  | ||||||
|             open_index_stream=True, |  | ||||||
|             sub_for_broadcasts=False, |  | ||||||
| 
 |  | ||||||
|         ) as sample_stream: |  | ||||||
|             # register 1s and 1m buffers with the global incrementer task |  | ||||||
|             log.info(f'Connected to sampler stream: {sample_stream}') |  | ||||||
| 
 |  | ||||||
|             for timeframe in [60, 1]: |  | ||||||
|                 await tn.start( |  | ||||||
|                     tsdb_backfill, |  | ||||||
|                     mod, |  | ||||||
|                     storemod, |  | ||||||
|                     tn, |  | ||||||
|                     # bus, |  | ||||||
|                     client, |  | ||||||
|                     mkt, |  | ||||||
|                     tf2mem[timeframe], |  | ||||||
|                     timeframe, |  | ||||||
| 
 |  | ||||||
|                     sample_stream, |  | ||||||
|                 ) |  | ||||||
| 
 |  | ||||||
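The `await tn.start(tsdb_backfill, ...)` calls above lean on `trio`'s task-startup handshake: the parent blocks until the child task calls `task_status.started()`. A generic, self-contained sketch of that pattern (no piker APIs involved):

import trio

async def child(task_status=trio.TASK_STATUS_IGNORED) -> None:
    # ... init work, eg. load the latest history frame ...
    task_status.started()       # unblocks the parent's `tn.start()`
    await trio.sleep_forever()  # then keep servicing in the background

async def main() -> None:
    async with trio.open_nursery() as tn:
        await tn.start(child)  # returns only once `.started()` fires
        tn.cancel_scope.cancel()

trio.run(main)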
|             # indicate to caller that feed can be delivered to |  | ||||||
|             # remote requesting client since we've loaded history |  | ||||||
|             # data that can be used. |  | ||||||
|             some_data_ready.set() |  | ||||||
| 
 |  | ||||||
|             # wait for a live feed before starting the sampler. |  | ||||||
|             await feed_is_live.wait() |  | ||||||
| 
 |  | ||||||
|             # yield back after client connect with filled shm |  | ||||||
|             task_status.started(( |  | ||||||
|                 hist_zero_index, |  | ||||||
|                 hist_shm, |  | ||||||
|                 rt_zero_index, |  | ||||||
|                 rt_shm, |  | ||||||
|             )) |  | ||||||
| 
 |  | ||||||
|             # history retrieval loop depending on user interaction |  | ||||||
|             # and thus a small RPC-prot for remotely controlling |  | ||||||
|             # what data is loaded for viewing. |  | ||||||
|             await trio.sleep_forever() |  | ||||||
|  | @ -26,7 +26,10 @@ from ._api import ( | ||||||
|     maybe_mk_fsp_shm, |     maybe_mk_fsp_shm, | ||||||
|     Fsp, |     Fsp, | ||||||
| ) | ) | ||||||
| from ._engine import cascade | from ._engine import ( | ||||||
|  |     cascade, | ||||||
|  |     Cascade, | ||||||
|  | ) | ||||||
| from ._volume import ( | from ._volume import ( | ||||||
|     dolla_vlm, |     dolla_vlm, | ||||||
|     flow_rates, |     flow_rates, | ||||||
|  | @ -35,6 +38,7 @@ from ._volume import ( | ||||||
| 
 | 
 | ||||||
| __all__: list[str] = [ | __all__: list[str] = [ | ||||||
|     'cascade', |     'cascade', | ||||||
|  |     'Cascade', | ||||||
|     'maybe_mk_fsp_shm', |     'maybe_mk_fsp_shm', | ||||||
|     'Fsp', |     'Fsp', | ||||||
|     'dolla_vlm', |     'dolla_vlm', | ||||||
|  | @ -46,9 +50,12 @@ __all__: list[str] = [ | ||||||
| async def latency( | async def latency( | ||||||
|     source: 'TickStream[Dict[str, float]]',  # noqa |     source: 'TickStream[Dict[str, float]]',  # noqa | ||||||
|     ohlcv: np.ndarray |     ohlcv: np.ndarray | ||||||
|  | 
 | ||||||
| ) -> AsyncIterator[np.ndarray]: | ) -> AsyncIterator[np.ndarray]: | ||||||
|     """Latency measurements, broker to piker. |     ''' | ||||||
|     """ |     Latency measurements, broker to piker. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|     # TODO: do we want to offer yielding this async |     # TODO: do we want to offer yielding this async | ||||||
|     # before the rt data connection comes up? |     # before the rt data connection comes up? | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -18,13 +18,12 @@ | ||||||
| core task logic for processing chains | core task logic for processing chains | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from dataclasses import dataclass | from __future__ import annotations | ||||||
|  | from contextlib import asynccontextmanager as acm | ||||||
| from functools import partial | from functools import partial | ||||||
| from typing import ( | from typing import ( | ||||||
|     AsyncIterator, |     AsyncIterator, | ||||||
|     Callable, |     Callable, | ||||||
|     Optional, |  | ||||||
|     Union, |  | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| import numpy as np | import numpy as np | ||||||
|  | @ -33,9 +32,9 @@ from trio_typing import TaskStatus | ||||||
| import tractor | import tractor | ||||||
| from tractor.msg import NamespacePath | from tractor.msg import NamespacePath | ||||||
| 
 | 
 | ||||||
|  | from piker.types import Struct | ||||||
| from ..log import get_logger, get_console_log | from ..log import get_logger, get_console_log | ||||||
| from .. import data | from .. import data | ||||||
| from ..data import attach_shm_array |  | ||||||
| from ..data.feed import ( | from ..data.feed import ( | ||||||
|     Flume, |     Flume, | ||||||
|     Feed, |     Feed, | ||||||
|  | @ -56,12 +55,6 @@ from ..toolz import Profiler | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @dataclass |  | ||||||
| class TaskTracker: |  | ||||||
|     complete: trio.Event |  | ||||||
|     cs: trio.CancelScope |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| async def filter_quotes_by_sym( | async def filter_quotes_by_sym( | ||||||
| 
 | 
 | ||||||
|     sym: str, |     sym: str, | ||||||
|  | @ -82,30 +75,168 @@ async def filter_quotes_by_sym( | ||||||
|         if quote: |         if quote: | ||||||
|             yield quote |             yield quote | ||||||
| 
 | 
 | ||||||
|  | # TODO: unifying the abstractions in this FSP subsys/layer: | ||||||
|  | # -[ ] move the `.data.flows.Flume` type into this | ||||||
|  | #   module/subsys/pkg? | ||||||
|  | # -[ ] ideas for further abstractions as per | ||||||
|  | #   - https://github.com/pikers/piker/issues/216, | ||||||
|  | #   - https://github.com/pikers/piker/issues/270: | ||||||
|  | #   - a (financial signal) ``Flow`` would be a "collection" of such | ||||||
|  | #     minimal cascades. Some engineering-based jargon concepts: | ||||||
|  | #     - https://en.wikipedia.org/wiki/Signal_chain | ||||||
|  | #     - https://en.wikipedia.org/wiki/Daisy_chain_(electrical_engineering) | ||||||
|  | #     - https://en.wikipedia.org/wiki/Audio_signal_flow | ||||||
|  | #     - https://en.wikipedia.org/wiki/Digital_signal_processing#Implementation | ||||||
|  | #     - https://en.wikipedia.org/wiki/Dataflow_programming | ||||||
|  | #     - https://en.wikipedia.org/wiki/Signal_programming | ||||||
|  | #     - https://en.wikipedia.org/wiki/Incremental_computing | ||||||
|  | #     - https://en.wikipedia.org/wiki/Signal-flow_graph | ||||||
|  | #     - https://en.wikipedia.org/wiki/Signal-flow_graph#Basic_components | ||||||
| 
 | 
 | ||||||
| async def fsp_compute( | # -[ ] we probably want to eval THE BELOW design and unify with the | ||||||
|  | #   proto `TaskManager` in the `tractor` dev branch as well as with | ||||||
|  | #   our below idea for `Cascade`: | ||||||
|  | #   - https://github.com/goodboy/tractor/pull/363 | ||||||
|  | class Cascade(Struct): | ||||||
|  |     ''' | ||||||
|  |     As per sig-proc engineering parlance, this is a chaining of | ||||||
|  |     `Flume`s, which are themselves collections of "Streams" | ||||||
|  |     implemented currently via `ShmArray`s. | ||||||
| 
 | 
 | ||||||
|  |     A `Cascade` is the minimal "connection" of 2 `Flume`s | ||||||
|  |     as per circuit parlance: | ||||||
|  |     https://en.wikipedia.org/wiki/Two-port_network#Cascade_connection | ||||||
|  | 
 | ||||||
|  |     TODO: | ||||||
|  |       -[ ] could cover the combination of our `FspAdmin` and the | ||||||
|  |         backend `.fsp._engine` related machinery to "connect" one flume | ||||||
|  |         to another? | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # TODO: make these `Flume`s | ||||||
|  |     src: Flume | ||||||
|  |     dst: Flume | ||||||
|  |     tn: trio.Nursery | ||||||
|  |     fsp: Fsp  # UI-side middleware ctl API | ||||||
|  | 
 | ||||||
|  |     # filled during cascade/.bind_func() (fsp_compute) init phases | ||||||
|  |     bind_func: Callable | None = None | ||||||
|  |     complete: trio.Event | None = None | ||||||
|  |     cs: trio.CancelScope | None = None | ||||||
|  |     client_stream: tractor.MsgStream | None = None | ||||||
|  | 
 | ||||||
|  |     async def resync(self) -> int: | ||||||
|  |         # TODO: adopt an incremental update engine/approach | ||||||
|  |         # where possible here eventually! | ||||||
|  |         log.info(f're-syncing fsp {self.fsp.name} to source') | ||||||
|  |         self.cs.cancel() | ||||||
|  |         await self.complete.wait() | ||||||
|  |         index: int = await self.tn.start(self.bind_func) | ||||||
|  | 
 | ||||||
|  |         # always trigger UI refresh after history update, | ||||||
|  |         # see ``piker.ui._fsp.FspAdmin.open_chain()`` and | ||||||
|  |         # ``piker.ui._display.trigger_update()``. | ||||||
|  |         dst_shm: ShmArray = self.dst.rt_shm | ||||||
|  |         await self.client_stream.send({ | ||||||
|  |             'fsp_update': { | ||||||
|  |                 'key': dst_shm.token, | ||||||
|  |                 'first': dst_shm._first.value, | ||||||
|  |                 'last': dst_shm._last.value, | ||||||
|  |             } | ||||||
|  |         }) | ||||||
|  |         return index | ||||||
|  | 
 | ||||||
|  |     def is_synced(self) -> tuple[bool, int, int]: | ||||||
|  |         ''' | ||||||
|  |         Predicate to determine if a destination FSP | ||||||
|  |         output array is aligned to its source array. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         src_shm: ShmArray = self.src.rt_shm | ||||||
|  |         dst_shm: ShmArray = self.dst.rt_shm | ||||||
|  |         step_diff = src_shm.index - dst_shm.index | ||||||
|  |         len_diff = abs(len(src_shm.array) - len(dst_shm.array)) | ||||||
|  |         synced: bool = not ( | ||||||
|  |             # the source is likely backfilling and we must | ||||||
|  |             # sync history calculations | ||||||
|  |             len_diff > 2 | ||||||
|  | 
 | ||||||
|  |             # we aren't step synced to the source and may be | ||||||
|  |             # leading/lagging by a step | ||||||
|  |             or step_diff > 1 | ||||||
|  |             or step_diff < 0 | ||||||
|  |         ) | ||||||
|  |         if not synced: | ||||||
|  |             fsp: Fsp = self.fsp | ||||||
|  |             log.warning( | ||||||
|  |                 '***DESYNCED FSP***\n' | ||||||
|  |                 f'{fsp.ns_path}@{src_shm.token}\n' | ||||||
|  |                 f'step_diff: {step_diff}\n' | ||||||
|  |                 f'len_diff: {len_diff}\n' | ||||||
|  |             ) | ||||||
|  |         return ( | ||||||
|  |             synced, | ||||||
|  |             step_diff, | ||||||
|  |             len_diff, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     async def poll_and_sync_to_step(self) -> int: | ||||||
|  |         synced, step_diff, _ = self.is_synced() | ||||||
|  |         while not synced: | ||||||
|  |             await self.resync() | ||||||
|  |             synced, step_diff, _ = self.is_synced() | ||||||
|  | 
 | ||||||
|  |         return step_diff | ||||||
|  | 
 | ||||||
|  |     @acm | ||||||
|  |     async def open_edge( | ||||||
|  |         self, | ||||||
|  |         bind_func: Callable, | ||||||
|  |     ) -> int: | ||||||
|  |         self.bind_func = bind_func | ||||||
|  |         index = await self.tn.start(bind_func) | ||||||
|  |         yield index | ||||||
|  |         # TODO: what do we want on teardown/error? | ||||||
|  |         # -[ ] dynamic reconnection after update? | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
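A standalone sketch of the `.is_synced()` alignment predicate defined above, with plain ints standing in for the shm indices and array lengths (all values hypothetical):

def is_synced_sketch(
    src_index: int,
    dst_index: int,
    src_len: int,
    dst_len: int,
) -> tuple[bool, int, int]:
    step_diff = src_index - dst_index
    len_diff = abs(src_len - dst_len)
    synced = not (
        len_diff > 2       # src likely backfilling => resync history
        or step_diff > 1   # dst lagging the src by more than a step
        or step_diff < 0   # dst leading the src
    )
    return synced, step_diff, len_diff

assert is_synced_sketch(100, 100, 500, 500)[0]
assert not is_synced_sketch(100, 98, 500, 500)[0]   # 2-step lag
assert not is_synced_sketch(100, 100, 500, 490)[0]  # history len mismatch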
|  | async def connect_streams( | ||||||
|  |     casc: Cascade, | ||||||
|     mkt: MktPair, |     mkt: MktPair, | ||||||
|     flume: Flume, |  | ||||||
|     quote_stream: trio.abc.ReceiveChannel, |     quote_stream: trio.abc.ReceiveChannel, | ||||||
|  |     src: Flume, | ||||||
|  |     dst: Flume, | ||||||
| 
 | 
 | ||||||
|     src: ShmArray, |     edge_func: Callable, | ||||||
|     dst: ShmArray, |  | ||||||
| 
 |  | ||||||
|     func: Callable, |  | ||||||
| 
 | 
 | ||||||
|     # attach_stream: bool = False, |     # attach_stream: bool = False, | ||||||
|     task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED, |     task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|  |     ''' | ||||||
|  |     Stream, per-sample compute, and write the cascade of | ||||||
|  |     2 `Flume`s/streams given some operating `edge_func`. | ||||||
| 
 | 
 | ||||||
|  |     https://en.wikipedia.org/wiki/Signal-flow_graph#Basic_components | ||||||
|  | 
 | ||||||
|  |     Not literally, but something like: | ||||||
|  | 
 | ||||||
|  |         edge_func(Flume_in) -> Flume_out | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|     profiler = Profiler( |     profiler = Profiler( | ||||||
|         delayed=False, |         delayed=False, | ||||||
|         disabled=True |         disabled=True | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     fqme = mkt.fqme |     # TODO: just pull it from src.mkt.fqme no? | ||||||
|     out_stream = func( |     # fqme: str = mkt.fqme | ||||||
|  |     fqme: str = src.mkt.fqme | ||||||
|  | 
 | ||||||
|  |     # TODO: dynamic introspection of what the underlying (vertex) | ||||||
|  |     # function actually requires from input node (flumes) then | ||||||
|  |     # deliver those inputs as part of a graph "compilation" step? | ||||||
|  |     out_stream = edge_func( | ||||||
| 
 | 
 | ||||||
|         # TODO: do we even need this if we do the feed api right? |         # TODO: do we even need this if we do the feed api right? | ||||||
|         # shouldn't a local stream do this before we get a handle |         # shouldn't a local stream do this before we get a handle | ||||||
|  | @ -113,20 +244,21 @@ async def fsp_compute( | ||||||
|         # async itertools style? |         # async itertools style? | ||||||
|         filter_quotes_by_sym(fqme, quote_stream), |         filter_quotes_by_sym(fqme, quote_stream), | ||||||
| 
 | 
 | ||||||
|         # XXX: currently the ``ohlcv`` arg |         # XXX: currently the ``ohlcv`` arg, but we should allow | ||||||
|         flume.rt_shm, |         # (dynamic) requests for src flume (node) streams? | ||||||
|  |         src.rt_shm, | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     # HISTORY COMPUTE PHASE |     # HISTORY COMPUTE PHASE | ||||||
|     # conduct a single iteration of fsp with historical bars input |     # conduct a single iteration of fsp with historical bars input | ||||||
|     # and get historical output. |     # and get historical output. | ||||||
|     history_output: Union[ |     history_output: ( | ||||||
|         dict[str, np.ndarray],  # multi-output case |         dict[str, np.ndarray]  # multi-output case | ||||||
|         np.ndarray,  # single output case |         | np.ndarray,  # single output case | ||||||
|     ] |     ) | ||||||
|     history_output = await anext(out_stream) |     history_output = await anext(out_stream) | ||||||
| 
 | 
 | ||||||
|     func_name = func.__name__ |     func_name = edge_func.__name__ | ||||||
|     profiler(f'{func_name} generated history') |     profiler(f'{func_name} generated history') | ||||||
| 
 | 
 | ||||||
|     # build struct array with an 'index' field to push as history |     # build struct array with an 'index' field to push as history | ||||||
|  | @ -134,10 +266,12 @@ async def fsp_compute( | ||||||
|     # TODO: push using a[['f0', 'f1', .., 'fn']] = .. syntax no? |     # TODO: push using a[['f0', 'f1', .., 'fn']] = .. syntax no? | ||||||
|     # if the output array is multi-field then push |     # if the output array is multi-field then push | ||||||
|     # each respective field. |     # each respective field. | ||||||
|     fields = getattr(dst.array.dtype, 'fields', None).copy() |     dst_shm: ShmArray = dst.rt_shm | ||||||
|  |     fields = getattr(dst_shm.array.dtype, 'fields', None).copy() | ||||||
|     fields.pop('index') |     fields.pop('index') | ||||||
|     history_by_field: Optional[np.ndarray] = None |     history_by_field: np.ndarray | None = None | ||||||
|     src_time = src.array['time'] |     src_shm: ShmArray = src.rt_shm | ||||||
|  |     src_time = src_shm.array['time'] | ||||||
| 
 | 
 | ||||||
|     if ( |     if ( | ||||||
|         fields and |         fields and | ||||||
|  | @ -156,7 +290,7 @@ async def fsp_compute( | ||||||
|                 if history_by_field is None: |                 if history_by_field is None: | ||||||
| 
 | 
 | ||||||
|                     if output is None: |                     if output is None: | ||||||
|                         length = len(src.array) |                         length = len(src_shm.array) | ||||||
|                     else: |                     else: | ||||||
|                         length = len(output) |                         length = len(output) | ||||||
| 
 | 
 | ||||||
|  | @ -165,7 +299,7 @@ async def fsp_compute( | ||||||
|                     # will be pushed to shm. |                     # will be pushed to shm. | ||||||
|                     history_by_field = np.zeros( |                     history_by_field = np.zeros( | ||||||
|                         length, |                         length, | ||||||
|                         dtype=dst.array.dtype |                         dtype=dst_shm.array.dtype | ||||||
|                     ) |                     ) | ||||||
| 
 | 
 | ||||||
|                 if output is None: |                 if output is None: | ||||||
|  | @ -182,13 +316,13 @@ async def fsp_compute( | ||||||
|             ) |             ) | ||||||
|         history_by_field = np.zeros( |         history_by_field = np.zeros( | ||||||
|             len(history_output), |             len(history_output), | ||||||
|             dtype=dst.array.dtype |             dtype=dst_shm.array.dtype | ||||||
|         ) |         ) | ||||||
|         history_by_field[func_name] = history_output |         history_by_field[func_name] = history_output | ||||||
| 
 | 
 | ||||||
|     history_by_field['time'] = src_time[-len(history_by_field):] |     history_by_field['time'] = src_time[-len(history_by_field):] | ||||||
| 
 | 
 | ||||||
|     history_output['time'] = src.array['time'] |     history_output['time'] = src_shm.array['time'] | ||||||
| 
 | 
 | ||||||
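The per-field history write above can be illustrated with plain `numpy`; the field name here (`dolla_vlm`) is just a plausible stand-in for a real fsp output:

import numpy as np

dtype = np.dtype([
    ('index', int),
    ('time', float),
    ('dolla_vlm', float),
])
# multi-output case: a dict of per-field arrays
history_output = {'dolla_vlm': np.arange(5, dtype=float)}
src_time = np.linspace(0., 4., 5)

history_by_field = np.zeros(len(src_time), dtype=dtype)
for key, output in history_output.items():
    history_by_field[key] = output  # assign one named field at a time

history_by_field['time'] = src_time[-len(history_by_field):]
print(history_by_field)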
|     # TODO: XXX: |     # TODO: XXX: | ||||||
|     # THERE'S A BIG BUG HERE WITH THE `index` field since we're |     # THERE'S A BIG BUG HERE WITH THE `index` field since we're | ||||||
|  | @ -201,11 +335,11 @@ async def fsp_compute( | ||||||
|     #   is `index` aware such that historical data can be indexed |     #   is `index` aware such that historical data can be indexed | ||||||
|     #   relative to the true first datum? Not sure if this is sane |     #   relative to the true first datum? Not sure if this is sane | ||||||
|     #   for incremental computations. |     #   for incremental computations. | ||||||
|     first = dst._first.value = src._first.value |     first = dst_shm._first.value = src_shm._first.value | ||||||
| 
 | 
 | ||||||
|     # TODO: can we use this `start` flag instead of the manual |     # TODO: can we use this `start` flag instead of the manual | ||||||
|     # setting above? |     # setting above? | ||||||
|     index = dst.push( |     index = dst_shm.push( | ||||||
|         history_by_field, |         history_by_field, | ||||||
|         start=first, |         start=first, | ||||||
|     ) |     ) | ||||||
|  | @ -216,12 +350,9 @@ async def fsp_compute( | ||||||
|     # setup a respawn handle |     # setup a respawn handle | ||||||
|     with trio.CancelScope() as cs: |     with trio.CancelScope() as cs: | ||||||
| 
 | 
 | ||||||
|         # TODO: might be better to just make a "restart" method where |         casc.cs = cs | ||||||
|         # the target task is spawned implicitly and then the event is |         casc.complete = trio.Event() | ||||||
|         # set via some higher level api? At that poing we might as well |         task_status.started(index) | ||||||
|         # be writing a one-cancels-one nursery though right? |  | ||||||
|         tracker = TaskTracker(trio.Event(), cs) |  | ||||||
|         task_status.started((tracker, index)) |  | ||||||
| 
 | 
 | ||||||
|         profiler(f'{func_name} yield last index') |         profiler(f'{func_name} yield last index') | ||||||
| 
 | 
 | ||||||
|  | @ -235,12 +366,12 @@ async def fsp_compute( | ||||||
|                 log.debug(f"{func_name}: {processed}") |                 log.debug(f"{func_name}: {processed}") | ||||||
|                 key, output = processed |                 key, output = processed | ||||||
|                 # dst.array[-1][key] = output |                 # dst.array[-1][key] = output | ||||||
|                 dst.array[[key, 'time']][-1] = ( |                 dst_shm.array[[key, 'time']][-1] = ( | ||||||
|                     output, |                     output, | ||||||
|                     # TODO: what about pushing ``time.time_ns()`` |                     # TODO: what about pushing ``time.time_ns()`` | ||||||
|                     # in which case we'll need to round at the graphics |                     # in which case we'll need to round at the graphics | ||||||
|                     # processing / sampling layer? |                     # processing / sampling layer? | ||||||
|                     src.array[-1]['time'] |                     src_shm.array[-1]['time'] | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|                 # NOTE: for now we aren't streaming this to the consumer |                 # NOTE: for now we aren't streaming this to the consumer | ||||||
|  | @ -252,7 +383,7 @@ async def fsp_compute( | ||||||
|                 # N-consumers who subscribe for the real-time output, |                 # N-consumers who subscribe for the real-time output, | ||||||
|                 # which we'll likely want to implement using local-mem |                 # which we'll likely want to implement using local-mem | ||||||
|                 # chans for the fan out? |                 # chans for the fan out? | ||||||
|                 # index = src.index |                 # index = src_shm.index | ||||||
|                 # if attach_stream: |                 # if attach_stream: | ||||||
|                 #     await client_stream.send(index) |                 #     await client_stream.send(index) | ||||||
| 
 | 
 | ||||||
|  | @ -262,7 +393,7 @@ async def fsp_compute( | ||||||
|                 #     log.info(f'FSP quote too fast: {hz}') |                 #     log.info(f'FSP quote too fast: {hz}') | ||||||
|                 # last = time.time() |                 # last = time.time() | ||||||
|         finally: |         finally: | ||||||
|             tracker.complete.set() |             casc.complete.set() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
|  | @ -273,15 +404,15 @@ async def cascade( | ||||||
|     # data feed key |     # data feed key | ||||||
|     fqme: str, |     fqme: str, | ||||||
| 
 | 
 | ||||||
|     src_shm_token: dict, |     # flume pair cascaded using an "edge function" | ||||||
|     dst_shm_token: tuple[str, np.dtype], |     src_flume_addr: dict, | ||||||
| 
 |     dst_flume_addr: dict, | ||||||
|     ns_path: NamespacePath, |     ns_path: NamespacePath, | ||||||
| 
 | 
 | ||||||
|     shm_registry: dict[str, _Token], |     shm_registry: dict[str, _Token], | ||||||
| 
 | 
 | ||||||
|     zero_on_step: bool = False, |     zero_on_step: bool = False, | ||||||
|     loglevel: Optional[str] = None, |     loglevel: str | None = None, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|  | @ -297,8 +428,14 @@ async def cascade( | ||||||
|     if loglevel: |     if loglevel: | ||||||
|         get_console_log(loglevel) |         get_console_log(loglevel) | ||||||
| 
 | 
 | ||||||
|     src = attach_shm_array(token=src_shm_token) |     src: Flume = Flume.from_msg(src_flume_addr) | ||||||
|     dst = attach_shm_array(readonly=False, token=dst_shm_token) |     dst: Flume = Flume.from_msg( | ||||||
|  |         dst_flume_addr, | ||||||
|  |         readonly=False, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # src: ShmArray = attach_shm_array(token=src_shm_token) | ||||||
|  |     # dst: ShmArray = attach_shm_array(readonly=False, token=dst_shm_token) | ||||||
| 
 | 
 | ||||||
|     reg = _load_builtins() |     reg = _load_builtins() | ||||||
|     lines = '\n'.join([f'{key.rpartition(":")[2]} => {key}' for key in reg]) |     lines = '\n'.join([f'{key.rpartition(":")[2]} => {key}' for key in reg]) | ||||||
|  | @ -306,11 +443,11 @@ async def cascade( | ||||||
|         f'Registered FSP set:\n{lines}' |         f'Registered FSP set:\n{lines}' | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     # update actorlocal flows table which registers |     # NOTE XXX: update actorlocal flows table which registers | ||||||
|     # readonly "instances" of this fsp for symbol/source |     # readonly "instances" of this fsp for symbol/source so that | ||||||
|     # so that consumer fsps can look it up by source + fsp. |     # consumer fsps can look it up by source + fsp. | ||||||
|     # TODO: ugh i hate this wind/unwind to list over the wire |     # TODO: ugh i hate this wind/unwind to list over the wire but | ||||||
|     # but not sure how else to do it. |     # not sure how else to do it. | ||||||
|     for (token, fsp_name, dst_token) in shm_registry: |     for (token, fsp_name, dst_token) in shm_registry: | ||||||
|         Fsp._flow_registry[( |         Fsp._flow_registry[( | ||||||
|             _Token.from_msg(token), |             _Token.from_msg(token), | ||||||
|  | @ -320,12 +457,15 @@ async def cascade( | ||||||
|     fsp: Fsp = reg.get( |     fsp: Fsp = reg.get( | ||||||
|         NamespacePath(ns_path) |         NamespacePath(ns_path) | ||||||
|     ) |     ) | ||||||
|     func = fsp.func |     func: Callable = fsp.func | ||||||
| 
 | 
 | ||||||
|     if not func: |     if not func: | ||||||
|         # TODO: assume it's a func target path |         # TODO: assume it's a func target path | ||||||
|         raise ValueError(f'Unknown fsp target: {ns_path}') |         raise ValueError(f'Unknown fsp target: {ns_path}') | ||||||
| 
 | 
 | ||||||
|  |     _fqme: str = src.mkt.fqme | ||||||
|  |     assert _fqme == fqme | ||||||
|  | 
 | ||||||
|     # open a data feed stream with requested broker |     # open a data feed stream with requested broker | ||||||
|     feed: Feed |     feed: Feed | ||||||
|     async with data.feed.maybe_open_feed( |     async with data.feed.maybe_open_feed( | ||||||
|  | @ -339,40 +479,68 @@ async def cascade( | ||||||
| 
 | 
 | ||||||
|     ) as feed: |     ) as feed: | ||||||
| 
 | 
 | ||||||
|         flume = feed.flumes[fqme] |         flume: Flume = feed.flumes[fqme] | ||||||
|         mkt = flume.mkt |         # XXX: can't do this since flume.feed will be set XD | ||||||
|         assert src.token == flume.rt_shm.token |         # assert flume == src | ||||||
|  |         assert flume.mkt == src.mkt | ||||||
|  |         mkt: MktPair = flume.mkt | ||||||
|  | 
 | ||||||
|  |         # NOTE: FOR NOW, sanity checks around the feed as being | ||||||
|  |         # always the src flume (until we get to fancier/lengthier | ||||||
|  |         # chains/graphs). | ||||||
|  |         assert src.rt_shm.token == flume.rt_shm.token | ||||||
|  | 
 | ||||||
|  |         # XXX: won't work bc the _hist_shm_token value will be | ||||||
|  |         # list[list] after IPC.. | ||||||
|  |         # assert flume.to_msg() == src_flume_addr | ||||||
|  | 
 | ||||||
|         profiler(f'{func}: feed up') |         profiler(f'{func}: feed up') | ||||||
| 
 | 
 | ||||||
|         func_name = func.__name__ |         func_name: str = func.__name__ | ||||||
|         async with ( |         async with ( | ||||||
|             trio.open_nursery() as n, |             trio.open_nursery() as tn, | ||||||
|         ): |         ): | ||||||
|  |             # TODO: might be better to just make a "restart" method where | ||||||
|  |             # the target task is spawned implicitly and then the event is | ||||||
|  |             # set via some higher level api? At that point we might as well | ||||||
|  |             # be writing a one-cancels-one nursery though right? | ||||||
|  |             casc = Cascade( | ||||||
|  |                 src, | ||||||
|  |                 dst, | ||||||
|  |                 tn, | ||||||
|  |                 fsp, | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|  |             # TODO: this seems like it should be wrapped somewhere? | ||||||
|             fsp_target = partial( |             fsp_target = partial( | ||||||
| 
 |                 connect_streams, | ||||||
|                 fsp_compute, |                 casc=casc, | ||||||
|                 mkt=mkt, |                 mkt=mkt, | ||||||
|                 flume=flume, |  | ||||||
|                 quote_stream=flume.stream, |                 quote_stream=flume.stream, | ||||||
| 
 | 
 | ||||||
|                 # shm |                 # flumes and shm passthrough | ||||||
|                 src=src, |                 src=src, | ||||||
|                 dst=dst, |                 dst=dst, | ||||||
| 
 | 
 | ||||||
|                 # target |                 # chain function which takes src flume input(s) | ||||||
|                 func=func |                 # and renders dst flume output(s) | ||||||
|  |                 edge_func=func | ||||||
|             ) |             ) | ||||||
| 
 |             async with casc.open_edge( | ||||||
|             tracker, index = await n.start(fsp_target) |                 bind_func=fsp_target, | ||||||
|  |             ) as index: | ||||||
|  |                 # casc.bind_func = fsp_target | ||||||
|  |                 # index = await tn.start(fsp_target) | ||||||
|  |                 dst_shm: ShmArray = dst.rt_shm | ||||||
|  |                 src_shm: ShmArray = src.rt_shm | ||||||
| 
 | 
 | ||||||
|                 if zero_on_step: |                 if zero_on_step: | ||||||
|                 last = dst.array[-1:] |                     last = dst.rt_shm.array[-1:] | ||||||
|                     zeroed = np.zeros(last.shape, dtype=last.dtype) |                     zeroed = np.zeros(last.shape, dtype=last.dtype) | ||||||
| 
 | 
 | ||||||
|                 profiler(f'{func_name}: fsp up') |                 profiler(f'{func_name}: fsp up') | ||||||
| 
 | 
 | ||||||
|             # sync client |                 # sync to client-side actor | ||||||
|                 await ctx.started(index) |                 await ctx.started(index) | ||||||
| 
 | 
 | ||||||
|                 # XXX:  rt stream with client which we MUST |                 # XXX:  rt stream with client which we MUST | ||||||
|  | @ -380,85 +548,26 @@ async def cascade( | ||||||
|                 # incremental "updates" as history prepends take |                 # incremental "updates" as history prepends take | ||||||
|                 # place. |                 # place. | ||||||
|                 async with ctx.open_stream() as client_stream: |                 async with ctx.open_stream() as client_stream: | ||||||
|  |                     casc.client_stream: tractor.MsgStream = client_stream | ||||||
| 
 | 
 | ||||||
|                 # TODO: these likely should all become |                     s, step, ld = casc.is_synced() | ||||||
|                 # methods of this ``TaskLifetime`` or wtv |  | ||||||
|                 # abstraction.. |  | ||||||
|                 async def resync( |  | ||||||
|                     tracker: TaskTracker, |  | ||||||
| 
 |  | ||||||
|                 ) -> tuple[TaskTracker, int]: |  | ||||||
|                     # TODO: adopt an incremental update engine/approach |  | ||||||
|                     # where possible here eventually! |  | ||||||
|                     log.info(f're-syncing fsp {func_name} to source') |  | ||||||
|                     tracker.cs.cancel() |  | ||||||
|                     await tracker.complete.wait() |  | ||||||
|                     tracker, index = await n.start(fsp_target) |  | ||||||
| 
 |  | ||||||
|                     # always trigger UI refresh after history update, |  | ||||||
|                     # see ``piker.ui._fsp.FspAdmin.open_chain()`` and |  | ||||||
|                     # ``piker.ui._display.trigger_update()``. |  | ||||||
|                     await client_stream.send({ |  | ||||||
|                         'fsp_update': { |  | ||||||
|                             'key': dst_shm_token, |  | ||||||
|                             'first': dst._first.value, |  | ||||||
|                             'last': dst._last.value, |  | ||||||
|                         } |  | ||||||
|                     }) |  | ||||||
|                     return tracker, index |  | ||||||
| 
 |  | ||||||
|                 def is_synced( |  | ||||||
|                     src: ShmArray, |  | ||||||
|                     dst: ShmArray |  | ||||||
|                 ) -> tuple[bool, int, int]: |  | ||||||
|                     ''' |  | ||||||
|                     Predicate to dertmine if a destination FSP |  | ||||||
|                     output array is aligned to its source array. |  | ||||||
| 
 |  | ||||||
|                     ''' |  | ||||||
|                     step_diff = src.index - dst.index |  | ||||||
|                     len_diff = abs(len(src.array) - len(dst.array)) |  | ||||||
|                     return not ( |  | ||||||
|                         # the source is likely backfilling and we must |  | ||||||
|                         # sync history calculations |  | ||||||
|                         len_diff > 2 |  | ||||||
| 
 |  | ||||||
|                         # we aren't step synced to the source and may be |  | ||||||
|                         # leading/lagging by a step |  | ||||||
|                         or step_diff > 1 |  | ||||||
|                         or step_diff < 0 |  | ||||||
|                     ), step_diff, len_diff |  | ||||||
| 
 |  | ||||||
|                 async def poll_and_sync_to_step( |  | ||||||
|                     tracker: TaskTracker, |  | ||||||
|                     src: ShmArray, |  | ||||||
|                     dst: ShmArray, |  | ||||||
| 
 |  | ||||||
|                 ) -> tuple[TaskTracker, int]: |  | ||||||
| 
 |  | ||||||
|                     synced, step_diff, _ = is_synced(src, dst) |  | ||||||
|                     while not synced: |  | ||||||
|                         tracker, index = await resync(tracker) |  | ||||||
|                         synced, step_diff, _ = is_synced(src, dst) |  | ||||||
| 
 |  | ||||||
|                     return tracker, step_diff |  | ||||||
| 
 |  | ||||||
|                 s, step, ld = is_synced(src, dst) |  | ||||||
| 
 | 
 | ||||||
|                     # detect sample period step for subscription to increment |                     # detect sample period step for subscription to increment | ||||||
|                     # signal |                     # signal | ||||||
|                 times = src.array['time'] |                     times = src.rt_shm.array['time'] | ||||||
|                     if len(times) > 1: |                     if len(times) > 1: | ||||||
|                         last_ts = times[-1] |                         last_ts = times[-1] | ||||||
|                     delay_s = float(last_ts - times[times != last_ts][-1]) |                         delay_s: float = float(last_ts - times[times != last_ts][-1]) | ||||||
|                     else: |                     else: | ||||||
|                         # our default "HFT" sample rate. |                         # our default "HFT" sample rate. | ||||||
|                     delay_s = _default_delay_s |                         delay_s: float = _default_delay_s | ||||||
| 
 | 
 | ||||||
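The sample-period detection just above diffs the last timestamp against the most recent *distinct* timestamp (the last row may still be mutating); a `numpy` sketch with fabricated epoch seconds:

import numpy as np

times = np.array([100., 101., 102., 103., 103.])
last_ts = times[-1]
prev_ts = times[times != last_ts][-1]  # most recent differing stamp
delay_s = float(last_ts - prev_ts)
assert delay_s == 1.0  # => 1s sample rate detected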
|                     # sub and increment the underlying shared memory buffer |                     # sub and increment the underlying shared memory buffer | ||||||
|                     # on every step msg received from the global `samplerd` |                     # on every step msg received from the global `samplerd` | ||||||
|                     # service. |                     # service. | ||||||
|                 async with open_sample_stream(float(delay_s)) as istream: |                     async with open_sample_stream( | ||||||
|  |                         float(delay_s) | ||||||
|  |                     ) as istream: | ||||||
| 
 | 
 | ||||||
|                         profiler(f'{func_name}: sample stream up') |                         profiler(f'{func_name}: sample stream up') | ||||||
|                         profiler.finish() |                         profiler.finish() | ||||||
|  | @ -469,13 +578,9 @@ async def cascade( | ||||||
|                             # respawn the compute task if the source |                             # respawn the compute task if the source | ||||||
|                             # array has been updated such that we compute |                             # array has been updated such that we compute | ||||||
|                             # new history from the (prepended) source. |                             # new history from the (prepended) source. | ||||||
|                         synced, step_diff, _ = is_synced(src, dst) |                             synced, step_diff, _ = casc.is_synced() | ||||||
|                             if not synced: |                             if not synced: | ||||||
|                             tracker, step_diff = await poll_and_sync_to_step( |                                 step_diff: int = await casc.poll_and_sync_to_step() | ||||||
|                                 tracker, |  | ||||||
|                                 src, |  | ||||||
|                                 dst, |  | ||||||
|                             ) |  | ||||||
| 
 | 
 | ||||||
|                                 # skip adding a last bar since we should already |                                 # skip adding a last bar since we should already | ||||||
|                                 # be step aligned |                                 # be step aligned | ||||||
|  | @ -483,7 +588,7 @@ async def cascade( | ||||||
|                                     continue |                                     continue | ||||||
| 
 | 
 | ||||||
|                             # read out last shm row, copy and write new row |                             # read out last shm row, copy and write new row | ||||||
|                         array = dst.array |                             array = dst_shm.array | ||||||
| 
 | 
 | ||||||
|                             # some metrics like vlm should be reset |                             # some metrics like vlm should be reset | ||||||
|                             # to zero every step. |                             # to zero every step. | ||||||
|  | @ -492,14 +597,14 @@ async def cascade( | ||||||
|                             else: |                             else: | ||||||
|                                 last = array[-1:].copy() |                                 last = array[-1:].copy() | ||||||
| 
 | 
 | ||||||
|                         dst.push(last) |                             dst.rt_shm.push(last) | ||||||
| 
 | 
 | ||||||
|                             # sync with source buffer's time step |                             # sync with source buffer's time step | ||||||
|                         src_l2 = src.array[-2:] |                             src_l2 = src_shm.array[-2:] | ||||||
|                             src_li, src_lt = src_l2[-1][['index', 'time']] |                             src_li, src_lt = src_l2[-1][['index', 'time']] | ||||||
|                             src_2li, src_2lt = src_l2[-2][['index', 'time']] |                             src_2li, src_2lt = src_l2[-2][['index', 'time']] | ||||||
|                         dst._array['time'][src_li] = src_lt |                             dst_shm._array['time'][src_li] = src_lt | ||||||
|                         dst._array['time'][src_2li] = src_2lt |                             dst_shm._array['time'][src_2li] = src_2lt | ||||||
| 
 | 
 | ||||||
|                             # last2 = dst.array[-2:] |                             # last2 = dst.array[-2:] | ||||||
|                             # if ( |                             # if ( | ||||||
|  |  | ||||||
|  | @ -14,49 +14,45 @@ | ||||||
| # You should have received a copy of the GNU Affero General Public License | # You should have received a copy of the GNU Affero General Public License | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
| 
 | 
 | ||||||
| """ | ''' | ||||||
| Actor-runtime service orchestration machinery. | Actor runtime primitives and (distributed) service APIs for: | ||||||
| 
 | 
 | ||||||
| """ | - daemon-service mgmt: `_daemon` (i.e. low-level spawn and supervise machinery | ||||||
| from __future__ import annotations |   for sub-actors like `brokerd`, `emsd`, `datad`, etc.) | ||||||
| 
 | 
 | ||||||
| from ._mngr import Services | - service-actor supervision (via `trio` tasks) API: `._mngr` | ||||||
| from ._registry import (  # noqa | 
 | ||||||
|     _tractor_kwargs, | - discovery interface (via light wrapping around `tractor`'s built-in | ||||||
|     _default_reg_addr, |   prot): `._registry` | ||||||
|     _default_registry_host, | 
 | ||||||
|     _default_registry_port, | - `docker` cntr SC supervision for use with `trio`: `_ahab` | ||||||
|     open_registry, |   - wrappers for marketstore and elasticsearch dbs | ||||||
|     find_service, |   => TODO: maybe to (re)move elsewhere? | ||||||
|     check_for_service, | 
 | ||||||
|  | ''' | ||||||
|  | from ._mngr import Services as Services | ||||||
|  | from ._registry import ( | ||||||
|  |     _tractor_kwargs as _tractor_kwargs, | ||||||
|  |     _default_reg_addr as _default_reg_addr, | ||||||
|  |     _default_registry_host as _default_registry_host, | ||||||
|  |     _default_registry_port as _default_registry_port, | ||||||
|  | 
 | ||||||
|  |     open_registry as open_registry, | ||||||
|  |     find_service as find_service, | ||||||
|  |     check_for_service as check_for_service, | ||||||
| ) | ) | ||||||
| from ._daemon import (  # noqa | from ._daemon import ( | ||||||
|     maybe_spawn_daemon, |     maybe_spawn_daemon as maybe_spawn_daemon, | ||||||
|     spawn_emsd, |     spawn_emsd as spawn_emsd, | ||||||
|     maybe_open_emsd, |     maybe_open_emsd as maybe_open_emsd, | ||||||
| ) | ) | ||||||
| from ._actor_runtime import ( | from ._actor_runtime import ( | ||||||
|     open_piker_runtime, |     open_piker_runtime as open_piker_runtime, | ||||||
|     maybe_open_pikerd, |     maybe_open_pikerd as maybe_open_pikerd, | ||||||
|     open_pikerd, |     open_pikerd as open_pikerd, | ||||||
|     get_tractor_runtime_kwargs, |     get_runtime_vars as get_runtime_vars, | ||||||
| ) | ) | ||||||
| from ..brokers._daemon import ( | from ..brokers._daemon import ( | ||||||
|     spawn_brokerd, |     spawn_brokerd as spawn_brokerd, | ||||||
|     maybe_spawn_brokerd, |     maybe_spawn_brokerd as maybe_spawn_brokerd, | ||||||
| ) | ) | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| __all__ = [ |  | ||||||
|     'check_for_service', |  | ||||||
|     'Services', |  | ||||||
|     'maybe_spawn_daemon', |  | ||||||
|     'spawn_brokerd', |  | ||||||
|     'maybe_spawn_brokerd', |  | ||||||
|     'spawn_emsd', |  | ||||||
|     'maybe_open_emsd', |  | ||||||
|     'open_piker_runtime', |  | ||||||
|     'maybe_open_pikerd', |  | ||||||
|     'open_pikerd', |  | ||||||
|     'get_tractor_runtime_kwargs', |  | ||||||
| ] |  | ||||||
|  |  | ||||||
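The `name as name` import style adopted above is the explicit re-export idiom from PEP 484: with implicit re-exports disabled (eg. mypy's `--no-implicit-reexport` / strict mode) only redundantly-aliased imports count as part of a package's public API, which is why the old `# noqa` markers and `__all__` list can be dropped. A minimal sketch (module names hypothetical):

# pkg/__init__.py
from ._impl import Thing as Thing  # re-exported: part of the public API

# a bare import would *not* be re-exported under strict checking:
# from ._impl import _helper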
|  | @ -45,7 +45,7 @@ from ._registry import (  # noqa | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def get_tractor_runtime_kwargs() -> dict[str, Any]: | def get_runtime_vars() -> dict[str, Any]: | ||||||
|     ''' |     ''' | ||||||
|     Deliver ``tractor`` related runtime variables in a `dict`. |     Deliver ``tractor`` related runtime variables in a `dict`. | ||||||
| 
 | 
 | ||||||
|  | @ -56,6 +56,8 @@ def get_tractor_runtime_kwargs() -> dict[str, Any]: | ||||||
| @acm | @acm | ||||||
| async def open_piker_runtime( | async def open_piker_runtime( | ||||||
|     name: str, |     name: str, | ||||||
|  |     registry_addrs: list[tuple[str, int]] = [], | ||||||
|  | 
 | ||||||
|     enable_modules: list[str] = [], |     enable_modules: list[str] = [], | ||||||
|     loglevel: Optional[str] = None, |     loglevel: Optional[str] = None, | ||||||
| 
 | 
 | ||||||
|  | @ -63,8 +65,6 @@ async def open_piker_runtime( | ||||||
|     # for data daemons when running in production. |     # for data daemons when running in production. | ||||||
|     debug_mode: bool = False, |     debug_mode: bool = False, | ||||||
| 
 | 
 | ||||||
|     registry_addr: None | tuple[str, int] = None, |  | ||||||
| 
 |  | ||||||
|     # TODO: once we have `rsyscall` support we will read a config |     # TODO: once we have `rsyscall` support we will read a config | ||||||
|     # and spawn the service tree distributed per that. |     # and spawn the service tree distributed per that. | ||||||
|     start_method: str = 'trio', |     start_method: str = 'trio', | ||||||
|  | @ -74,7 +74,7 @@ async def open_piker_runtime( | ||||||
| 
 | 
 | ||||||
| ) -> tuple[ | ) -> tuple[ | ||||||
|     tractor.Actor, |     tractor.Actor, | ||||||
|     tuple[str, int], |     list[tuple[str, int]], | ||||||
| ]: | ]: | ||||||
|     ''' |     ''' | ||||||
|     Start a piker actor who's runtime will automatically sync with |     Start a piker actor who's runtime will automatically sync with | ||||||
|  | @ -84,21 +84,31 @@ async def open_piker_runtime( | ||||||
|     a root actor. |     a root actor. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  |     # check for existing runtime, boot it | ||||||
|  |     # if not already running. | ||||||
|     try: |     try: | ||||||
|         # check for existing runtime |         actor = tractor.current_actor() | ||||||
|         actor = tractor.current_actor().uid |  | ||||||
| 
 |  | ||||||
|     except tractor._exceptions.NoRuntime: |     except tractor._exceptions.NoRuntime: | ||||||
|         tractor._state._runtime_vars[ |         tractor._state._runtime_vars[ | ||||||
|             'piker_vars'] = tractor_runtime_overrides |             'piker_vars' | ||||||
|  |         ] = tractor_runtime_overrides | ||||||
| 
 | 
 | ||||||
|         registry_addr = registry_addr or _default_reg_addr |         # NOTE: if no registrar list is passed, use the default of | ||||||
|  |         # just setting it to the root actor on localhost. | ||||||
|  |         registry_addrs = ( | ||||||
|  |             registry_addrs | ||||||
|  |             or [_default_reg_addr] | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         if ems := tractor_kwargs.pop('enable_modules', None): | ||||||
|  |             enable_modules.extend(ems) | ||||||
| 
 | 
 | ||||||
|         async with ( |         async with ( | ||||||
|             tractor.open_root_actor( |             tractor.open_root_actor( | ||||||
| 
 | 
 | ||||||
|                 # passed through to ``open_root_actor`` |                 # passed through to ``open_root_actor`` | ||||||
|                 arbiter_addr=registry_addr, |                 registry_addrs=registry_addrs, | ||||||
|                 name=name, |                 name=name, | ||||||
|                 loglevel=loglevel, |                 loglevel=loglevel, | ||||||
|                 debug_mode=debug_mode, |                 debug_mode=debug_mode, | ||||||
|  | @ -110,24 +120,30 @@ async def open_piker_runtime( | ||||||
|                 enable_modules=enable_modules, |                 enable_modules=enable_modules, | ||||||
| 
 | 
 | ||||||
|                 **tractor_kwargs, |                 **tractor_kwargs, | ||||||
|             ) as _, |             ) as actor, | ||||||
| 
 | 
 | ||||||
|             open_registry(registry_addr, ensure_exists=False) as addr, |             open_registry( | ||||||
|  |                 registry_addrs, | ||||||
|  |                 ensure_exists=False, | ||||||
|  |             ) as addrs, | ||||||
|         ): |         ): | ||||||
|             yield ( |             assert actor is tractor.current_actor() | ||||||
|                 tractor.current_actor(), |  | ||||||
|                 addr, |  | ||||||
|             ) |  | ||||||
|     else: |  | ||||||
|         async with open_registry(registry_addr) as addr: |  | ||||||
|             yield ( |             yield ( | ||||||
|                 actor, |                 actor, | ||||||
|                 addr, |                 addrs, | ||||||
|  |             ) | ||||||
|  |     else: | ||||||
|  |         async with open_registry( | ||||||
|  |             registry_addrs | ||||||
|  |         ) as addrs: | ||||||
|  |             yield ( | ||||||
|  |                 actor, | ||||||
|  |                 addrs, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| _root_dname = 'pikerd' | _root_dname: str = 'pikerd' | ||||||
| _root_modules = [ | _root_modules: list[str] = [ | ||||||
|     __name__, |     __name__, | ||||||
|     'piker.service._daemon', |     'piker.service._daemon', | ||||||
|     'piker.brokers._daemon', |     'piker.brokers._daemon', | ||||||
|  | @ -141,13 +157,13 @@ _root_modules = [ | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def open_pikerd( | async def open_pikerd( | ||||||
|  |     registry_addrs: list[tuple[str, int]], | ||||||
| 
 | 
 | ||||||
|     loglevel: str | None = None, |     loglevel: str | None = None, | ||||||
| 
 | 
 | ||||||
|     # XXX: you should pretty much never want debug mode |     # XXX: you should pretty much never want debug mode | ||||||
|     # for data daemons when running in production. |     # for data daemons when running in production. | ||||||
|     debug_mode: bool = False, |     debug_mode: bool = False, | ||||||
|     registry_addr: None | tuple[str, int] = None, |  | ||||||
| 
 | 
 | ||||||
|     **kwargs, |     **kwargs, | ||||||
| 
 | 
 | ||||||
|  | @ -159,27 +175,37 @@ async def open_pikerd( | ||||||
|     alive underling services (see below). |     alive underling services (see below). | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  |     # NOTE: for the root daemon we always enable the root | ||||||
|  |     # mod set and we `list.extend()` it into wtv the | ||||||
|  |     # caller requested. | ||||||
|  |     # TODO: make this mod set more strict? | ||||||
|  |     # -[ ] eventually we should be able to avoid | ||||||
|  |     #    having the root have more then permissions to spawn other | ||||||
|  |     #    specialized daemons I think? | ||||||
|  |     ems: list[str] = kwargs.setdefault('enable_modules', []) | ||||||
|  |     ems.extend(_root_modules) | ||||||
|  | 
 | ||||||
|     async with ( |     async with ( | ||||||
|         open_piker_runtime( |         open_piker_runtime( | ||||||
| 
 | 
 | ||||||
|             name=_root_dname, |             name=_root_dname, | ||||||
|             # TODO: eventually we should be able to avoid |  | ||||||
|             # having the root have more then permissions to |  | ||||||
|             # spawn other specialized daemons I think? |  | ||||||
|             enable_modules=_root_modules, |  | ||||||
|             loglevel=loglevel, |             loglevel=loglevel, | ||||||
|             debug_mode=debug_mode, |             debug_mode=debug_mode, | ||||||
|             registry_addr=registry_addr, |             registry_addrs=registry_addrs, | ||||||
| 
 | 
 | ||||||
|             **kwargs, |             **kwargs, | ||||||
| 
 | 
 | ||||||
|         ) as (root_actor, reg_addr), |         ) as ( | ||||||
|  |             root_actor, | ||||||
|  |             reg_addrs, | ||||||
|  |         ), | ||||||
|         tractor.open_nursery() as actor_nursery, |         tractor.open_nursery() as actor_nursery, | ||||||
|         trio.open_nursery() as service_nursery, |         trio.open_nursery() as service_nursery, | ||||||
|     ): |     ): | ||||||
|         if root_actor.accept_addr != reg_addr: |         for addr in reg_addrs: | ||||||
|  |             if addr not in root_actor.accept_addrs: | ||||||
|                 raise RuntimeError( |                 raise RuntimeError( | ||||||
|                 f'`pikerd` failed to bind on {reg_addr}!\n' |                     f'`pikerd` failed to bind on {addr}!\n' | ||||||
|                     'Maybe you have another daemon already running?' |                     'Maybe you have another daemon already running?' | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|  | @ -225,9 +251,9 @@ async def open_pikerd( | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def maybe_open_pikerd( | async def maybe_open_pikerd( | ||||||
|     loglevel: Optional[str] = None, |     registry_addrs: list[tuple[str, int]] | None = None, | ||||||
|     registry_addr: None | tuple = None, |  | ||||||
| 
 | 
 | ||||||
|  |     loglevel: str | None = None, | ||||||
|     **kwargs, |     **kwargs, | ||||||
| 
 | 
 | ||||||
| ) -> tractor._portal.Portal | ClassVar[Services]: | ) -> tractor._portal.Portal | ClassVar[Services]: | ||||||
|  | @ -253,32 +279,51 @@ async def maybe_open_pikerd( | ||||||
|     #     async with open_portal(chan) as arb_portal: |     #     async with open_portal(chan) as arb_portal: | ||||||
|     #         yield arb_portal |     #         yield arb_portal | ||||||
| 
 | 
 | ||||||
|  |     registry_addrs: list[tuple[str, int]] = ( | ||||||
|  |         registry_addrs | ||||||
|  |         or [_default_reg_addr] | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     pikerd_portal: tractor.Portal | None | ||||||
|     async with ( |     async with ( | ||||||
|         open_piker_runtime( |         open_piker_runtime( | ||||||
|             name=query_name, |             name=query_name, | ||||||
|             registry_addr=registry_addr, |             registry_addrs=registry_addrs, | ||||||
|             loglevel=loglevel, |             loglevel=loglevel, | ||||||
|             **kwargs, |             **kwargs, | ||||||
|         ) as _, |         ) as (actor, addrs), | ||||||
|  |     ): | ||||||
|  |         if _root_dname in actor.uid: | ||||||
|  |             yield None | ||||||
|  |             return | ||||||
| 
 | 
 | ||||||
|         tractor.find_actor( |         # NOTE: IFF running in disti mode, try to attach to any | ||||||
|  |         # existing (host-local) `pikerd`. | ||||||
|  |         else: | ||||||
|  |             async with tractor.find_actor( | ||||||
|                 _root_dname, |                 _root_dname, | ||||||
|             arbiter_sockaddr=registry_addr, |                 registry_addrs=registry_addrs, | ||||||
|         ) as portal |                 only_first=True, | ||||||
|     ): |                 # raise_on_none=True, | ||||||
|         # connect to any existing daemon presuming |             ) as pikerd_portal: | ||||||
|         # its registry socket was selected. | 
 | ||||||
|         if ( |                 # connect to any existing remote daemon presuming its | ||||||
|             portal is not None |                 # registry socket was selected. | ||||||
|         ): |                 if pikerd_portal is not None: | ||||||
|             yield portal | 
 | ||||||
|  |                     # sanity check that we are actually connecting to | ||||||
|  |                     # a remote process and not ourselves. | ||||||
|  |                     assert actor.uid != pikerd_portal.channel.uid | ||||||
|  |                     assert registry_addrs | ||||||
|  | 
 | ||||||
|  |                     yield pikerd_portal | ||||||
|                     return |                     return | ||||||
| 
 | 
 | ||||||
|     # presume pikerd role since no daemon could be found at |     # presume pikerd role since no daemon could be found at | ||||||
|     # configured address |     # configured address | ||||||
|     async with open_pikerd( |     async with open_pikerd( | ||||||
|         loglevel=loglevel, |         loglevel=loglevel, | ||||||
|         registry_addr=registry_addr, |         registry_addrs=registry_addrs, | ||||||
| 
 | 
 | ||||||
|         # passthrough to ``tractor`` init |         # passthrough to ``tractor`` init | ||||||
|         **kwargs, |         **kwargs, | ||||||
|  |  | ||||||
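Note: with this change the runtime API goes from a single `registry_addr`
tuple to a `registry_addrs` list and yields an `(actor, addrs)` pair. A
minimal caller-side sketch (the address literal is illustrative, not from
this diff):

    import trio
    import tractor
    from piker.service import open_piker_runtime

    async def main():
        # boot (or attach to) a runtime which registers with every
        # listed registrar, not just one.
        async with open_piker_runtime(
            'my_actor',
            registry_addrs=[('127.0.0.1', 6116)],
        ) as (actor, addrs):
            assert actor is tractor.current_actor()
            print(f'registered @ {addrs}')

    trio.run(main)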
@@ -15,8 +15,8 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 '''
-Supervisor for ``docker`` with included async and SC wrapping
-to ensure a cancellable container lifetime system.
+Supervisor for ``docker`` with included async and SC wrapping to
+ensure a cancellable container lifetime system.

 '''
 from __future__ import annotations
@@ -70,7 +70,10 @@ async def maybe_spawn_daemon(
     lock = Services.locks[service_name]
     await lock.acquire()

-    async with find_service(service_name) as portal:
+    async with find_service(
+        service_name,
+        registry_addrs=[('127.0.0.1', 6116)],
+    ) as portal:
         if portal is not None:
             lock.release()
             yield portal
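Note: the `('127.0.0.1', 6116)` literal hard-coded here reads like a
development placeholder. Given that `find_service()` (below) already
falls back to the boot-time `Registry.addrs` set when no addrs are
passed, a sketch of the presumably intended call:

    # rely on `find_service()`'s own fallback to the previously
    # applied boot-time registrar set instead of a fixed port:
    async with find_service(service_name) as portal:
        ...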
@@ -27,6 +27,12 @@ from typing import (
 import trio
 from trio_typing import TaskStatus
 import tractor
+from tractor import (
+    current_actor,
+    ContextCancelled,
+    Context,
+    Portal,
+)

 from ._util import (
     log,  # sub-sys logger

@@ -38,6 +44,8 @@ from ._util import (
 #   library.
 # - wrap a "remote api" wherein you can get a method proxy
 #   to the pikerd actor for starting services remotely!
+# - prolly rename this to ActorServicesNursery since it spawns
+#   new actors and supervises them to completion?
 class Services:

     actor_n: tractor._supervise.ActorNursery

@@ -47,7 +55,7 @@ class Services:
         str,
         tuple[
             trio.CancelScope,
-            tractor.Portal,
+            Portal,
             trio.Event,
         ]
     ] = {}

@@ -57,12 +65,12 @@ class Services:
     async def start_service_task(
         self,
         name: str,
-        portal: tractor.Portal,
+        portal: Portal,
         target: Callable,
         allow_overruns: bool = False,
         **ctx_kwargs,

-    ) -> (trio.CancelScope, tractor.Context):
+    ) -> (trio.CancelScope, Context):
         '''
         Open a context in a service sub-actor, add to a stack
         that gets unwound at ``pikerd`` teardown.

@@ -101,13 +109,30 @@ class Services:
                         # wait on any context's return value
                         # and any final portal result from the
                         # sub-actor.
-                        ctx_res = await ctx.result()
+                        ctx_res: Any = await ctx.result()

                         # NOTE: blocks indefinitely until cancelled
                         # either by error from the target context
                         # function or by being cancelled here by the
                         # surrounding cancel scope.
                         return (await portal.result(), ctx_res)
+
+                    except ContextCancelled as ctxe:
+                        canceller: tuple[str, str] = ctxe.canceller
+                        our_uid: tuple[str, str] = current_actor().uid
+                        if (
+                            canceller != portal.channel.uid
+                            and
+                            canceller != our_uid
+                        ):
+                            log.cancel(
+                                f'Actor-service {name} was remotely cancelled?\n'
+                                f'remote canceller: {canceller}\n'
+                                f'Keeping {our_uid} alive, ignoring sub-actor cancel..\n'
+                            )
+                        else:
+                            raise

                     finally:
                         await portal.cancel_actor()
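Note: the new handler only swallows a `ContextCancelled` when some
*other* actor (neither this supervisor nor the service itself)
initiated the cancel; otherwise it re-raises. A condensed sketch of
that predicate (uids are `(name, uuid)` tuples in `tractor`):

    def should_ignore_cancel(
        canceller: tuple[str, str],
        service_uid: tuple[str, str],
        our_uid: tuple[str, str],
    ) -> bool:
        # log-and-ignore iff a third-party actor cancelled the
        # service context; re-raise in every other case.
        return (
            canceller != service_uid
            and canceller != our_uid
        )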
@@ -27,6 +27,7 @@ from typing import (
 )

 import tractor
+from tractor import Portal

 from ._util import (
     log,  # sub-sys logger

@@ -46,7 +47,9 @@ _registry: Registry | None = None


 class Registry:
-    addr: None | tuple[str, int] = None
+    # TODO: should this be a set or should we complain
+    # on duplicates?
+    addrs: list[tuple[str, int]] = []

     # TODO: table of uids to sockaddrs
     peers: dict[

@@ -60,69 +63,115 @@ _tractor_kwargs: dict[str, Any] = {}

 @acm
 async def open_registry(
-    addr: None | tuple[str, int] = None,
+    addrs: list[tuple[str, int]],
     ensure_exists: bool = True,

-) -> tuple[str, int]:
+) -> list[tuple[str, int]]:
+    '''
+    Open the service-actor-discovery registry by returning a set of
+    transport socket-addrs to registrar actors which may be
+    contacted and queried for similar addresses for other
+    non-registrar actors.
+
+    '''
     global _tractor_kwargs
     actor = tractor.current_actor()
     uid = actor.uid
+    preset_reg_addrs: list[tuple[str, int]] = Registry.addrs
     if (
-        Registry.addr is not None
-        and addr
+        preset_reg_addrs
+        and addrs
     ):
-        raise RuntimeError(
-            f'`{uid}` registry addr already bound @ {_registry.sockaddr}'
-        )
+        if preset_reg_addrs != addrs:
+            # if any(addr in preset_reg_addrs for addr in addrs):
+            diff: set[tuple[str, int]] = set(preset_reg_addrs) - set(addrs)
+            if diff:
+                log.warning(
+                    f'`{uid}` requested only subset of registrars: {addrs}\n'
+                    f'However there are more @{diff}'
+                )
+            else:
+                raise RuntimeError(
+                    f'`{uid}` has non-matching registrar addresses?\n'
+                    f'request: {addrs}\n'
+                    f'already set: {preset_reg_addrs}'
+                )

     was_set: bool = False

     if (
         not tractor.is_root_process()
-        and Registry.addr is None
+        and not Registry.addrs
     ):
-        Registry.addr = actor._arb_addr
+        Registry.addrs.extend(actor.reg_addrs)

     if (
         ensure_exists
-        and Registry.addr is None
+        and not Registry.addrs
     ):
         raise RuntimeError(
-            f"`{uid}` registry should already exist bug doesn't?"
+            f"`{uid}` registry should already exist but doesn't?"
         )

     if (
-        Registry.addr is None
+        not Registry.addrs
     ):
         was_set = True
-        Registry.addr = addr or _default_reg_addr
+        Registry.addrs = addrs or [_default_reg_addr]

-    _tractor_kwargs['arbiter_addr'] = Registry.addr
+    # NOTE: the only spot this seems currently used is inside
+    # `.ui._exec` which is the (eventual qtloops) bootstrapping
+    # with guest mode.
+    _tractor_kwargs['registry_addrs'] = Registry.addrs

     try:
-        yield Registry.addr
+        yield Registry.addrs
     finally:
         # XXX: always clear the global addr if we set it so that the
         # next (set of) calls will apply whatever new one is passed
         # in.
         if was_set:
-            Registry.addr = None
+            Registry.addrs = None


 @acm
 async def find_service(
     service_name: str,
-) -> tractor.Portal | None:
+    registry_addrs: list[tuple[str, int]] | None = None,

-    async with open_registry() as reg_addr:
+    first_only: bool = True,
+
+) -> (
+    Portal
+    | list[Portal]
+    | None
+):
+
+    reg_addrs: list[tuple[str, int]]
+    async with open_registry(
+        addrs=(
+            registry_addrs
+            # NOTE: if no addr set is passed assume the registry has
+            # already been opened and use the previously applied
+            # startup set.
+            or Registry.addrs
+        ),
+    ) as reg_addrs:
         log.info(f'Scanning for service `{service_name}`')
+
+        maybe_portals: list[Portal] | Portal | None
+
         # attach to existing daemon by name if possible
         async with tractor.find_actor(
             service_name,
-            arbiter_sockaddr=reg_addr,
-        ) as maybe_portal:
-            yield maybe_portal
+            registry_addrs=reg_addrs,
+            only_first=first_only,  # if set only returns single ref
+        ) as maybe_portals:
+            if not maybe_portals:
+                yield None
+                return
+
+            yield maybe_portals


 async def check_for_service(

@@ -133,9 +182,11 @@ async def check_for_service(
     Service daemon "liveness" predicate.

     '''
-    async with open_registry(ensure_exists=False) as reg_addr:
-        async with tractor.query_actor(
+    async with (
+        open_registry(ensure_exists=False) as reg_addr,
+        tractor.query_actor(
             service_name,
             arbiter_sockaddr=reg_addr,
-        ) as sockaddr:
+        ) as sockaddr,
+    ):
         return sockaddr
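Note: two loose ends are visible in this file's new code:
`check_for_service()` still calls `open_registry(ensure_exists=False)`
even though `addrs` is now a required positional, and the `finally`
clause resets the list-typed `Registry.addrs` to `None` rather than
`[]`. A usage sketch of the plural API as it stands (address literal
illustrative):

    async with open_registry(
        addrs=[('127.0.0.1', 6116)],
        ensure_exists=False,
    ) as addrs:
        # discovery now defaults to this same boot-time set
        async with find_service('pikerd') as portal:
            if portal is not None:
                ...  # attached to the first matching registrar entry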
@@ -139,6 +139,13 @@ class StorageClient(
         ...


+class TimeseriesNotFound(Exception):
+    '''
+    No timeseries entry can be found for this backend.
+
+    '''
+
+
 class StorageConnectionError(ConnectionError):
     '''
     Can't connect to the desired tsdb subsys/service.

@@ -169,10 +176,13 @@ async def open_storage_client(
     tsdb_host: str = 'localhost'

     # load root config and any tsdb user defined settings
-    conf, path = config.load('conf', touch_if_dne=True)
+    conf, path = config.load(
+        conf_name='conf',
+        touch_if_dne=True,
+    )

     # TODO: maybe not under a "network" section.. since
-    # no more chitty mkts..
+    # no more chitty `marketstore`..
     tsdbconf: dict = {}
     service_section = conf.get('service')
     if (

@@ -183,8 +193,11 @@ async def open_storage_client(

         # lookup backend tsdb module by name and load any user service
         # settings for connecting to the tsdb service.
-        backend: str = tsdbconf.pop('backend')
-        tsdb_host: str = tsdbconf['host']
+        backend: str = tsdbconf.pop(
+            'name',
+            def_backend,
+        )
+        tsdb_host: str = tsdbconf.get('maddrs', [])

     if backend is None:
         backend: str = def_backend
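Note: the backend lookup now keys on `name` (with a `def_backend`
fallback) and `maddrs` instead of the old `backend`/`host` pair; also
the `str` annotation on `tsdb_host` no longer matches the list returned
by `.get('maddrs', [])`. A sketch of the equivalent lookup on a plain
dict (section shape inferred from the surrounding code, backend name
illustrative):

    tsdbconf = {
        'name': 'nativedb',  # absent => falls back to `def_backend`
        'maddrs': [],        # multiaddrs; replaces the single `host` key
    }
    backend = tsdbconf.pop('name', 'nativedb')
    tsdb_host = tsdbconf.get('maddrs', [])  # a list, despite the annotation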
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0)
+# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -19,10 +19,18 @@ Storage middle-ware CLIs.

 """
 from __future__ import annotations
+# from datetime import datetime
+# from contextlib import (
+#     AsyncExitStack,
+# )
 from pathlib import Path
+from math import copysign
 import time
-from typing import Generator
-# from typing import TYPE_CHECKING
+from types import ModuleType
+from typing import (
+    Any,
+    TYPE_CHECKING,
+)

 import polars as pl
 import numpy as np

@@ -35,24 +43,21 @@ import typer

 from piker.service import open_piker_runtime
 from piker.cli import cli
-from piker.config import get_conf_dir
 from piker.data import (
-    maybe_open_shm_array,
-    def_iohlcv_fields,
     ShmArray,
 )
-from piker.data.history import (
-    _default_hist_size,
-    _default_rt_size,
-)
-from . import (
-    log,
-)
+from piker import tsp
+from piker.data._formatters import BGM
+from . import log
 from . import (
     __tsdbs__,
     open_storage_client,
+    StorageClient,
 )

+if TYPE_CHECKING:
+    from piker.ui._remote_ctl import AnnotCtl
+

 store = typer.Typer()
@@ -77,7 +82,6 @@ def ls(
         async with (
             open_piker_runtime(
                 'tsdb_storage',
-                enable_modules=['piker.service._ahab'],
             ),
         ):
             for i, backend in enumerate(backends):

@@ -99,6 +103,18 @@ def ls(
     trio.run(query_all)


+# TODO: like ls but takes in a pattern and matches
+# @store.command()
+# def search(
+#     patt: str,
+#     backends: list[str] = typer.Argument(
+#         default=None,
+#         help='Storage backends to query, default is all.'
+#     ),
+# ):
+#     ...
+
+
 @store.command()
 def delete(
     symbols: list[str],

@@ -121,7 +137,6 @@ def delete(
         async with (
             open_piker_runtime(
                 'tsdb_storage',
-                enable_modules=['piker.service._ahab']
             ),
             open_storage_client(backend) as (_, client),
             trio.open_nursery() as n,
@@ -142,21 +157,33 @@ def delete(
 def anal(
     fqme: str,
     period: int = 60,
+    pdb: bool = False,

 ) -> np.ndarray:
+    '''
+    Anal-ysis is when you take the data and do stuff to it.
+
+    NOTE: This ONLY loads the offline timeseries data (by default
+    from a parquet file) NOT the in-shm version you might be seeing
+    in a chart.
+
+    '''
     async def main():
         async with (
             open_piker_runtime(
+                # are you a bear or boi?
                 'tsdb_polars_anal',
-                # enable_modules=['piker.service._ahab']
-                debug_mode=True,
+                debug_mode=pdb,
+            ),
+            open_storage_client() as (
+                mod,
+                client,
             ),
-            open_storage_client() as (mod, client),
         ):
             syms: list[str] = await client.list_keys()
-            print(f'{len(syms)} FOUND for {mod.name}')
+            log.info(f'{len(syms)} FOUND for {mod.name}')

+            history: ShmArray  # np buffer format
             (
                 history,
                 first_dt,

@@ -167,179 +194,357 @@ def anal(
             )
             assert first_dt < last_dt

-            src_df = await client.as_df(fqme, period)
-            from piker.data import _timeseries as tsmod
-            df: pl.DataFrame = tsmod.with_dts(src_df)
-            gaps: pl.DataFrame = tsmod.detect_time_gaps(df)
+            null_segs: tuple = tsp.get_null_segs(
+                frame=history,
+                period=period,
+            )
+            # TODO: do tsp queries to backend to fill in missing
+            # history and then prolly write it to tsdb!

-            if not gaps.is_empty():
-                print(f'Gaps found:\n{gaps}')
+            shm_df: pl.DataFrame = await client.as_df(
+                fqme,
+                period,
+            )

+            df: pl.DataFrame  # with dts
+            deduped: pl.DataFrame  # deduplicated dts
+            (
+                df,
+                deduped,
+                diff,
+            ) = tsp.dedupe(
+                shm_df,
+                period=period,
+            )
+
+            write_edits: bool = True
+            if (
+                write_edits
+                and (
+                    diff
+                    or null_segs
+                )
+            ):
+                await tractor.pause()
+                await client.write_ohlcv(
+                    fqme,
+                    ohlcv=deduped,
+                    timeframe=period,
+                )
+
+            else:
                 # TODO: something better with tab completion..
                 # is there something more minimal but nearly as
                 # functional as ipython?
                 await tractor.pause()
+                assert not null_segs

     trio.run(main)
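Note: per its use above, `tsp.dedupe()` hands back the dt-decorated
frame, the deduplicated frame and a count of dropped rows, the last of
which gates the write-back. A condensed sketch:

    wdts, deduped, diff = tsp.dedupe(shm_df, period=60)
    if diff:  # rows were dropped => the stored series was dirty
        await client.write_ohlcv(fqme, ohlcv=deduped, timeframe=60)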
-def iter_dfs_from_shms(fqme: str) -> Generator[
-    tuple[Path, ShmArray, pl.DataFrame],
-    None,
-    None,
-]:
-    # shm buffer size table based on known sample rates
-    sizes: dict[str, int] = {
-        'hist': _default_hist_size,
-        'rt': _default_rt_size,
-    }
-
-    # load all detected shm buffer files which have the
-    # passed FQME pattern in the file name.
-    shmfiles: list[Path] = []
-    shmdir = Path('/dev/shm/')
-
-    for shmfile in shmdir.glob(f'*{fqme}*'):
-        filename: str = shmfile.name
-
-        # skip index files
-        if (
-            '_first' in filename
-            or '_last' in filename
-        ):
-            continue
-
-        assert shmfile.is_file()
-        log.debug(f'Found matching shm buffer file: {filename}')
-        shmfiles.append(shmfile)
-
-    for shmfile in shmfiles:
-
-        # lookup array buffer size based on file suffix
-        # being either .rt or .hist
-        key: str = shmfile.name.rsplit('.')[-1]
-
-        # skip FSP buffers for now..
-        if key not in sizes:
-            continue
-
-        size: int = sizes[key]
-
-        # attach to any shm buffer, load array into polars df,
-        # write to local parquet file.
-        shm, opened = maybe_open_shm_array(
-            key=shmfile.name,
-            size=size,
-            dtype=def_iohlcv_fields,
-            readonly=True,
-        )
-        assert not opened
-        ohlcv = shm.array
-
-        start = time.time()
-
-        # XXX: thanks to this SO answer for this conversion tip:
-        # https://stackoverflow.com/a/72054819
-        df = pl.DataFrame({
-            field_name: ohlcv[field_name]
-            for field_name in ohlcv.dtype.fields
-        })
-        delay: float = round(
-            time.time() - start,
-            ndigits=6,
-        )
-        log.info(
-            f'numpy -> polars conversion took {delay} secs\n'
-            f'polars df: {df}'
-        )
-
-        yield (
-            shmfile,
-            shm,
-            df,
-        )
+async def markup_gaps(
+    fqme: str,
+    timeframe: float,
+    actl: AnnotCtl,
+    wdts: pl.DataFrame,
+    gaps: pl.DataFrame,
+
+) -> dict[int, dict]:
+    '''
+    Remote annotate time-gaps in a dt-fielded ts (normally OHLC)
+    with rectangles.
+
+    '''
+    aids: dict[int, dict] = {}
+    for i in range(gaps.height):
+
+        row: pl.DataFrame = gaps[i]
+
+        # the gap's RIGHT-most bar's OPEN value
+        # at that time (sample) step.
+        iend: int = row['index'][0]
+        # dt: datetime = row['dt'][0]
+        # dt_prev: datetime = row['dt_prev'][0]
+        # dt_end_t: float = dt.timestamp()
+
+        # TODO: can we eventually remove this
+        # once we figure out why the epoch cols
+        # don't match?
+        # TODO: FIX HOW/WHY these aren't matching
+        # and are instead off by 4hours (EST
+        # vs. UTC?!?!)
+        # end_t: float = row['time']
+        # assert (
+        #     dt.timestamp()
+        #     ==
+        #     end_t
+        # )
+
+        # the gap's LEFT-most bar's CLOSE value
+        # at that time (sample) step.
+        prev_r: pl.DataFrame = wdts.filter(
+            pl.col('index') == iend - 1
+        )
+        # XXX: probably a gap in the (newly sorted or de-duplicated)
+        # dt-df, so we might need to re-index first..
+        if prev_r.is_empty():
+            await tractor.pause()
+
+        istart: int = prev_r['index'][0]
+        # dt_start_t: float = dt_prev.timestamp()
+
+        # start_t: float = prev_r['time']
+        # assert (
+        #     dt_start_t
+        #     ==
+        #     start_t
+        # )
+
+        # TODO: implement px-col width measure
+        # and ensure at least as many px-cols
+        # shown per rect as configured by user.
+        # gap_w: float = abs((iend - istart))
+        # if gap_w < 6:
+        #     margin: float = 6
+        #     iend += margin
+        #     istart -= margin
+
+        rect_gap: float = BGM*3/8
+        opn: float = row['open'][0]
+        ro: tuple[float, float] = (
+            # dt_end_t,
+            iend + rect_gap + 1,
+            opn,
+        )
+        cls: float = prev_r['close'][0]
+        lc: tuple[float, float] = (
+            # dt_start_t,
+            istart - rect_gap, # + 1 ,
+            cls,
+        )
+
+        color: str = 'dad_blue'
+        diff: float = cls - opn
+        sgn: float = copysign(1, diff)
+        color: str = {
+            -1: 'buy_green',
+            1: 'sell_red',
+        }[sgn]
+
+        rect_kwargs: dict[str, Any] = dict(
+            fqme=fqme,
+            timeframe=timeframe,
+            start_pos=lc,
+            end_pos=ro,
+            color=color,
+        )
+
+        aid: int = await actl.add_rect(**rect_kwargs)
+        assert aid
+        aids[aid] = rect_kwargs
+
+    # tell chart to redraw all its
+    # graphics view layers Bo
+    await actl.redraw(
+        fqme=fqme,
+        timeframe=timeframe,
+    )
+    return aids
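Note on the up/down coloring above: `copysign(1, cls - opn)` yields
plus/minus 1.0 and float keys hash like their int equals, so the dict
lookup works; a flat gap (`cls == opn`) maps to +1.0 and thus
'sell_red'. Condensed:

    from math import copysign

    def gap_color(prev_close: float, next_open: float) -> str:
        # negative diff => price gapped UP across the closure
        sgn = copysign(1, prev_close - next_open)
        return {-1: 'buy_green', 1: 'sell_red'}[sgn]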
 @store.command()
 def ldshm(
     fqme: str,
-
-    write_parquet: bool = False,
+    write_parquet: bool = True,
+    reload_parquet_to_shm: bool = True,

 ) -> None:
     '''
     Linux ONLY: load any fqme file name matching shm buffer from
     /dev/shm/ into an OHLCV numpy array and polars DataFrame,
-    optionally write to .parquet file.
+    optionally write to offline storage via `.parquet` file.

     '''
     async def main():
+        from piker.ui._remote_ctl import (
+            open_annot_ctl,
+        )
+        actl: AnnotCtl
+        mod: ModuleType
+        client: StorageClient
         async with (
             open_piker_runtime(
                 'polars_boi',
                 enable_modules=['piker.data._sharedmem'],
                 debug_mode=True,
             ),
+            open_storage_client() as (
+                mod,
+                client,
+            ),
+            open_annot_ctl() as actl,
         ):
-            df: pl.DataFrame | None = None
-            for shmfile, shm, src_df in iter_dfs_from_shms(fqme):
+            shm_df: pl.DataFrame | None = None
+            tf2aids: dict[float, dict] = {}
+
+            for (
+                shmfile,
+                shm,
+                # parquet_path,
+                shm_df,
+            ) in tsp.iter_dfs_from_shms(fqme):

-                # compute ohlc properties for naming
                 times: np.ndarray = shm.array['time']
-                secs: float = times[-1] - times[-2]
-                if secs < 1.:
+                d1: float = float(times[-1] - times[-2])
+                d2: float = float(times[-2] - times[-3])
+                med: float = np.median(np.diff(times))
+                if (
+                    d1 < 1.
+                    and d2 < 1.
+                    and med < 1.
+                ):
                     raise ValueError(
                         f'Something is wrong with time period for {shm}:\n{times}'
                     )

-                from piker.data import _timeseries as tsmod
-                df: pl.DataFrame = tsmod.with_dts(src_df)
-                gaps: pl.DataFrame = tsmod.detect_time_gaps(df)
+                period_s: float = float(max(d1, d2, med))

-                # TODO: maybe only optionally enter this depending
-                # on some CLI flags and/or gap detection?
-                if (
-                    not gaps.is_empty()
-                    or secs > 2
-                ):
+                null_segs: tuple = tsp.get_null_segs(
+                    frame=shm.array,
+                    period=period_s,
+                )
+
+                # TODO: call null-seg fixer somehow?
+                if null_segs:
                     await tractor.pause()
+                #     async with (
+                #         trio.open_nursery() as tn,
+                #         mod.open_history_client(
+                #             mkt,
+                #         ) as (get_hist, config),
+                #     ):
+                #         nulls_detected: trio.Event = await tn.start(partial(
+                #             tsp.maybe_fill_null_segments,
+
+                #             shm=shm,
+                #             timeframe=timeframe,
+                #             get_hist=get_hist,
+                #             sampler_stream=sampler_stream,
+                #             mkt=mkt,
+                #         ))
+
+                # over-write back to shm?
+                wdts: pl.DataFrame  # with dts
+                deduped: pl.DataFrame  # deduplicated dts
+                (
+                    wdts,
+                    deduped,
+                    diff,
+                ) = tsp.dedupe(
+                    shm_df,
+                    period=period_s,
+                )
+
+                # detect gaps in the expected (uniform OHLC) sample period
+                step_gaps: pl.DataFrame = tsp.detect_time_gaps(
+                    deduped,
+                    expect_period=period_s,
+                )
+
+                # TODO: by default we always want to mark these up
+                # with rects showing up/down gaps Bo
+                venue_gaps: pl.DataFrame = tsp.detect_time_gaps(
+                    deduped,
+                    expect_period=period_s,
+
+                    # TODO: actually pull the exact duration
+                    # expected for each venue operational period?
+                    gap_dt_unit='days',
+                    gap_thresh=1,
+                )
+
+                # TODO: find the disjoint set of step gaps from
+                # venue (closure) set!
+                # -[ ] do a set diff by checking for the unique
+                #    gap set only in the step_gaps?
+                if (
+                    not venue_gaps.is_empty()
+                    or (
+                        period_s < 60
+                        and not step_gaps.is_empty()
+                    )
+                ):
+                    # write repaired ts to parquet-file?
                     if write_parquet:
-                    timeframe: str = f'{secs}s'
-
-                    datadir: Path = get_conf_dir() / 'nativedb'
-                    if not datadir.is_dir():
-                        datadir.mkdir()
-
-                    path: Path = datadir / f'{fqme}.{timeframe}.parquet'
-
-                    # write to fs
-                    start = time.time()
-                    df.write_parquet(path)
-                    delay: float = round(
-                        time.time() - start,
-                        ndigits=6,
-                    )
-                    log.info(
-                        f'parquet write took {delay} secs\n'
-                        f'file path: {path}'
-                    )
-
-                    # read back from fs
-                    start = time.time()
-                    read_df: pl.DataFrame = pl.read_parquet(path)
-                    delay: float = round(
-                        time.time() - start,
-                        ndigits=6,
-                    )
-                    print(
-                        f'parquet read took {delay} secs\n'
-                        f'polars df: {read_df}'
-                    )
+                        start: float = time.time()
+                        path: Path = await client.write_ohlcv(
+                            fqme,
+                            ohlcv=deduped,
+                            timeframe=period_s,
+                        )
+                        write_delay: float = round(
+                            time.time() - start,
+                            ndigits=6,
+                        )
+
+                        # read back from fs
+                        start: float = time.time()
+                        read_df: pl.DataFrame = pl.read_parquet(path)
+                        read_delay: float = round(
+                            time.time() - start,
+                            ndigits=6,
+                        )
+                        log.info(
+                            f'parquet write took {write_delay} secs\n'
+                            f'file path: {path}'
+                            f'parquet read took {read_delay} secs\n'
+                            f'polars df: {read_df}'
+                        )
+
+                        if reload_parquet_to_shm:
+                            new = tsp.pl2np(
+                                deduped,
+                                dtype=shm.array.dtype,
+                            )
+                            # since normally readonly
+                            shm._array.setflags(
+                                write=int(1),
+                            )
+                            shm.push(
+                                new,
+                                prepend=True,
+                                start=new['index'][-1],
+                                update_first=False,  # don't update ._first
+                            )
+
+                    do_markup_gaps: bool = True
+                    if do_markup_gaps:
+                        new_df: pl.DataFrame = tsp.np2pl(new)
+                        aids: dict = await markup_gaps(
+                            fqme,
+                            period_s,
+                            actl,
+                            new_df,
+                            step_gaps,
+                        )
+                        # last chance manual overwrites in REPL
+                        # await tractor.pause()
+                        assert aids
+                        tf2aids[period_s] = aids
+
+                else:
+                    # allow interaction even when no ts problems.
+                    assert not diff

-            if df is None:
-                log.error(f'No matching shm buffers for {fqme} ?')
+            await tractor.pause()
+            log.info('Exiting TSP shm anal-izer!')
+
+            if shm_df is None:
+                log.error(
+                    f'No matching shm buffers for {fqme} ?'
+                )

     trio.run(main)
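Note: the `do_markup_gaps` branch reads `new`, which is only bound
inside the `write_parquet` plus `reload_parquet_to_shm` path, so
invoking `ldshm` with either flag disabled would hit a `NameError`
once gaps are found. A defensive sketch:

    new: np.ndarray | None = None
    ...
    if do_markup_gaps and new is not None:
        new_df: pl.DataFrame = tsp.np2pl(new)
        ...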
|  | @ -19,7 +19,8 @@ | ||||||
|             call a poor man's tsdb). |             call a poor man's tsdb). | ||||||
| 
 | 
 | ||||||
| AKA a `piker`-native file-system native "time series database" | AKA a `piker`-native file-system native "time series database" | ||||||
| without needing an extra process and no standard TSDB features, YET! | without needing an extra process and no standard TSDB features, | ||||||
|  | YET! | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| # TODO: like there's soo much.. | # TODO: like there's soo much.. | ||||||
|  | @ -55,8 +56,6 @@ from datetime import datetime | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| import time | import time | ||||||
| 
 | 
 | ||||||
| # from bidict import bidict |  | ||||||
| # import tractor |  | ||||||
| import numpy as np | import numpy as np | ||||||
| import polars as pl | import polars as pl | ||||||
| from pendulum import ( | from pendulum import ( | ||||||
|  | @ -64,45 +63,18 @@ from pendulum import ( | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| from piker import config | from piker import config | ||||||
| from piker.data import def_iohlcv_fields | from piker import tsp | ||||||
| from piker.data import ShmArray | from piker.data import ( | ||||||
|  |     def_iohlcv_fields, | ||||||
|  |     ShmArray, | ||||||
|  | ) | ||||||
| from piker.log import get_logger | from piker.log import get_logger | ||||||
|  | from . import TimeseriesNotFound | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| log = get_logger('storage.nativedb') | log = get_logger('storage.nativedb') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # NOTE: thanks to this SO answer for the below conversion routines |  | ||||||
| # to go from numpy struct-arrays to polars dataframes and back: |  | ||||||
| # https://stackoverflow.com/a/72054819 |  | ||||||
| def np2pl(array: np.ndarray) -> pl.DataFrame: |  | ||||||
|     return pl.DataFrame({ |  | ||||||
|         field_name: array[field_name] |  | ||||||
|         for field_name in array.dtype.fields |  | ||||||
|     }) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def pl2np( |  | ||||||
|     df: pl.DataFrame, |  | ||||||
|     dtype: np.dtype, |  | ||||||
| 
 |  | ||||||
| ) -> np.ndarray: |  | ||||||
| 
 |  | ||||||
|     # Create numpy struct array of the correct size and dtype |  | ||||||
|     # and loop through df columns to fill in array fields. |  | ||||||
|     array = np.empty( |  | ||||||
|         df.height, |  | ||||||
|         dtype, |  | ||||||
|     ) |  | ||||||
|     for field, col in zip( |  | ||||||
|         dtype.fields, |  | ||||||
|         df.columns, |  | ||||||
|     ): |  | ||||||
|         array[field] = df.get_column(col).to_numpy() |  | ||||||
| 
 |  | ||||||
|     return array |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def detect_period(shm: ShmArray) -> float: | def detect_period(shm: ShmArray) -> float: | ||||||
|     ''' |     ''' | ||||||
|     Attempt to detect the series time step sampling period |     Attempt to detect the series time step sampling period | ||||||
|  | @ -123,16 +95,19 @@ def detect_period(shm: ShmArray) -> float: | ||||||
| 
 | 
 | ||||||
| def mk_ohlcv_shm_keyed_filepath( | def mk_ohlcv_shm_keyed_filepath( | ||||||
|     fqme: str, |     fqme: str, | ||||||
|     period: float,  # otherwise known as the "timeframe" |     period: float | int,  # otherwise known as the "timeframe" | ||||||
|     datadir: Path, |     datadir: Path, | ||||||
| 
 | 
 | ||||||
| ) -> str: | ) -> Path: | ||||||
| 
 | 
 | ||||||
|     if period < 1.: |     if period < 1.: | ||||||
|         raise ValueError('Sample period should be >= 1.!?') |         raise ValueError('Sample period should be >= 1.!?') | ||||||
| 
 | 
 | ||||||
|     period_s: str = f'{period}s' |     path: Path = ( | ||||||
|     path: Path = datadir / f'{fqme}.ohlcv{period_s}.parquet' |         datadir | ||||||
|  |         / | ||||||
|  |         f'{fqme}.ohlcv{int(period)}s.parquet' | ||||||
|  |     ) | ||||||
|     return path |     return path | ||||||
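For reference, a quick sketch of the resulting on-disk naming convention (the fqme and datadir values below are hypothetical):

    from pathlib import Path

    # illustrative inputs only
    path: Path = mk_ohlcv_shm_keyed_filepath(
        fqme='btcusdt.spot.binance',
        period=60,
        datadir=Path('~/.config/piker/nativedb').expanduser(),
    )
    # -> ~/.config/piker/nativedb/btcusdt.spot.binance.ohlcv60s.parquet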
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -186,7 +161,13 @@ class NativeStorageClient: | ||||||
| 
 | 
 | ||||||
|     def index_files(self): |     def index_files(self): | ||||||
|         for path in self._datadir.iterdir(): |         for path in self._datadir.iterdir(): | ||||||
|             if path.name in {'borked', 'expired',}: |             if ( | ||||||
|  |                 path.is_dir() | ||||||
|  |                 or | ||||||
|  |                 '.parquet' not in str(path) | ||||||
|  |                 # or | ||||||
|  |                 # path.name in {'borked', 'expired',} | ||||||
|  |             ): | ||||||
|                 continue |                 continue | ||||||
| 
 | 
 | ||||||
|             key: str = path.name.removesuffix('.parquet') |             key: str = path.name.removesuffix('.parquet') | ||||||
|  | @ -228,8 +209,21 @@ class NativeStorageClient: | ||||||
|                 fqme, |                 fqme, | ||||||
|                 timeframe, |                 timeframe, | ||||||
|             ) |             ) | ||||||
|         except FileNotFoundError: |         except FileNotFoundError as fnfe: | ||||||
|             return None | 
 | ||||||
|  |             bs_fqme, _, *_ = fqme.rpartition('.') | ||||||
|  | 
 | ||||||
|  |             possible_matches: list[str] = [] | ||||||
|  |             for tskey in self._index: | ||||||
|  |                 if bs_fqme in tskey: | ||||||
|  |                     possible_matches.append(tskey) | ||||||
|  | 
 | ||||||
|  |             match_str: str = '\n'.join(sorted(possible_matches)) | ||||||
|  |             raise TimeseriesNotFound( | ||||||
|  |                 f'No entry for `{fqme}`?\n' | ||||||
|  |                 f'Maybe you need a more specific fqme-key like:\n\n' | ||||||
|  |                 f'{match_str}' | ||||||
|  |             ) from fnfe | ||||||
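The fuzzy-suggestion fallback above keys off the `.rpartition()`-stripped fqme; a sketch of what it computes (the example key is hypothetical):

    fqme: str = 'btcusdt.spot.binance'
    bs_fqme, _, _ = fqme.rpartition('.')  # -> 'btcusdt.spot'
    # any indexed ts-key containing 'btcusdt.spot' is then offered
    # as a suggestion in the raised `TimeseriesNotFound` message.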
| 
 | 
 | ||||||
|         times = array['time'] |         times = array['time'] | ||||||
|         return ( |         return ( | ||||||
|  | @ -242,6 +236,7 @@ class NativeStorageClient: | ||||||
|         self, |         self, | ||||||
|         fqme: str, |         fqme: str, | ||||||
|         period: float, |         period: float, | ||||||
|  | 
 | ||||||
|     ) -> Path: |     ) -> Path: | ||||||
|         return mk_ohlcv_shm_keyed_filepath( |         return mk_ohlcv_shm_keyed_filepath( | ||||||
|             fqme=fqme, |             fqme=fqme, | ||||||
|  | @ -249,6 +244,23 @@ class NativeStorageClient: | ||||||
|             datadir=self._datadir, |             datadir=self._datadir, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  |     def _cache_df( | ||||||
|  |         self, | ||||||
|  |         fqme: str, | ||||||
|  |         df: pl.DataFrame, | ||||||
|  |         timeframe: float, | ||||||
|  | 
 | ||||||
|  |     ) -> None: | ||||||
|  |         # cache df for later usage since we (currently) need to | ||||||
|  |         # convert to np.ndarrays to push to our `ShmArray` rt | ||||||
|  |         # buffers subsys but later we may operate entirely on | ||||||
|  |         # pyarrow arrays/buffers so keeping the dfs around for | ||||||
|  |         # a variety of purposes is handy. | ||||||
|  |         self._dfs.setdefault( | ||||||
|  |             timeframe, | ||||||
|  |             {}, | ||||||
|  |         )[fqme] = df | ||||||
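The resulting in-proc cache is a two-level map keyed by timeframe then fqme; sketched with hypothetical entries:

    # self._dfs: dict[float, dict[str, pl.DataFrame]]
    # {
    #     60: {'btcusdt.spot.binance': <pl.DataFrame>},
    #     1:  {'mnq.cme.ib': <pl.DataFrame>},
    # }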
|  | 
 | ||||||
|     async def read_ohlcv( |     async def read_ohlcv( | ||||||
|         self, |         self, | ||||||
|         fqme: str, |         fqme: str, | ||||||
|  | @ -257,13 +269,20 @@ class NativeStorageClient: | ||||||
|         # limit: int = int(200e3), |         # limit: int = int(200e3), | ||||||
| 
 | 
 | ||||||
|     ) -> np.ndarray: |     ) -> np.ndarray: | ||||||
|         path: Path = self.mk_path(fqme, period=int(timeframe)) |         path: Path = self.mk_path( | ||||||
|  |             fqme, | ||||||
|  |             period=int(timeframe), | ||||||
|  |         ) | ||||||
|         df: pl.DataFrame = pl.read_parquet(path) |         df: pl.DataFrame = pl.read_parquet(path) | ||||||
|         self._dfs.setdefault(timeframe, {})[fqme] = df |  | ||||||
| 
 | 
 | ||||||
|  |         self._cache_df( | ||||||
|  |             fqme=fqme, | ||||||
|  |             df=df, | ||||||
|  |             timeframe=timeframe, | ||||||
|  |         ) | ||||||
|         # TODO: filter by end and limit inputs |         # TODO: filter by end and limit inputs | ||||||
|         # times: pl.Series = df['time'] |         # times: pl.Series = df['time'] | ||||||
|         array: np.ndarray = pl2np( |         array: np.ndarray = tsp.pl2np( | ||||||
|             df, |             df, | ||||||
|             dtype=np.dtype(def_iohlcv_fields), |             dtype=np.dtype(def_iohlcv_fields), | ||||||
|         ) |         ) | ||||||
|  | @ -273,11 +292,15 @@ class NativeStorageClient: | ||||||
|         self, |         self, | ||||||
|         fqme: str, |         fqme: str, | ||||||
|         period: int = 60, |         period: int = 60, | ||||||
|  |         load_from_offline: bool = True, | ||||||
| 
 | 
 | ||||||
|     ) -> pl.DataFrame: |     ) -> pl.DataFrame: | ||||||
|         try: |         try: | ||||||
|             return self._dfs[period][fqme] |             return self._dfs[period][fqme] | ||||||
|         except KeyError: |         except KeyError: | ||||||
|  |             if not load_from_offline: | ||||||
|  |                 raise | ||||||
|  | 
 | ||||||
|             await self.read_ohlcv(fqme, period) |             await self.read_ohlcv(fqme, period) | ||||||
|             return self._dfs[period][fqme] |             return self._dfs[period][fqme] | ||||||
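A usage sketch of the new `load_from_offline` flag, assuming an already constructed `client: NativeStorageClient`:

    # lazily read from parquet on a cache miss (default)
    df = await client.as_df('btcusdt.spot.binance', period=60)

    # cache-only lookup: a miss now (re)raises `KeyError`
    # instead of hitting the filesystem.
    df = await client.as_df(
        'btcusdt.spot.binance',
        period=60,
        load_from_offline=False,
    )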
| 
 | 
 | ||||||
|  | @ -299,14 +322,22 @@ class NativeStorageClient: | ||||||
|             datadir=self._datadir, |             datadir=self._datadir, | ||||||
|         ) |         ) | ||||||
|         if isinstance(ohlcv, np.ndarray): |         if isinstance(ohlcv, np.ndarray): | ||||||
|             df: pl.DataFrame = np2pl(ohlcv) |             df: pl.DataFrame = tsp.np2pl(ohlcv) | ||||||
|         else: |         else: | ||||||
|             df = ohlcv |             df = ohlcv | ||||||
| 
 | 
 | ||||||
|  |         self._cache_df( | ||||||
|  |             fqme=fqme, | ||||||
|  |             df=df, | ||||||
|  |             timeframe=timeframe, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|         # TODO: in terms of managing the ultra long term data |         # TODO: in terms of managing the ultra long term data | ||||||
|         # - use a proper profiler to measure all this IO and |         # -[ ] use a proper profiler to measure all this IO and | ||||||
|         #   roundtripping! |         #   roundtripping! | ||||||
|         # - try out ``fastparquet``'s append writing: |         # -[ ] implement parquet append!? see issue: | ||||||
|  |         #   https://github.com/pikers/piker/issues/536 | ||||||
|  |         #   -[ ] try out ``fastparquet``'s append writing: | ||||||
|         #     https://fastparquet.readthedocs.io/en/latest/api.html#fastparquet.write |         #     https://fastparquet.readthedocs.io/en/latest/api.html#fastparquet.write | ||||||
|         start = time.time() |         start = time.time() | ||||||
|         df.write_parquet(path) |         df.write_parquet(path) | ||||||
|  | @ -314,17 +345,16 @@ class NativeStorageClient: | ||||||
|             time.time() - start, |             time.time() - start, | ||||||
|             ndigits=6, |             ndigits=6, | ||||||
|         ) |         ) | ||||||
|         print( |         log.info( | ||||||
|             f'parquet write took {delay} secs\n' |             f'parquet write took {delay} secs\n' | ||||||
|             f'file path: {path}' |             f'file path: {path}' | ||||||
|         ) |         ) | ||||||
|         return path |         return path | ||||||
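Re the parquet-append TODO above, a minimal (untested) sketch using `fastparquet`, which consumes pandas frames, might look like:

    import fastparquet

    # convert the polars frame since `fastparquet` writes pandas
    # frames; only append once the target file already exists.
    fastparquet.write(
        str(path),
        df.to_pandas(),
        append=path.exists(),
    )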
| 
 | 
 | ||||||
| 
 |  | ||||||
|     async def write_ohlcv( |     async def write_ohlcv( | ||||||
|         self, |         self, | ||||||
|         fqme: str, |         fqme: str, | ||||||
|         ohlcv: np.ndarray, |         ohlcv: np.ndarray | pl.DataFrame, | ||||||
|         timeframe: int, |         timeframe: int, | ||||||
| 
 | 
 | ||||||
|     ) -> Path: |     ) -> Path: | ||||||
|  | @ -376,6 +406,8 @@ class NativeStorageClient: | ||||||
|     #     ... |     #     ... | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # TODO: does this need to be async on average? | ||||||
|  | # I guess for any IPC connected backend yes? | ||||||
| @acm | @acm | ||||||
| async def get_client( | async def get_client( | ||||||
| 
 | 
 | ||||||
|  | @ -393,7 +425,7 @@ async def get_client( | ||||||
|     ''' |     ''' | ||||||
|     datadir: Path = config.get_conf_dir() / 'nativedb' |     datadir: Path = config.get_conf_dir() / 'nativedb' | ||||||
|     if not datadir.is_dir(): |     if not datadir.is_dir(): | ||||||
|         log.info(f'Creating `nativedb` director: {datadir}') |         log.info(f'Creating `nativedb` dir: {datadir}') | ||||||
|         datadir.mkdir() |         datadir.mkdir() | ||||||
| 
 | 
 | ||||||
|     client = NativeStorageClient(datadir) |     client = NativeStorageClient(datadir) | ||||||
|  |  | ||||||
|  | @ -18,24 +18,12 @@ | ||||||
| Toolz for debug, profile and trace of the distributed runtime :surfer: | Toolz for debug, profile and trace of the distributed runtime :surfer: | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from .debug import ( | from tractor.devx import ( | ||||||
|     open_crash_handler, |     open_crash_handler as open_crash_handler, | ||||||
| ) | ) | ||||||
| from .profile import ( | from .profile import ( | ||||||
|     Profiler, |     Profiler as Profiler, | ||||||
|     pg_profile_enabled, |     pg_profile_enabled as pg_profile_enabled, | ||||||
|     ms_slower_then, |     ms_slower_then as ms_slower_then, | ||||||
|     timeit, |     timeit as timeit, | ||||||
| ) | ) | ||||||
| 
 |  | ||||||
| # TODO: other mods to include? |  | ||||||
| # - DROP .trionics, already moved into tractor |  | ||||||
| # - move in `piker.calc` |  | ||||||
| 
 |  | ||||||
| __all__: list[str] = [ |  | ||||||
|     'open_crash_handler', |  | ||||||
|     'pg_profile_enabled', |  | ||||||
|     'ms_slower_then', |  | ||||||
|     'Profiler', |  | ||||||
|     'timeit', |  | ||||||
| ] |  | ||||||
|  |  | ||||||
|  | @ -1,40 +0,0 @@ | ||||||
| # piker: trading gear for hackers |  | ||||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) |  | ||||||
| 
 |  | ||||||
| # This program is free software: you can redistribute it and/or modify |  | ||||||
| # it under the terms of the GNU Affero General Public License as published by |  | ||||||
| # the Free Software Foundation, either version 3 of the License, or |  | ||||||
| # (at your option) any later version. |  | ||||||
| 
 |  | ||||||
| # This program is distributed in the hope that it will be useful, |  | ||||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of |  | ||||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  | ||||||
| # GNU Affero General Public License for more details. |  | ||||||
| 
 |  | ||||||
| # You should have received a copy of the GNU Affero General Public License |  | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| Debugger wrappers for `pdbp` as used by `tractor`. |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| from contextlib import contextmanager as cm |  | ||||||
| 
 |  | ||||||
| import pdbp |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| # TODO: better naming and what additionals? |  | ||||||
| # - optional runtime plugging? |  | ||||||
| # - detection for sync vs. async code? |  | ||||||
| # - specialized REPL entry when in distributed mode? |  | ||||||
| @cm |  | ||||||
| def open_crash_handler(): |  | ||||||
|     ''' |  | ||||||
|     Super basic crash handler using `pdbp` debugger. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     try: |  | ||||||
|         yield |  | ||||||
|     except BaseException: |  | ||||||
|         pdbp.xpm() |  | ||||||
|         raise |  | ||||||
										
											
File diff suppressed because it is too large
							|  | @ -0,0 +1,746 @@ | ||||||
|  | # piker: trading gear for hackers | ||||||
|  | # Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers) | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | Financial time series processing utilities usually | ||||||
|  | pertaining to OHLCV style sampled data. | ||||||
|  | 
 | ||||||
|  | Routines are generally implemented in either ``numpy`` or | ||||||
|  | ``polars`` B) | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | from __future__ import annotations | ||||||
|  | from functools import partial | ||||||
|  | from math import ( | ||||||
|  |     ceil, | ||||||
|  |     floor, | ||||||
|  | ) | ||||||
|  | import time | ||||||
|  | from typing import ( | ||||||
|  |     Literal, | ||||||
|  |     # AsyncGenerator, | ||||||
|  |     Generator, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | import numpy as np | ||||||
|  | import polars as pl | ||||||
|  | from pendulum import ( | ||||||
|  |     DateTime, | ||||||
|  |     from_timestamp, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | from ..toolz.profile import ( | ||||||
|  |     Profiler, | ||||||
|  |     pg_profile_enabled, | ||||||
|  |     ms_slower_then, | ||||||
|  | ) | ||||||
|  | from ..log import ( | ||||||
|  |     get_logger, | ||||||
|  |     get_console_log, | ||||||
|  | ) | ||||||
|  | # for "time series processing" | ||||||
|  | subsys: str = 'piker.tsp' | ||||||
|  | 
 | ||||||
|  | log = get_logger(subsys) | ||||||
|  | get_console_log = partial( | ||||||
|  |     get_console_log, | ||||||
|  |     name=subsys, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | # NOTE: union type-defs to handle generic `numpy` and `polars` types | ||||||
|  | # side-by-side Bo | ||||||
|  | # |_ TODO: schema spec typing? | ||||||
|  | #   -[ ] nptyping! | ||||||
|  | #   -[ ] wtv we can with polars? | ||||||
|  | Frame = pl.DataFrame | np.ndarray | ||||||
|  | Seq = pl.Series | np.ndarray | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def slice_from_time( | ||||||
|  |     arr: np.ndarray, | ||||||
|  |     start_t: float, | ||||||
|  |     stop_t: float, | ||||||
|  |     step: float,  # sampler period step-diff | ||||||
|  | 
 | ||||||
|  | ) -> slice: | ||||||
|  |     ''' | ||||||
|  |     Calculate array indices mapped from a time range and return them in | ||||||
|  |     a slice. | ||||||
|  | 
 | ||||||
|  |     Given an input array with an epoch `'time'` series entry, calculate | ||||||
|  |     the indices which span the time range and return them in a slice. | ||||||
|  |     Presume each `'time'` step increment is uniform; when the time | ||||||
|  |     stamp series contains gaps (i.e. the uniform presumption is | ||||||
|  |     untrue) fall back to a ``np.searchsorted()`` binary search to | ||||||
|  |     look up the appropriate index. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     profiler = Profiler( | ||||||
|  |         msg='slice_from_time()', | ||||||
|  |         disabled=not pg_profile_enabled(), | ||||||
|  |         ms_threshold=ms_slower_then, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     times = arr['time'] | ||||||
|  |     t_first = floor(times[0]) | ||||||
|  |     t_last = ceil(times[-1]) | ||||||
|  | 
 | ||||||
|  |     # the greatest index we can return which slices to the | ||||||
|  |     # end of the input array. | ||||||
|  |     read_i_max = arr.shape[0] | ||||||
|  | 
 | ||||||
|  |     # compute (presumed) uniform-time-step index offsets | ||||||
|  |     i_start_t = floor(start_t) | ||||||
|  |     read_i_start = floor(((i_start_t - t_first) // step)) - 1 | ||||||
|  | 
 | ||||||
|  |     i_stop_t = ceil(stop_t) | ||||||
|  | 
 | ||||||
|  |     # XXX: edge case -> always set stop index to last in array whenever | ||||||
|  |     # the input stop time is detected to be greater than the equiv time | ||||||
|  |     # stamp at that last entry. | ||||||
|  |     if i_stop_t >= t_last: | ||||||
|  |         read_i_stop = read_i_max | ||||||
|  |     else: | ||||||
|  |         read_i_stop = ceil((i_stop_t - t_first) // step) + 1 | ||||||
|  | 
 | ||||||
|  |     # always clip outputs to array support | ||||||
|  |     # for read start: | ||||||
|  |     # - never allow a start < the 0 index | ||||||
|  |     # - never allow an end index > the read array len | ||||||
|  |     read_i_start = min( | ||||||
|  |         max(0, read_i_start), | ||||||
|  |         read_i_max - 1, | ||||||
|  |     ) | ||||||
|  |     read_i_stop = max( | ||||||
|  |         0, | ||||||
|  |         min(read_i_stop, read_i_max), | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # check for larger-then-latest calculated index for given start | ||||||
|  |     # time, in which case we do a binary search for the correct index. | ||||||
|  |     # NOTE: this is usually the result of a time series with time gaps | ||||||
|  |     # where it is expected that each index step maps to a uniform step | ||||||
|  |     # in the time stamp series. | ||||||
|  |     t_iv_start = times[read_i_start] | ||||||
|  |     if ( | ||||||
|  |         t_iv_start > i_start_t | ||||||
|  |     ): | ||||||
|  |         # do a binary search for the best index mapping to ``start_t`` | ||||||
|  |         # given we measured an overshoot using the uniform-time-step | ||||||
|  |         # calculation from above. | ||||||
|  | 
 | ||||||
|  |         # TODO: once we start caching these per source-array, | ||||||
|  |         # we can just overwrite ``read_i_start`` directly. | ||||||
|  |         new_read_i_start = np.searchsorted( | ||||||
|  |             times, | ||||||
|  |             i_start_t, | ||||||
|  |             side='left', | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         # TODO: minimize binary search work as much as possible: | ||||||
|  |         # - cache these remap values which compensate for gaps in the | ||||||
|  |         #   uniform time step basis where we calc a later start | ||||||
|  |         #   index for the given input ``start_t``. | ||||||
|  |         # - can we shorten the input search sequence by heuristic? | ||||||
|  |         #   up_to_arith_start = index[:read_i_start] | ||||||
|  | 
 | ||||||
|  |         if ( | ||||||
|  |             new_read_i_start <= read_i_start | ||||||
|  |         ): | ||||||
|  |             # t_diff = t_iv_start - start_t | ||||||
|  |             # print( | ||||||
|  |             #     f"WE'RE CUTTING OUT TIME - STEP:{step}\n" | ||||||
|  |             #     f'start_t:{start_t} -> 0index start_t:{t_iv_start}\n' | ||||||
|  |             #     f'diff: {t_diff}\n' | ||||||
|  |             #     f'REMAPPED START i: {read_i_start} -> {new_read_i_start}\n' | ||||||
|  |             # ) | ||||||
|  |             read_i_start = new_read_i_start | ||||||
|  | 
 | ||||||
|  |     t_iv_stop = times[read_i_stop - 1] | ||||||
|  |     if ( | ||||||
|  |         t_iv_stop > i_stop_t | ||||||
|  |     ): | ||||||
|  |         # t_diff = stop_t - t_iv_stop | ||||||
|  |         # print( | ||||||
|  |         #     f"WE'RE CUTTING OUT TIME - STEP:{step}\n" | ||||||
|  |         #     f'calced iv stop:{t_iv_stop} -> stop_t:{stop_t}\n' | ||||||
|  |         #     f'diff: {t_diff}\n' | ||||||
|  |         #     # f'SHOULD REMAP STOP: {read_i_start} -> {new_read_i_start}\n' | ||||||
|  |         # ) | ||||||
|  |         new_read_i_stop = np.searchsorted( | ||||||
|  |             times[read_i_start:], | ||||||
|  |             # times, | ||||||
|  |             i_stop_t, | ||||||
|  |             side='right', | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         if ( | ||||||
|  |             new_read_i_stop <= read_i_stop | ||||||
|  |         ): | ||||||
|  |             read_i_stop = read_i_start + new_read_i_stop + 1 | ||||||
|  | 
 | ||||||
|  |     # sanity checks for range size | ||||||
|  |     # samples = (i_stop_t - i_start_t) // step | ||||||
|  |     # index_diff = read_i_stop - read_i_start + 1 | ||||||
|  |     # if index_diff > (samples + 3): | ||||||
|  |     #     breakpoint() | ||||||
|  | 
 | ||||||
|  |     # read-relative indexes: gives a slice where `shm.array[read_slc]` | ||||||
|  |     # will be the data spanning the input time range `start_t` -> | ||||||
|  |     # `stop_t` | ||||||
|  |     read_slc = slice( | ||||||
|  |         int(read_i_start), | ||||||
|  |         int(read_i_stop), | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     profiler( | ||||||
|  |         'slicing complete' | ||||||
|  |         # f'{start_t} -> {abs_slc.start} | {read_slc.start}\n' | ||||||
|  |         # f'{stop_t} -> {abs_slc.stop} | {read_slc.stop}\n' | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # NOTE: if caller needs absolute buffer indices they can | ||||||
|  |     # slice the buffer abs index like so: | ||||||
|  |     # index = arr['index'] | ||||||
|  |     # abs_indx = index[read_slc] | ||||||
|  |     # abs_slc = slice( | ||||||
|  |     #     int(abs_indx[0]), | ||||||
|  |     #     int(abs_indx[-1]), | ||||||
|  |     # ) | ||||||
|  | 
 | ||||||
|  |     return read_slc | ||||||
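A usage sketch: slice out the most recent hour from a 1s-sampled buffer (the `shm` handle is an assumption):

    import time

    read_slc: slice = slice_from_time(
        arr=shm.array,  # struct-array with an epoch 'time' field
        start_t=time.time() - 3600,
        stop_t=time.time(),
        step=1,  # 1s sampling period
    )
    window = shm.array[read_slc]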
|  | 
 | ||||||
|  | 
 | ||||||
|  | def get_null_segs( | ||||||
|  |     frame: Frame, | ||||||
|  |     period: float,  # sampling step in seconds | ||||||
|  |     imargin: int = 1, | ||||||
|  |     col: str = 'time', | ||||||
|  | 
 | ||||||
|  | ) -> tuple[ | ||||||
|  |     # Seq,  # TODO: can we make it an array-type instead? | ||||||
|  |     list[ | ||||||
|  |         list[int, int], | ||||||
|  |     ], | ||||||
|  |     Seq, | ||||||
|  |     Frame | ||||||
|  | ] | None: | ||||||
|  |     ''' | ||||||
|  |     Detect if there are any zero(-epoch stamped) valued | ||||||
|  |     rows for the provided `col: str` column; by default | ||||||
|  |     presume the 'time' field/column. | ||||||
|  | 
 | ||||||
|  |     Filter to all such zero (time) segments and return | ||||||
|  |     the corresponding frame's zeroed segments: | ||||||
|  | 
 | ||||||
|  |       - gap absolute (in buffer terms) indices-endpoints as | ||||||
|  |         `absi_zsegs` | ||||||
|  |       - abs indices of all rows with zeroed `col` values as `absi_zeros` | ||||||
|  |       - the corresponding frame's row-entries (view) which are | ||||||
|  |         zeroed for the `col` as `zero_t` | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     times: Seq = frame['time'] | ||||||
|  |     zero_pred: Seq = (times == 0) | ||||||
|  | 
 | ||||||
|  |     # NOTE: `.any()` works the same for both `np.ndarray` | ||||||
|  |     # and `pl.Series` bool predicates. | ||||||
|  |     tis_zeros: bool = zero_pred.any() | ||||||
|  | 
 | ||||||
|  |     if not tis_zeros: | ||||||
|  |         return None | ||||||
|  | 
 | ||||||
|  |     # TODO: use ndarray for this?! | ||||||
|  |     absi_zsegs: list[list[int, int]] = [] | ||||||
|  | 
 | ||||||
|  |     if isinstance(frame, np.ndarray): | ||||||
|  |         # view of ONLY the zero segments as one continuous chunk | ||||||
|  |         zero_t: np.ndarray = frame[zero_pred] | ||||||
|  |         # abs indices of said zeroed rows | ||||||
|  |         absi_zeros = zero_t['index'] | ||||||
|  |         # diff of abs index steps between each zeroed row | ||||||
|  |         absi_zdiff: np.ndarray = np.diff(absi_zeros) | ||||||
|  | 
 | ||||||
|  |         # scan for all frame-indices where the | ||||||
|  |         # zeroed-row-abs-index-step-diff is greater than the | ||||||
|  |         # expected increment of 1. | ||||||
|  |         # data  1st zero seg  data  zeros | ||||||
|  |         # ----  ------------  ----  -----  ------  ---- | ||||||
|  |         # ||||..000000000000..||||..00000..||||||..0000 | ||||||
|  |         # ----  ------------  ----  -----  ------  ---- | ||||||
|  |         #       ^zero_t[0]                            ^zero_t[-1] | ||||||
|  |         #                           ^fi_zgaps[0]   ^fi_zgaps[1] | ||||||
|  |         #       ^absi_zsegs[0][0]   ^---^ => absi_zsegs[1]: tuple | ||||||
|  |         #  absi_zsegs[0][1]^ | ||||||
|  |         # | ||||||
|  |         # NOTE: the first entry in `fi_zgaps` is where | ||||||
|  |         # the first (absolute) index step diff is > 1. | ||||||
|  |         # and it is a frame-relative index into `zero_t`. | ||||||
|  |         fi_zgaps = np.argwhere( | ||||||
|  |             absi_zdiff > 1 | ||||||
|  |             # NOTE: the +1 here is to ensure we index to the "start" | ||||||
|  |             # of each segment (if we didn't, the below loop would need | ||||||
|  |             # to be re-written to expect `fi_end_rows`!) | ||||||
|  |         ) + 1 | ||||||
|  |         # the rows from the contiguous zeroed segments which have | ||||||
|  |         # abs-index steps >1 compared to the previous zero row | ||||||
|  |         # (indicating an end of zeroed segment). | ||||||
|  |         fi_zseg_start_rows = zero_t[fi_zgaps] | ||||||
|  | 
 | ||||||
|  |     # TODO: equiv for pl.DataFrame case! | ||||||
|  |     else: | ||||||
|  |         izeros: pl.Series = zero_pred.arg_true() | ||||||
|  |         zero_t: pl.DataFrame = frame[izeros] | ||||||
|  | 
 | ||||||
|  |         absi_zeros = zero_t['index'] | ||||||
|  |         absi_zdiff: pl.Series = absi_zeros.diff() | ||||||
|  |         fi_zgaps = (absi_zdiff > 1).arg_true() | ||||||
|  | 
 | ||||||
|  |     # XXX: our goal (in this func) is to select out slice index | ||||||
|  |     # pairs (zseg0_start, zseg_end) in abs index units for each | ||||||
|  |     # null-segment portion detected throughout entire input frame. | ||||||
|  | 
 | ||||||
|  |     # only up to one null-segment in entire frame? | ||||||
|  |     num_gaps: int = fi_zgaps.size + 1 | ||||||
|  |     if num_gaps < 2: | ||||||
|  |         if absi_zeros.size > 1: | ||||||
|  |             absi_zsegs = [[ | ||||||
|  |                 # TODO: maybe mk these max()/min() limits func | ||||||
|  |                 # consts instead of called more then once? | ||||||
|  |                 max( | ||||||
|  |                     absi_zeros[0] - 1, | ||||||
|  |                     0, | ||||||
|  |                 ), | ||||||
|  |                 # NOTE: need the + 1 to guarantee we index "up to" | ||||||
|  |                 # the next non-null row-datum. | ||||||
|  |                 min( | ||||||
|  |                     absi_zeros[-1] + 1, | ||||||
|  |                     frame['index'][-1], | ||||||
|  |                 ), | ||||||
|  |             ]] | ||||||
|  |         else: | ||||||
|  |             # XXX EDGE CASE: only one null-datum found so | ||||||
|  |             # mark the start abs index as None to trigger | ||||||
|  |             # a full frame-len query to the respective backend? | ||||||
|  |             absi_zsegs = [[ | ||||||
|  |                 # see `get_hist()` in backend, should ALWAYS be | ||||||
|  |                 # able to handle a `start_dt=None`! | ||||||
|  |                 # None, | ||||||
|  |                 None, | ||||||
|  |                 absi_zeros[0] + 1, | ||||||
|  |             ]] | ||||||
|  | 
 | ||||||
|  |     # XXX NOTE XXX: if >= 2 zeroed segments are found, there should | ||||||
|  |     # ALWAYS be more than one zero-segment-abs-index-step-diff row | ||||||
|  |     # in `absi_zdiff`, so loop through all such | ||||||
|  |     # abs-index-step-diffs >1 (i.e. the entries of `absi_zdiff`) | ||||||
|  |     # and add them as the "end index" entries for each segment. | ||||||
|  |     # Then, if NOT iterating the first such segment end, look back | ||||||
|  |     # for the prior segment's zero-segment start index by relatively | ||||||
|  |     # indexing the `zero_t` frame by -1 and grabbing the abs index | ||||||
|  |     # of what should be the prior zero-segment abs start index. | ||||||
|  |     else: | ||||||
|  |         # NOTE: since `absi_zdiff` will never have a row | ||||||
|  |         # corresponding to the first zero-segment's row, we add it | ||||||
|  |         # manually here. | ||||||
|  |         absi_zsegs.append([ | ||||||
|  |             max( | ||||||
|  |                 absi_zeros[0] - 1, | ||||||
|  |                 0, | ||||||
|  |             ), | ||||||
|  |             None, | ||||||
|  |         ]) | ||||||
|  | 
 | ||||||
|  |         # TODO: can we do it with vec ops? | ||||||
|  |         for i, ( | ||||||
|  |             fi,  # frame index of zero-seg start | ||||||
|  |             zseg_start_row,  # full row for ^ | ||||||
|  |         ) in enumerate(zip( | ||||||
|  |             fi_zgaps, | ||||||
|  |             fi_zseg_start_rows, | ||||||
|  |         )): | ||||||
|  |             assert (zseg_start_row == zero_t[fi]).all() | ||||||
|  |             iabs: int = zseg_start_row['index'][0] | ||||||
|  |             absi_zsegs.append([ | ||||||
|  |                 iabs - 1, | ||||||
|  |                 None,  # backfilled on next iter | ||||||
|  |             ]) | ||||||
|  | 
 | ||||||
|  |             # final iter case, backfill FINAL end iabs! | ||||||
|  |             if (i + 1) == fi_zgaps.size: | ||||||
|  |                 absi_zsegs[-1][1] = absi_zeros[-1] + 1 | ||||||
|  | 
 | ||||||
|  |             # NOTE: only after the first segment (due to `.diff()` | ||||||
|  |             # usage above) can we do a lookback to the prior | ||||||
|  |             # segment's end row and determine it's abs index to | ||||||
|  |             # retroactively insert to the prior | ||||||
|  |             # `absi_zsegs[i-1][1]` entry Bo | ||||||
|  |             last_end: int = absi_zsegs[i][1] | ||||||
|  |             if last_end is None: | ||||||
|  |                 prev_zseg_row = zero_t[fi - 1] | ||||||
|  |                 absi_post_zseg = prev_zseg_row['index'][0] + 1 | ||||||
|  |                 # XXX: MUST BACKFILL previous end iabs! | ||||||
|  |                 absi_zsegs[i][1] = absi_post_zseg | ||||||
|  | 
 | ||||||
|  |         else: | ||||||
|  |             if 0 < num_gaps < 2: | ||||||
|  |                 absi_zsegs[-1][1] = min( | ||||||
|  |                     absi_zeros[-1] + 1, | ||||||
|  |                     frame['index'][-1], | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |             iabs_first: int = frame['index'][0] | ||||||
|  |             for start, end in absi_zsegs: | ||||||
|  | 
 | ||||||
|  |                 ts_start: float = times[start - iabs_first] | ||||||
|  |                 ts_end: float = times[end - iabs_first] | ||||||
|  |                 if ( | ||||||
|  |                     (ts_start == 0 and not start == 0) | ||||||
|  |                     or | ||||||
|  |                     ts_end == 0 | ||||||
|  |                 ): | ||||||
|  |                     import pdbp | ||||||
|  |                     pdbp.set_trace() | ||||||
|  | 
 | ||||||
|  |                 assert end | ||||||
|  |                 assert start < end | ||||||
|  | 
 | ||||||
|  |     log.warning( | ||||||
|  |         f'Frame has {len(absi_zsegs)} NULL GAPS!?\n' | ||||||
|  |         f'period: {period}\n' | ||||||
|  |         f'total null samples: {len(zero_t)}\n' | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     return ( | ||||||
|  |         absi_zsegs,  # [start, end] abs slice indices of seg | ||||||
|  |         absi_zeros,  # all abs indices within all null-segs | ||||||
|  |         zero_t,  # sliced-view of all null-segment rows-datums | ||||||
|  |     ) | ||||||
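A tiny worked example on an inline-constructed numpy struct-array:

    import numpy as np

    frame = np.zeros(6, dtype=[('index', int), ('time', float)])
    frame['index'] = np.arange(6)
    frame['time'] = [1., 2., 0., 0., 5., 6.]  # rows 2-3 null-stamped

    segs = get_null_segs(frame, period=1)
    if segs:
        absi_zsegs, absi_zeros, zero_t = segs
        # absi_zsegs -> [[1, 4]]: from the last good row up to the
        # next non-null row; absi_zeros -> [2, 3]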
|  | 
 | ||||||
|  | 
 | ||||||
|  | def iter_null_segs( | ||||||
|  |     timeframe: float, | ||||||
|  |     frame: Frame | None = None, | ||||||
|  |     null_segs: tuple | None = None, | ||||||
|  | 
 | ||||||
|  | ) -> Generator[ | ||||||
|  |     tuple[ | ||||||
|  |         int, int, | ||||||
|  |         int, int, | ||||||
|  |         float, float, | ||||||
|  |         float, float, | ||||||
|  | 
 | ||||||
|  |         # Seq,  # TODO: can we make it an array-type instead? | ||||||
|  |         # list[ | ||||||
|  |         #     list[int, int], | ||||||
|  |         # ], | ||||||
|  |         # Seq, | ||||||
|  |         # Frame | ||||||
|  |     ], | ||||||
|  |     None, | ||||||
|  | ]: | ||||||
|  |     if not ( | ||||||
|  |         null_segs := get_null_segs( | ||||||
|  |             frame, | ||||||
|  |             period=timeframe, | ||||||
|  |         ) | ||||||
|  |     ): | ||||||
|  |         return | ||||||
|  | 
 | ||||||
|  |     absi_pairs_zsegs: list[list[int, int]] | ||||||
|  |     izeros: Seq | ||||||
|  |     zero_t: Frame | ||||||
|  |     ( | ||||||
|  |         absi_pairs_zsegs, | ||||||
|  |         izeros, | ||||||
|  |         zero_t, | ||||||
|  |     ) = null_segs | ||||||
|  | 
 | ||||||
|  |     absi_first: int = frame[0]['index'] | ||||||
|  |     for ( | ||||||
|  |         absi_start, | ||||||
|  |         absi_end, | ||||||
|  |     ) in absi_pairs_zsegs: | ||||||
|  | 
 | ||||||
|  |         fi_end: int = absi_end - absi_first | ||||||
|  |         end_row: Seq = frame[fi_end] | ||||||
|  |         end_t: float = end_row['time'] | ||||||
|  |         end_dt: DateTime = from_timestamp(end_t) | ||||||
|  | 
 | ||||||
|  |         fi_start = None | ||||||
|  |         start_row = None | ||||||
|  |         start_t = None | ||||||
|  |         start_dt = None | ||||||
|  |         if ( | ||||||
|  |             absi_start is not None | ||||||
|  |             and start_t != 0 | ||||||
|  |         ): | ||||||
|  |             fi_start: int = absi_start - absi_first | ||||||
|  |             start_row: Seq = frame[fi_start] | ||||||
|  |             start_t: float = start_row['time'] | ||||||
|  |             start_dt: DateTime = from_timestamp(start_t) | ||||||
|  | 
 | ||||||
|  |         if ( | ||||||
|  |             absi_start is not None | ||||||
|  |             and absi_start < 0 | ||||||
|  |         ): | ||||||
|  |             import pdbp | ||||||
|  |             pdbp.set_trace() | ||||||
|  | 
 | ||||||
|  |         yield ( | ||||||
|  |             absi_start, absi_end,  # abs indices | ||||||
|  |             fi_start, fi_end,  # relative "frame" indices | ||||||
|  |             start_t, end_t, | ||||||
|  |             start_dt, end_dt, | ||||||
|  |         ) | ||||||
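Downstream consumption then looks roughly like (the backfill body is elided):

    for (
        absi_start, absi_end,
        fi_start, fi_end,
        start_t, end_t,
        start_dt, end_dt,
    ) in iter_null_segs(
        timeframe=60,
        frame=frame,
    ):
        # request the missing [start_dt, end_dt) history from the
        # backend and overwrite the zeroed rows here..
        ...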
|  | 
 | ||||||
|  | 
 | ||||||
|  | def with_dts( | ||||||
|  |     df: pl.DataFrame, | ||||||
|  |     time_col: str = 'time', | ||||||
|  | 
 | ||||||
|  | ) -> pl.DataFrame: | ||||||
|  |     ''' | ||||||
|  |     Insert datetime (casted) columns to a (presumably) OHLC sampled | ||||||
|  |     time series with an epoch-time column keyed by `time_col: str`. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     return df.with_columns([ | ||||||
|  |         pl.col(time_col).shift(1).suffix('_prev'), | ||||||
|  |         pl.col(time_col).diff().alias('s_diff'), | ||||||
|  |         pl.from_epoch(pl.col(time_col)).alias('dt'), | ||||||
|  |     ]).with_columns([ | ||||||
|  |         pl.from_epoch( | ||||||
|  |             column=pl.col(f'{time_col}_prev'), | ||||||
|  |         ).alias('dt_prev'), | ||||||
|  |         pl.col('dt').diff().alias('dt_diff'), | ||||||
|  |     ]) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | t_unit = Literal[ | ||||||
|  |     'days', | ||||||
|  |     'hours', | ||||||
|  |     'minutes', | ||||||
|  |     'seconds', | ||||||
|  |     'milliseconds', | ||||||
|  |     'microseconds', | ||||||
|  |     'nanoseconds', | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def detect_time_gaps( | ||||||
|  |     w_dts: pl.DataFrame, | ||||||
|  | 
 | ||||||
|  |     time_col: str = 'time', | ||||||
|  |     # epoch sampling step diff | ||||||
|  |     expect_period: float = 60, | ||||||
|  | 
 | ||||||
|  |     # NOTE: legacy stock mkts have venue operating hours | ||||||
|  |     # and thus gaps normally no more than 1-2 days at | ||||||
|  |     # a time. | ||||||
|  |     gap_thresh: float = 1., | ||||||
|  | 
 | ||||||
|  |     # TODO: allow passing in a frame of operating hours? | ||||||
|  |     # -[ ] durations/ranges for faster legit gap checks? | ||||||
|  |     # XXX -> must be a valid ``polars.Expr.dt.<name>`` | ||||||
|  |     # like 'days', which is a sane default for venue closures | ||||||
|  |     # though it will detect weekend gaps which are normal :o | ||||||
|  |     gap_dt_unit: t_unit | None = None, | ||||||
|  | 
 | ||||||
|  | ) -> pl.DataFrame: | ||||||
|  |     ''' | ||||||
|  |     Filter to OHLC datums which contain sample step gaps. | ||||||
|  | 
 | ||||||
|  |     For eg. legacy markets which have venue close gaps and/or | ||||||
|  |     actual missing data segments. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # first select by any sample-period (in seconds unit) step size | ||||||
|  |     # greater than expected. | ||||||
|  |     step_gaps: pl.DataFrame = w_dts.filter( | ||||||
|  |         pl.col('s_diff').abs() > expect_period | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     if gap_dt_unit is None: | ||||||
|  |         return step_gaps | ||||||
|  | 
 | ||||||
|  |     # NOTE: this flag is to indicate that on this (sampling) time | ||||||
|  |     # scale we expect to only be filtering against larger venue | ||||||
|  |     # closures-scale time gaps. | ||||||
|  |     return step_gaps.filter( | ||||||
|  |         # Second by an arbitrary dt-unit step size | ||||||
|  |         getattr( | ||||||
|  |             pl.col('dt_diff').dt, | ||||||
|  |             gap_dt_unit, | ||||||
|  |         )().abs() > gap_thresh | ||||||
|  |     ) | ||||||
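Composed usage sketch on daily bars, where the input `df` is an assumed OHLCV polars frame with an epoch 'time' column:

    w_dts: pl.DataFrame = with_dts(df)
    gaps: pl.DataFrame = detect_time_gaps(
        w_dts,
        expect_period=60 * 60 * 24,  # daily sampling
        gap_dt_unit='days',  # flag day-scale (venue closure) gaps
        gap_thresh=1.,  # NOTE: will also catch normal weekends
    )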
|  | 
 | ||||||
|  | 
 | ||||||
|  | def detect_price_gaps( | ||||||
|  |     df: pl.DataFrame, | ||||||
|  |     gt_multiplier: float = 2., | ||||||
|  |     price_fields: list[str] = ['high', 'low'], | ||||||
|  | 
 | ||||||
|  | ) -> pl.DataFrame: | ||||||
|  |     ''' | ||||||
|  |     Detect gaps in clearing price over an OHLC series. | ||||||
|  | 
 | ||||||
|  |     2 types of gaps generally exist; up gaps and down gaps: | ||||||
|  | 
 | ||||||
|  |     - UP gap: when any next sample's lo price is strictly greater | ||||||
|  |       than the current sample's hi price. | ||||||
|  | 
 | ||||||
|  |     - DOWN gap: when any next sample's hi price is strictly | ||||||
|  |       less than the current sample's lo price. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # return df.filter( | ||||||
|  |     #     pl.col('high') - ) > expect_period, | ||||||
|  |     # ).select([ | ||||||
|  |     #     pl.dt.datetime(pl.col(time_col).shift(1)).suffix('_previous'), | ||||||
|  |     #     pl.all(), | ||||||
|  |     # ]).select([ | ||||||
|  |     #     pl.all(), | ||||||
|  |     #     (pl.col(time_col) - pl.col(f'{time_col}_previous')).alias('diff'), | ||||||
|  |     # ]) | ||||||
|  |     ... | ||||||
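Until the body above is filled in, a minimal working sketch of the described up/down gap filter (hypothetical, not necessarily the final impl) could be:

    def detect_price_gaps_sketch(
        df: pl.DataFrame,
    ) -> pl.DataFrame:
        return df.with_columns([
            pl.col('high').shift(1).alias('high_prev'),
            pl.col('low').shift(1).alias('low_prev'),
        ]).filter(
            (pl.col('low') > pl.col('high_prev'))  # UP gap
            |
            (pl.col('high') < pl.col('low_prev'))  # DOWN gap
        )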
|  | 
 | ||||||
|  | # TODO: probably just use the null_segs impl above? | ||||||
|  | def detect_vlm_gaps( | ||||||
|  |     df: pl.DataFrame, | ||||||
|  |     col: str = 'volume', | ||||||
|  | 
 | ||||||
|  | ) -> pl.DataFrame: | ||||||
|  | 
 | ||||||
|  |     vnull: pl.DataFrame = df.filter( | ||||||
|  |         pl.col(col) == 0 | ||||||
|  |     ) | ||||||
|  |     return vnull | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def dedupe( | ||||||
|  |     src_df: pl.DataFrame, | ||||||
|  | 
 | ||||||
|  |     time_gaps: pl.DataFrame | None = None, | ||||||
|  |     sort: bool = True, | ||||||
|  |     period: float = 60, | ||||||
|  | 
 | ||||||
|  | ) -> tuple[ | ||||||
|  |     pl.DataFrame,  # with dts | ||||||
|  |     pl.DataFrame,  # with deduplicated dts (aka gap/repeat removal) | ||||||
|  |     int,  # len diff between input and deduped | ||||||
|  | ]: | ||||||
|  |     ''' | ||||||
|  |     Check for time series gaps and, if found, | ||||||
|  |     de-duplicate any datetime entries, check for | ||||||
|  |     a frame height diff and return the newly | ||||||
|  |     dt-deduplicated frame. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     wdts: pl.DataFrame = with_dts(src_df) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     # remove duplicated datetime samples/sections | ||||||
|  |     deduped: pl.DataFrame = wdts.unique( | ||||||
|  |         # subset=['dt'], | ||||||
|  |         subset=['time'], | ||||||
|  |         maintain_order=True, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # maybe sort on any time field | ||||||
|  |     if sort: | ||||||
|  |         deduped = deduped.sort(by='time') | ||||||
|  |         # TODO: detect out-of-order segments which were corrected! | ||||||
|  |         # -[ ] report in log msg | ||||||
|  |         # -[ ] possibly return segment sections which were moved? | ||||||
|  | 
 | ||||||
|  |     diff: int = ( | ||||||
|  |         wdts.height | ||||||
|  |         - | ||||||
|  |         deduped.height | ||||||
|  |     ) | ||||||
|  |     return ( | ||||||
|  |         wdts, | ||||||
|  |         deduped, | ||||||
|  |         diff, | ||||||
|  |     ) | ||||||
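Caller-side the returned height diff makes repair logging trivial:

    wdts, deduped, n_dropped = dedupe(df, period=60)
    if n_dropped:
        log.warning(f'Dropped {n_dropped} duplicated-time rows!')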
|  | 
 | ||||||
|  | 
 | ||||||
|  | def sort_diff( | ||||||
|  |     src_df: pl.DataFrame, | ||||||
|  |     col: str = 'time', | ||||||
|  | 
 | ||||||
|  | ) -> tuple[ | ||||||
|  |     pl.DataFrame,  # with dts | ||||||
|  |     pl.DataFrame,  # sorted | ||||||
|  |     list[int],  # indices of segments that are out-of-order | ||||||
|  | ]: | ||||||
|  |     ser: pl.Series = src_df[col] | ||||||
|  |     sortd: pl.Series = ser.sort() | ||||||
|  |     diff: pl.Series = ser.diff() | ||||||
|  | 
 | ||||||
|  |     sortd_diff: pl.Series = sortd.diff() | ||||||
|  |     i_step_diff = (diff != sortd_diff).arg_true() | ||||||
|  |     frame_reorders: int = i_step_diff.len() | ||||||
|  |     if frame_reorders: | ||||||
|  |         log.warning( | ||||||
|  |             f'Resorted frame on col: {col}\n' | ||||||
|  |             f'{frame_reorders}' | ||||||
|  |         ) | ||||||
|  |         # import pdbp; pdbp.set_trace() | ||||||
|  | 
|  |     # NOTE: (assumed) completion matching the annotated | ||||||
|  |     # return signature above. | ||||||
|  |     return ( | ||||||
|  |         src_df, | ||||||
|  |         sortd.to_frame(), | ||||||
|  |         i_step_diff.to_list(), | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | # NOTE: thanks to this SO answer for the below conversion routines | ||||||
|  | # to go from numpy struct-arrays to polars dataframes and back: | ||||||
|  | # https://stackoverflow.com/a/72054819 | ||||||
|  | def np2pl(array: np.ndarray) -> pl.DataFrame: | ||||||
|  |     start: float = time.time() | ||||||
|  | 
 | ||||||
|  |     df = pl.DataFrame({ | ||||||
|  |         field_name: array[field_name] | ||||||
|  |         for field_name in array.dtype.fields | ||||||
|  |     }) | ||||||
|  |     delay: float = round( | ||||||
|  |         time.time() - start, | ||||||
|  |         ndigits=6, | ||||||
|  |     ) | ||||||
|  |     log.info( | ||||||
|  |         f'numpy -> polars conversion took {delay} secs\n' | ||||||
|  |         f'polars df: {df}' | ||||||
|  |     ) | ||||||
|  |     return df | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def pl2np( | ||||||
|  |     df: pl.DataFrame, | ||||||
|  |     dtype: np.dtype, | ||||||
|  | 
 | ||||||
|  | ) -> np.ndarray: | ||||||
|  | 
 | ||||||
|  |     # Create numpy struct array of the correct size and dtype | ||||||
|  |     # and loop through df columns to fill in array fields. | ||||||
|  |     array = np.empty( | ||||||
|  |         df.height, | ||||||
|  |         dtype, | ||||||
|  |     ) | ||||||
|  |     for field, col in zip( | ||||||
|  |         dtype.fields, | ||||||
|  |         df.columns, | ||||||
|  |     ): | ||||||
|  |         array[field] = df.get_column(col).to_numpy() | ||||||
|  | 
 | ||||||
|  |     return array | ||||||
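The two routines are inverses for our OHLCV struct-arrays; a round-trip sketch (the `shm` handle is an assumption):

    df: pl.DataFrame = np2pl(shm.array)
    arr: np.ndarray = pl2np(
        df,
        dtype=shm.array.dtype,
    )
    assert (arr == shm.array).all()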
|  | @ -21,15 +21,16 @@ Extensions to built-in or (heavily used but 3rd party) friend-lib | ||||||
| types. | types. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
|  | from __future__ import annotations | ||||||
| from collections import UserList | from collections import UserList | ||||||
| from pprint import ( | from pprint import ( | ||||||
|     pformat, |     saferepr, | ||||||
| ) | ) | ||||||
| from typing import Any | from typing import Any | ||||||
| 
 | 
 | ||||||
| from msgspec import ( | from msgspec import ( | ||||||
|     msgpack, |     msgpack, | ||||||
|     Struct, |     Struct as _Struct, | ||||||
|     structs, |     structs, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
|  | @ -62,7 +63,7 @@ class DiffDump(UserList): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class Struct( | class Struct( | ||||||
|     Struct, |     _Struct, | ||||||
| 
 | 
 | ||||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions |     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||||
|     # tag='pikerstruct', |     # tag='pikerstruct', | ||||||
|  | @ -72,9 +73,27 @@ class Struct( | ||||||
|     A "human friendlier" (aka repl buddy) struct subtype. |     A "human friendlier" (aka repl buddy) struct subtype. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  |     def _sin_props(self) -> Iterator[ | ||||||
|  |         tuple[ | ||||||
|  |             structs.FieldInfo, | ||||||
|  |             str, | ||||||
|  |             Any, | ||||||
|  |         ] | ||||||
|  |     ]: | ||||||
|  |         ''' | ||||||
|  |         Iterate over all non-@property fields of this struct. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         fi: structs.FieldInfo | ||||||
|  |         for fi in structs.fields(self): | ||||||
|  |             key: str = fi.name | ||||||
|  |             val: Any = getattr(self, key) | ||||||
|  |             yield fi, key, val | ||||||
|  | 
 | ||||||
|     def to_dict( |     def to_dict( | ||||||
|         self, |         self, | ||||||
|         include_non_members: bool = True, |         include_non_members: bool = True, | ||||||
|  | 
 | ||||||
|     ) -> dict: |     ) -> dict: | ||||||
|         ''' |         ''' | ||||||
|         Like it sounds.. direct delegation to: |         Like it sounds.. direct delegation to: | ||||||
|  | @ -90,16 +109,72 @@ class Struct( | ||||||
| 
 | 
 | ||||||
|         # only return a dict of the struct members |         # only return a dict of the struct members | ||||||
|         # which were provided as input, NOT anything |         # which were provided as input, NOT anything | ||||||
|         # added as `@properties`! |         # added as type-defined `@property` methods! | ||||||
|         sin_props: dict = {} |         sin_props: dict = {} | ||||||
|         for fi in structs.fields(self): |         fi: structs.FieldInfo | ||||||
|             key: str = fi.name |         for fi, k, v in self._sin_props(): | ||||||
|             sin_props[key] = asdict[key] |             sin_props[k] = asdict[k] | ||||||
| 
 | 
 | ||||||
|         return sin_props |         return sin_props | ||||||
| 
 | 
 | ||||||
|     def pformat(self) -> str: |     def pformat( | ||||||
|         return f'Struct({pformat(self.to_dict())})' |         self, | ||||||
|  |         field_indent: int = 2, | ||||||
|  |         indent: int = 0, | ||||||
|  | 
 | ||||||
|  |     ) -> str: | ||||||
|  |         ''' | ||||||
|  |         Recursion-safe `pprint.pformat()` style formatting of | ||||||
|  |         a `msgspec.Struct` for sane reading by a human using a REPL. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         # global whitespace indent | ||||||
|  |         ws: str = ' '*indent | ||||||
|  | 
 | ||||||
|  |         # field whitespace indent | ||||||
|  |         field_ws: str = ' '*(field_indent + indent) | ||||||
|  | 
 | ||||||
|  |         # qtn: str = ws + self.__class__.__qualname__ | ||||||
|  |         qtn: str = self.__class__.__qualname__ | ||||||
|  | 
 | ||||||
|  |         obj_str: str = ''  # accumulator | ||||||
|  |         fi: structs.FieldInfo | ||||||
|  |         k: str | ||||||
|  |         v: Any | ||||||
|  |         for fi, k, v in self._sin_props(): | ||||||
|  | 
 | ||||||
|  |             # TODO: how can we prefer `Literal['option1',  'option2, | ||||||
|  |             # ..]` over .__name__ == `Literal` but still get only the | ||||||
|  |             # latter for simple types like `str | int | None` etc..? | ||||||
|  |             ft: type = fi.type | ||||||
|  |             typ_name: str = getattr(ft, '__name__', str(ft)) | ||||||
|  | 
 | ||||||
|  |             # recurse to get sub-struct's `.pformat()` output Bo | ||||||
|  |             if isinstance(v, Struct): | ||||||
|  |                 val_str: str = v.pformat( | ||||||
|  |                     indent=field_indent + indent, | ||||||
|  |                     field_indent=indent + field_indent, | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |             else:  # the `pprint` recursion-safe format: | ||||||
|  |                 # https://docs.python.org/3.11/library/pprint.html#pprint.saferepr | ||||||
|  |                 val_str: str = saferepr(v) | ||||||
|  | 
 | ||||||
|  |             obj_str += (field_ws + f'{k}: {typ_name} = {val_str},\n') | ||||||
|  | 
 | ||||||
|  |         return ( | ||||||
|  |             f'{qtn}(\n' | ||||||
|  |             f'{obj_str}' | ||||||
|  |             f'{ws})' | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     # TODO: use a pprint.PrettyPrinter instance around ONLY rendering | ||||||
|  |     # inside a known tty? | ||||||
|  |     # def __repr__(self) -> str: | ||||||
|  |     #     ... | ||||||
|  | 
 | ||||||
|  |     # __str__ = __repr__ = pformat | ||||||
|  |     __repr__ = pformat | ||||||
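For instance a simple (hypothetical) struct now renders one field per line, recursion-safely:

    class Order(Struct):
        size: float
        price: float

    print(Order(size=1.0, price=99.5))
    # Order(
    #   size: float = 1.0,
    #   price: float = 99.5,
    # )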
| 
 | 
 | ||||||
|     def copy( |     def copy( | ||||||
|         self, |         self, | ||||||
|  |  | ||||||
|  | @ -14,9 +14,8 @@ | ||||||
| # You should have received a copy of the GNU Affero General Public License | # You should have received a copy of the GNU Affero General Public License | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
| 
 | 
 | ||||||
| """ | ''' | ||||||
| Stuff for your eyes, aka super hawt Qt UI components. | UI components built using `Qt` with major versions swapped in via | ||||||
|  | the import indirection in the `.qt` sub-mod. | ||||||
| 
 | 
 | ||||||
| Currently we only support PyQt5 due to this issue in Pyside2: | ''' | ||||||
| https://bugreports.qt.io/projects/PYSIDE/issues/PYSIDE-1313 |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|  | @ -21,8 +21,10 @@ Anchor funtions for UI placement of annotions. | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
| from typing import Callable, TYPE_CHECKING | from typing import Callable, TYPE_CHECKING | ||||||
| 
 | 
 | ||||||
| from PyQt5.QtCore import QPointF | from piker.ui.qt import ( | ||||||
| from PyQt5.QtWidgets import QGraphicsPathItem |     QPointF, | ||||||
|  |     QGraphicsPathItem, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
|     from ._chart import ChartPlotWidget |     from ._chart import ChartPlotWidget | ||||||
|  |  | ||||||
|  | @ -20,12 +20,22 @@ Annotations for ur faces. | ||||||
| """ | """ | ||||||
| from typing import Callable | from typing import Callable | ||||||
| 
 | 
 | ||||||
| from PyQt5 import QtCore, QtGui, QtWidgets | from pyqtgraph import ( | ||||||
| from PyQt5.QtCore import QPointF, QRectF |     Point, | ||||||
| from PyQt5.QtWidgets import QGraphicsPathItem |     functions as fn, | ||||||
| from pyqtgraph import Point, functions as fn, Color |     Color, | ||||||
|  | ) | ||||||
| import numpy as np | import numpy as np | ||||||
| 
 | 
 | ||||||
|  | from piker.ui.qt import ( | ||||||
|  |     QtCore, | ||||||
|  |     QtGui, | ||||||
|  |     QtWidgets, | ||||||
|  |     QPointF, | ||||||
|  |     QRectF, | ||||||
|  |     QGraphicsPathItem, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| def mk_marker_path( | def mk_marker_path( | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -21,9 +21,11 @@ Main app startup and run. | ||||||
| from functools import partial | from functools import partial | ||||||
| from types import ModuleType | from types import ModuleType | ||||||
| 
 | 
 | ||||||
| from PyQt5.QtCore import QEvent |  | ||||||
| import trio | import trio | ||||||
| 
 | 
 | ||||||
|  | from piker.ui.qt import ( | ||||||
|  |     QEvent, | ||||||
|  | ) | ||||||
| from ..service import maybe_spawn_brokerd | from ..service import maybe_spawn_brokerd | ||||||
| from . import _event | from . import _event | ||||||
| from ._exec import run_qtractor | from ._exec import run_qtractor | ||||||
|  |  | ||||||
|  | @ -23,16 +23,24 @@ from functools import lru_cache | ||||||
| from typing import Callable | from typing import Callable | ||||||
| from math import floor | from math import floor | ||||||
| 
 | 
 | ||||||
| import numpy as np | import polars as pl | ||||||
| import pyqtgraph as pg | import pyqtgraph as pg | ||||||
| from PyQt5 import QtCore, QtGui, QtWidgets |  | ||||||
| from PyQt5.QtCore import QPointF |  | ||||||
| 
 | 
 | ||||||
|  | from piker.ui.qt import ( | ||||||
|  |     QtCore, | ||||||
|  |     QtGui, | ||||||
|  |     QtWidgets, | ||||||
|  |     QPointF, | ||||||
|  |     txt_flag, | ||||||
|  |     align_flag, | ||||||
|  |     px_cache_mode, | ||||||
|  | ) | ||||||
| from . import _pg_overrides as pgo | from . import _pg_overrides as pgo | ||||||
| from ..accounting._mktinfo import float_digits | from ..accounting._mktinfo import float_digits | ||||||
| from ._label import Label | from ._label import Label | ||||||
| from ._style import DpiAwareFont, hcolor, _font | from ._style import DpiAwareFont, hcolor, _font | ||||||
| from ._interaction import ChartView | from ._interaction import ChartView | ||||||
|  | from ._dataviz import Viz | ||||||
| 
 | 
 | ||||||
| _axis_pen = pg.mkPen(hcolor('bracket')) | _axis_pen = pg.mkPen(hcolor('bracket')) | ||||||
| 
 | 
 | ||||||
|  | @ -287,9 +295,7 @@ class DynamicDateAxis(Axis): | ||||||
|     # time formats mapped by seconds between bars |     # time formats mapped by seconds between bars | ||||||
|     tick_tpl = { |     tick_tpl = { | ||||||
|         60 * 60 * 24: '%Y-%b-%d', |         60 * 60 * 24: '%Y-%b-%d', | ||||||
|         60: '%H:%M', |         60: '%Y-%b-%d(%H:%M)', | ||||||
|         30: '%H:%M:%S', |  | ||||||
|         5: '%H:%M:%S', |  | ||||||
|         1: '%H:%M:%S', |         1: '%H:%M:%S', | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|  | @ -305,10 +311,10 @@ class DynamicDateAxis(Axis): | ||||||
|         # XX: ARGGGGG AG:LKSKDJF:LKJSDFD |         # XX: ARGGGGG AG:LKSKDJF:LKJSDFD | ||||||
|         chart = self.pi.chart_widget |         chart = self.pi.chart_widget | ||||||
| 
 | 
 | ||||||
|         viz = chart._vizs[chart.name] |         viz: Viz = chart._vizs[chart.name] | ||||||
|         shm = viz.shm |         shm = viz.shm | ||||||
|         array = shm.array |         array = shm.array | ||||||
|         ifield = viz.index_field |         ifield: str = viz.index_field | ||||||
|         index = array[ifield] |         index = array[ifield] | ||||||
|         i_0, i_l = index[0], index[-1] |         i_0, i_l = index[0], index[-1] | ||||||
| 
 | 
 | ||||||
|  | @ -329,7 +335,7 @@ class DynamicDateAxis(Axis): | ||||||
|             arr_len = index.shape[0] |             arr_len = index.shape[0] | ||||||
|             first = shm._first.value |             first = shm._first.value | ||||||
|             times = array['time'] |             times = array['time'] | ||||||
|             epochs = times[ |             epochs: list[int] = times[ | ||||||
|                 list( |                 list( | ||||||
|                     map( |                     map( | ||||||
|                         int, |                         int, | ||||||
|  | @ -341,23 +347,30 @@ class DynamicDateAxis(Axis): | ||||||
|                 ) |                 ) | ||||||
|             ] |             ] | ||||||
|         else: |         else: | ||||||
|             epochs = list(map(int, indexes)) |             epochs: list[int] = list(map(int, indexes)) | ||||||
| 
 | 
 | ||||||
|         # TODO: **don't** have this hard coded shift to EST |         # TODO: **don't** have this hard coded shift to EST | ||||||
|         # delay = times[-1] - times[-2] |         delay: float = viz.time_step() | ||||||
|         dts = np.array( |         if delay > 1: | ||||||
|  |             # NOTE: use less granular dt-str when using 1M+ OHLC | ||||||
|  |             fmtstr: str = self.tick_tpl[delay] | ||||||
|  |         else: | ||||||
|  |             fmtstr: str = '%Y-%m-%d(%H:%M:%S)' | ||||||
|  | 
 | ||||||
|  |         # https://pola-rs.github.io/polars/py-polars/html/reference/expressions/api/polars.from_epoch.html#polars-from-epoch | ||||||
|  |         pl_dts: pl.Series = pl.from_epoch( | ||||||
|             epochs, |             epochs, | ||||||
|             dtype='datetime64[s]', |             time_unit='s', | ||||||
|  |         # NOTE: kinda weird we can pass it to `.from_epoch()` no? | ||||||
|  |         ).dt.replace_time_zone( | ||||||
|  |             time_zone='UTC' | ||||||
|  |         ).dt.convert_time_zone( | ||||||
|  |             # TODO: pull this from either: | ||||||
|  |             # -[ ] the mkt venue tz by default | ||||||
|  |             # -[ ] the user's config under `sys.mkt_timezone: str` | ||||||
|  |             'EST' | ||||||
|         ) |         ) | ||||||
| 
 |         return pl_dts.dt.to_string(fmtstr).to_list() | ||||||
|         # see units listing: |  | ||||||
|         # https://numpy.org/devdocs/reference/arrays.datetime.html#datetime-units |  | ||||||
|         return list(np.datetime_as_string(dts)) |  | ||||||
| 
 |  | ||||||
|         # TODO: per timeframe formatting? |  | ||||||
|         # - we probably need this based on zoom now right? |  | ||||||
|         # prec = self.np_dt_precision[delay] |  | ||||||
|         # return dts.strftime(self.tick_tpl[delay]) |  | ||||||
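The new body above swaps the `numpy.datetime64` string conversion for a polars pipeline. As a standalone sketch (assuming a recent polars release; the input epochs and printed output are illustrative only):

    import polars as pl

    epochs: list[int] = [1_700_000_000, 1_700_000_060]
    dt_strs: list[str] = (
        pl.from_epoch(epochs, time_unit='s')    # ints -> naive datetimes
        .dt.replace_time_zone(time_zone='UTC')  # tag as UTC
        .dt.convert_time_zone('EST')            # shift to display tz
        .dt.to_string('%Y-%m-%d(%H:%M:%S)')
        .to_list()
    )
    # -> ['2023-11-14(17:13:20)', '2023-11-14(17:14:20)']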
| 
 | 
 | ||||||
|     def tickStrings( |     def tickStrings( | ||||||
|         self, |         self, | ||||||
|  | @ -408,11 +421,15 @@ class AxisLabel(pg.GraphicsObject): | ||||||
|         super().__init__() |         super().__init__() | ||||||
|         self.setParentItem(parent) |         self.setParentItem(parent) | ||||||
| 
 | 
 | ||||||
|         self.setFlag(self.ItemIgnoresTransformations) |         self.setFlag( | ||||||
|  |             self.GraphicsItemFlag.ItemIgnoresTransformations | ||||||
|  |         ) | ||||||
|         self.setZValue(100) |         self.setZValue(100) | ||||||
| 
 | 
 | ||||||
|         # XXX: pretty sure this is faster |         # XXX: pretty sure this is faster | ||||||
|         self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache) |         self.setCacheMode( | ||||||
|  |             px_cache_mode.DeviceCoordinateCache | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|         self._parent = parent |         self._parent = parent | ||||||
| 
 | 
 | ||||||
|  | @ -549,21 +566,14 @@ class AxisLabel(pg.GraphicsObject): | ||||||
| 
 | 
 | ||||||
|         return (self.rect.width(), self.rect.height()) |         return (self.rect.width(), self.rect.height()) | ||||||
| 
 | 
 | ||||||
| # _common_text_flags = ( |  | ||||||
| #     QtCore.Qt.TextDontClip | |  | ||||||
| #     QtCore.Qt.AlignCenter | |  | ||||||
| #     QtCore.Qt.AlignTop | |  | ||||||
| #     QtCore.Qt.AlignHCenter | |  | ||||||
| #     QtCore.Qt.AlignVCenter |  | ||||||
| # ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class XAxisLabel(AxisLabel): | class XAxisLabel(AxisLabel): | ||||||
|     _x_margin = 8 |     _x_margin = 8 | ||||||
| 
 | 
 | ||||||
|     text_flags = ( |     text_flags = ( | ||||||
|         QtCore.Qt.TextDontClip |         align_flag.AlignCenter | ||||||
|         | QtCore.Qt.AlignCenter |         | txt_flag.TextDontClip | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     def size_hint(self) -> tuple[float, float]: |     def size_hint(self) -> tuple[float, float]: | ||||||
|  | @ -620,10 +630,10 @@ class YAxisLabel(AxisLabel): | ||||||
|     _y_margin: int = 4 |     _y_margin: int = 4 | ||||||
| 
 | 
 | ||||||
|     text_flags = ( |     text_flags = ( | ||||||
|         QtCore.Qt.AlignLeft |         align_flag.AlignLeft | ||||||
|         # QtCore.Qt.AlignHCenter |         | align_flag.AlignVCenter | ||||||
|         | QtCore.Qt.AlignVCenter |         # | align_flag.AlignHCenter | ||||||
|         | QtCore.Qt.TextDontClip |         | txt_flag.TextDontClip | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     def __init__( |     def __init__( | ||||||
|  |  | ||||||
|  | @ -28,22 +28,20 @@ from typing import ( | ||||||
|     TYPE_CHECKING, |     TYPE_CHECKING, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| from PyQt5 import QtCore, QtWidgets | import pyqtgraph as pg | ||||||
| from PyQt5.QtCore import ( | import trio | ||||||
|  | 
 | ||||||
|  | from piker.ui.qt import ( | ||||||
|  |     QtCore, | ||||||
|  |     QtWidgets, | ||||||
|     Qt, |     Qt, | ||||||
|     QLineF, |     QLineF, | ||||||
|     # QPointF, |  | ||||||
| ) |  | ||||||
| from PyQt5.QtWidgets import ( |  | ||||||
|     QFrame, |     QFrame, | ||||||
|     QWidget, |     QWidget, | ||||||
|     QHBoxLayout, |     QHBoxLayout, | ||||||
|     QVBoxLayout, |     QVBoxLayout, | ||||||
|     QSplitter, |     QSplitter, | ||||||
| ) | ) | ||||||
| import pyqtgraph as pg |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| from ._axes import ( | from ._axes import ( | ||||||
|     DynamicDateAxis, |     DynamicDateAxis, | ||||||
|     PriceAxis, |     PriceAxis, | ||||||
|  | @ -570,8 +568,8 @@ class LinkedSplits(QWidget): | ||||||
| 
 | 
 | ||||||
|         # style? |         # style? | ||||||
|         self.chart.setFrameStyle( |         self.chart.setFrameStyle( | ||||||
|             QFrame.StyledPanel | |             QFrame.Shape.StyledPanel | | ||||||
|             QFrame.Plain |             QFrame.Shadow.Plain | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         return self.chart |         return self.chart | ||||||
|  | @ -689,8 +687,8 @@ class LinkedSplits(QWidget): | ||||||
| 
 | 
 | ||||||
|         cpw.plotItem.vb.linked = self |         cpw.plotItem.vb.linked = self | ||||||
|         cpw.setFrameStyle( |         cpw.setFrameStyle( | ||||||
|             QtWidgets.QFrame.StyledPanel |             QFrame.Shape.StyledPanel | ||||||
|             # | QtWidgets.QFrame.Plain |             # | QFrame.Shadow.Plain | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # don't show the little "autoscale" A label. |         # don't show the little "autoscale" A label. | ||||||
|  |  | ||||||
|  | @ -28,9 +28,14 @@ from typing import ( | ||||||
| import inspect | import inspect | ||||||
| import numpy as np | import numpy as np | ||||||
| import pyqtgraph as pg | import pyqtgraph as pg | ||||||
| from PyQt5 import QtCore, QtWidgets |  | ||||||
| from PyQt5.QtCore import QPointF, QRectF |  | ||||||
| 
 | 
 | ||||||
|  | from piker.ui.qt import ( | ||||||
|  |     QPointF, | ||||||
|  |     QRectF, | ||||||
|  |     QtCore, | ||||||
|  |     QtWidgets, | ||||||
|  |     px_cache_mode, | ||||||
|  | ) | ||||||
| from ._style import ( | from ._style import ( | ||||||
|     _xaxis_at, |     _xaxis_at, | ||||||
|     hcolor, |     hcolor, | ||||||
|  | @ -104,7 +109,9 @@ class LineDot(pg.CurvePoint): | ||||||
|         dot.setParentItem(self) |         dot.setParentItem(self) | ||||||
| 
 | 
 | ||||||
|         # keep a static size |         # keep a static size | ||||||
|         self.setFlag(self.ItemIgnoresTransformations) |         self.setFlag( | ||||||
|  |             self.GraphicsItemFlag.ItemIgnoresTransformations | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|     def event( |     def event( | ||||||
|         self, |         self, | ||||||
|  | @ -207,9 +214,10 @@ class ContentsLabel(pg.LabelItem): | ||||||
|         # this being "html" is the dumbest shit :eyeroll: |         # this being "html" is the dumbest shit :eyeroll: | ||||||
| 
 | 
 | ||||||
|         self.setText( |         self.setText( | ||||||
|             "<b>i</b>:{index}<br/>" |             "<b>i_arr</b>:{index}<br/>" | ||||||
|             # NB: these fields must be indexed in the correct order via |             # NB: these fields must be indexed in the correct order via | ||||||
|             # the slice syntax below. |             # the slice syntax below. | ||||||
|  |             "<b>i_shm</b>:{}<br/>" | ||||||
|             "<b>epoch</b>:{}<br/>" |             "<b>epoch</b>:{}<br/>" | ||||||
|             "<b>O</b>:{}<br/>" |             "<b>O</b>:{}<br/>" | ||||||
|             "<b>H</b>:{}<br/>" |             "<b>H</b>:{}<br/>" | ||||||
|  | @ -219,6 +227,7 @@ class ContentsLabel(pg.LabelItem): | ||||||
|             # "<b>wap</b>:{}".format( |             # "<b>wap</b>:{}".format( | ||||||
|                 *array[ix][ |                 *array[ix][ | ||||||
|                     [ |                     [ | ||||||
|  |                         'index', | ||||||
|                         'time', |                         'time', | ||||||
|                         'open', |                         'open', | ||||||
|                         'high', |                         'high', | ||||||
|  | @ -270,10 +279,15 @@ class ContentsLabels: | ||||||
|         x_in: int, |         x_in: int, | ||||||
| 
 | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         for chart, name, label, update in self._labels: |         for ( | ||||||
|  |             chart, | ||||||
|  |             name, | ||||||
|  |             label, | ||||||
|  |             update, | ||||||
|  |         ) in self._labels: | ||||||
| 
 | 
 | ||||||
|             viz = chart.get_viz(name) |             viz = chart.get_viz(name) | ||||||
|             array = viz.shm.array |             array: np.ndarray = viz.shm._array | ||||||
|             index = array[viz.index_field] |             index = array[viz.index_field] | ||||||
|             start = index[0] |             start = index[0] | ||||||
|             stop = index[-1] |             stop = index[-1] | ||||||
|  | @ -284,7 +298,7 @@ class ContentsLabels: | ||||||
|             ): |             ): | ||||||
|                 # out of range |                 # out of range | ||||||
|                 print('WTF out of range?') |                 print('WTF out of range?') | ||||||
|                 continue |                 # continue | ||||||
| 
 | 
 | ||||||
|             # call provided update func with data point |             # call provided update func with data point | ||||||
|             try: |             try: | ||||||
|  | @ -292,6 +306,7 @@ class ContentsLabels: | ||||||
|                 ix = np.searchsorted(index, x_in) |                 ix = np.searchsorted(index, x_in) | ||||||
|                 if ix > len(array): |                 if ix > len(array): | ||||||
|                     breakpoint() |                     breakpoint() | ||||||
|  | 
 | ||||||
|                 update(ix, array) |                 update(ix, array) | ||||||
| 
 | 
 | ||||||
|             except IndexError: |             except IndexError: | ||||||
|  | @ -416,10 +431,10 @@ class Cursor(pg.GraphicsObject): | ||||||
|         # vertical and horizontal lines and a y-axis label |         # vertical and horizontal lines and a y-axis label | ||||||
| 
 | 
 | ||||||
|         vl = plot.addLine(x=0, pen=self.lines_pen, movable=False) |         vl = plot.addLine(x=0, pen=self.lines_pen, movable=False) | ||||||
|         vl.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache) |         vl.setCacheMode(px_cache_mode.DeviceCoordinateCache) | ||||||
| 
 | 
 | ||||||
|         hl = plot.addLine(y=0, pen=self.lines_pen, movable=False) |         hl = plot.addLine(y=0, pen=self.lines_pen, movable=False) | ||||||
|         hl.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache) |         hl.setCacheMode(px_cache_mode.DeviceCoordinateCache) | ||||||
|         hl.hide() |         hl.hide() | ||||||
| 
 | 
 | ||||||
|         yl = YAxisLabel( |         yl = YAxisLabel( | ||||||
|  | @ -503,7 +518,10 @@ class Cursor(pg.GraphicsObject): | ||||||
|             plot=chart |             plot=chart | ||||||
|         ) |         ) | ||||||
|         chart.addItem(cursor) |         chart.addItem(cursor) | ||||||
|         self.graphics[chart].setdefault('cursors', []).append(cursor) |         self.graphics[chart].setdefault( | ||||||
|  |             'cursors', | ||||||
|  |             [], | ||||||
|  |         ).append(cursor) | ||||||
|         return cursor |         return cursor | ||||||
| 
 | 
 | ||||||
|     def mouseAction( |     def mouseAction( | ||||||
|  |  | ||||||
|  | @ -19,20 +19,21 @@ Fast, smooth, sexy curves. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| from contextlib import contextmanager as cm | from contextlib import contextmanager as cm | ||||||
|  | from enum import EnumType | ||||||
| from typing import Callable | from typing import Callable | ||||||
| 
 | 
 | ||||||
| import numpy as np | import numpy as np | ||||||
| import pyqtgraph as pg | import pyqtgraph as pg | ||||||
| from PyQt5 import QtWidgets | 
 | ||||||
| from PyQt5.QtWidgets import QGraphicsItem | from piker.ui.qt import ( | ||||||
| from PyQt5.QtCore import ( |     QtWidgets, | ||||||
|  |     QGraphicsItem, | ||||||
|     Qt, |     Qt, | ||||||
|     QLineF, |     QLineF, | ||||||
|     QRectF, |     QRectF, | ||||||
| ) |  | ||||||
| from PyQt5.QtGui import ( |  | ||||||
|     QPainter, |     QPainter, | ||||||
|     QPainterPath, |     QPainterPath, | ||||||
|  |     px_cache_mode, | ||||||
| ) | ) | ||||||
| from ._style import hcolor | from ._style import hcolor | ||||||
| from ..log import get_logger | from ..log import get_logger | ||||||
|  | @ -42,22 +43,23 @@ from ..toolz.profile import ( | ||||||
|     ms_slower_then, |     ms_slower_then, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | pen_style: EnumType = Qt.PenStyle | ||||||
|  | 
 | ||||||
| _line_styles: dict[str, int] = { | _line_styles: dict[str, int] = { | ||||||
|     'solid': Qt.PenStyle.SolidLine, |     'solid': pen_style.SolidLine, | ||||||
|     'dash': Qt.PenStyle.DashLine, |     'dash': pen_style.DashLine, | ||||||
|     'dot': Qt.PenStyle.DotLine, |     'dot': pen_style.DotLine, | ||||||
|     'dashdot': Qt.PenStyle.DashDotLine, |     'dashdot': pen_style.DashDotLine, | ||||||
| } | } | ||||||
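Since `Qt.PenStyle` is aliased locally, the table above can be consumed directly when building pens; a minimal sketch (color and width are arbitrary):

    import pyqtgraph as pg

    # a dashed 1px pen via the style lookup table above
    pen = pg.mkPen(color='#808080', width=1, style=_line_styles['dash'])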
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class FlowGraphic(pg.GraphicsObject): | class FlowGraphic(pg.GraphicsObject): | ||||||
|     ''' |     ''' | ||||||
|     Base class with minimal interface for `QPainterPath` implemented, |     Base class with minimal interface for `QPainterPath` | ||||||
|     real-time updated "data flow" graphics. |     implemented, real-time updated "data flow" graphics. | ||||||
| 
 | 
 | ||||||
|     See subtypes below. |     See subtypes below. | ||||||
| 
 | 
 | ||||||
|  | @ -69,12 +71,12 @@ class FlowGraphic(pg.GraphicsObject): | ||||||
|     # XXX-NOTE-XXX: graphics caching B) |     # XXX-NOTE-XXX: graphics caching B) | ||||||
|     # see explanation for different caching modes: |     # see explanation for different caching modes: | ||||||
|     # https://stackoverflow.com/a/39410081 |     # https://stackoverflow.com/a/39410081 | ||||||
|     cache_mode: int = QGraphicsItem.DeviceCoordinateCache |     cache_mode: int = px_cache_mode.DeviceCoordinateCache | ||||||
|     # XXX: WARNING item caching seems to only be useful |     # XXX: WARNING item caching seems to only be useful | ||||||
|     # if we don't re-generate the entire QPainterPath every time |     # if we don't re-generate the entire QPainterPath every time | ||||||
|     # don't ever use this - it's a colossal nightmare of artefacts |     # don't ever use this - it's a colossal nightmare of artefacts | ||||||
|     # and is disastrous for performance. |     # and is disastrous for performance. | ||||||
|     # QGraphicsItem.ItemCoordinateCache |     # cache_mode.ItemCoordinateCache | ||||||
|     # TODO: still questions todo with coord-caching that we should |     # TODO: still questions todo with coord-caching that we should | ||||||
|     # probably talk to a core dev about: |     # probably talk to a core dev about: | ||||||
|     # - if this makes transform interactions slower (such as zooming) |     # - if this makes transform interactions slower (such as zooming) | ||||||
|  | @ -167,15 +169,16 @@ class FlowGraphic(pg.GraphicsObject): | ||||||
|         return None |         return None | ||||||
| 
 | 
 | ||||||
|     # XXX: due to a variety of weird jitter bugs and "smearing" |     # XXX: due to a variety of weird jitter bugs and "smearing" | ||||||
|     # artifacts when click-drag panning and viewing history time series, |     # artifacts when click-drag panning and viewing history time | ||||||
|     # we offer this ctx-mngr interface to allow temporarily disabling |     # series, we offer this ctx-mngr interface to allow temporarily | ||||||
|     # Qt's graphics caching mode; this is now currently used from |     # disabling Qt's graphics caching mode; this is now currently | ||||||
|     # ``ChartView.start/signal_ic()`` methods which also disable the |     # used from ``ChartView.start/signal_ic()`` methods which also | ||||||
|     # rt-display loop when the user is moving around a view. |     # disable the rt-display loop when the user is moving around | ||||||
|  |     # a view. | ||||||
|     @cm |     @cm | ||||||
|     def reset_cache(self) -> None: |     def reset_cache(self) -> None: | ||||||
|         try: |         try: | ||||||
|             none = QGraphicsItem.NoCache |             none = px_cache_mode.NoCache | ||||||
|             log.debug( |             log.debug( | ||||||
|                 f'{self._name} -> CACHE DISABLE: {none}' |                 f'{self._name} -> CACHE DISABLE: {none}' | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|  | @ -36,9 +36,12 @@ from msgspec import ( | ||||||
|     field, |     field, | ||||||
| ) | ) | ||||||
| import numpy as np | import numpy as np | ||||||
|  | from numpy import ( | ||||||
|  |     ndarray, | ||||||
|  | ) | ||||||
| import pyqtgraph as pg | import pyqtgraph as pg | ||||||
| from PyQt5.QtCore import QLineF |  | ||||||
| 
 | 
 | ||||||
|  | from piker.ui.qt import QLineF | ||||||
| from ..data._sharedmem import ( | from ..data._sharedmem import ( | ||||||
|     ShmArray, |     ShmArray, | ||||||
| ) | ) | ||||||
|  | @ -49,7 +52,7 @@ from ..data._formatters import ( | ||||||
|     OHLCBarsAsCurveFmtr,  # OHLC converted to line |     OHLCBarsAsCurveFmtr,  # OHLC converted to line | ||||||
|     StepCurveFmtr,  # "step" curve (like for vlm) |     StepCurveFmtr,  # "step" curve (like for vlm) | ||||||
| ) | ) | ||||||
| from ..data._timeseries import ( | from ..tsp import ( | ||||||
|     slice_from_time, |     slice_from_time, | ||||||
| ) | ) | ||||||
| from ._ohlc import ( | from ._ohlc import ( | ||||||
|  | @ -82,10 +85,11 @@ def render_baritems( | ||||||
|     viz: Viz, |     viz: Viz, | ||||||
|     graphics: BarItems, |     graphics: BarItems, | ||||||
|     read: tuple[ |     read: tuple[ | ||||||
|         int, int, np.ndarray, |         int, int, ndarray, | ||||||
|         int, int, np.ndarray, |         int, int, ndarray, | ||||||
|     ], |     ], | ||||||
|     profiler: Profiler, |     profiler: Profiler, | ||||||
|  |     force_redraw: bool = False, | ||||||
|     **kwargs, |     **kwargs, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|  | @ -216,9 +220,11 @@ def render_baritems( | ||||||
|     viz._in_ds = should_line |     viz._in_ds = should_line | ||||||
| 
 | 
 | ||||||
|     should_redraw = ( |     should_redraw = ( | ||||||
|         changed_to_line |         force_redraw | ||||||
|  |         or changed_to_line | ||||||
|         or not should_line |         or not should_line | ||||||
|     ) |     ) | ||||||
|  |     # print(f'should_redraw: {should_redraw}') | ||||||
|     return ( |     return ( | ||||||
|         graphics, |         graphics, | ||||||
|         r, |         r, | ||||||
|  | @ -250,7 +256,7 @@ class ViewState(Struct): | ||||||
|     ] | None = None |     ] | None = None | ||||||
| 
 | 
 | ||||||
|     # last in view ``ShmArray.array[read_slc]`` data |     # last in view ``ShmArray.array[read_slc]`` data | ||||||
|     in_view: np.ndarray | None = None |     in_view: ndarray | None = None | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class Viz(Struct): | class Viz(Struct): | ||||||
|  | @ -313,6 +319,7 @@ class Viz(Struct): | ||||||
|     _last_uppx: float = 0 |     _last_uppx: float = 0 | ||||||
|     _in_ds: bool = False |     _in_ds: bool = False | ||||||
|     _index_step: float | None = None |     _index_step: float | None = None | ||||||
|  |     _time_step: float | None = None | ||||||
| 
 | 
 | ||||||
|     # map from uppx -> (downsampled data, incremental graphics) |     # map from uppx -> (downsampled data, incremental graphics) | ||||||
|     _src_r: Renderer | None = None |     _src_r: Renderer | None = None | ||||||
|  | @ -359,7 +366,8 @@ class Viz(Struct): | ||||||
| 
 | 
 | ||||||
|     def index_step( |     def index_step( | ||||||
|         self, |         self, | ||||||
|         reset: bool = False, |         index_field: str | None = None, | ||||||
|  | 
 | ||||||
|     ) -> float: |     ) -> float: | ||||||
|         ''' |         ''' | ||||||
|         Return the size between sample steps in the units of the |         Return the size between sample steps in the units of the | ||||||
|  | @ -367,12 +375,17 @@ class Viz(Struct): | ||||||
|         epoch time in seconds. |         epoch time in seconds. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         # attempt to dectect the best step size by scanning a sample of |         # attempt to detect the best step size by scanning a sample | ||||||
|         # the source data. |         # of the source data. | ||||||
|         if self._index_step is None: |         if ( | ||||||
| 
 |             self._index_step is None | ||||||
|             index: np.ndarray = self.shm.array[self.index_field] |             or index_field is not None | ||||||
|             isample: np.ndarray = index[-16:] |         ): | ||||||
|  |             index: ndarray = self.shm.array[ | ||||||
|  |                 index_field | ||||||
|  |                 or self.index_field | ||||||
|  |             ] | ||||||
|  |             isample: ndarray = index[-16:] | ||||||
| 
 | 
 | ||||||
|             mxdiff: None | float = None |             mxdiff: None | float = None | ||||||
|             for step in np.diff(isample): |             for step in np.diff(isample): | ||||||
|  | @ -386,7 +399,15 @@ class Viz(Struct): | ||||||
|                         ) |                         ) | ||||||
|                     mxdiff = step |                     mxdiff = step | ||||||
| 
 | 
 | ||||||
|             self._index_step = max(mxdiff, 1) |             step: float = max(mxdiff, 1) | ||||||
|  | 
 | ||||||
|  |             # only SET the internal index step if an explicit | ||||||
|  |             # field name is NOT passed, since in such cases this | ||||||
|  |             # is likely just being called from `.time_step()`. | ||||||
|  |             if index_field is not None: | ||||||
|  |                 return step | ||||||
|  | 
 | ||||||
|  |             self._index_step = step | ||||||
|             if ( |             if ( | ||||||
|                 mxdiff < 1 |                 mxdiff < 1 | ||||||
|                 or 1 < mxdiff < 60 |                 or 1 < mxdiff < 60 | ||||||
|  | @ -397,6 +418,17 @@ class Viz(Struct): | ||||||
| 
 | 
 | ||||||
|         return self._index_step |         return self._index_step | ||||||
| 
 | 
 | ||||||
|  |     def time_step(self) -> float: | ||||||
|  |         ''' | ||||||
|  |         Attempt to determine the per-sample time-step period by | ||||||
|  |         forcing an epoch-time index and calling `.index_step()`. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         if self._time_step is None: | ||||||
|  |             self._time_step: float = self.index_step(index_field='time') | ||||||
|  | 
 | ||||||
|  |         return self._time_step | ||||||
|  | 
 | ||||||
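The `np.diff`-over-a-sample heuristic behind `.index_step()` (and thus `.time_step()`) is worth seeing in isolation; a minimal sketch with a made-up 60s-bar epoch index:

    import numpy as np

    # hypothetical last-16 sample of a 60s-bar epoch 'time' index
    isample = 1_700_000_000 + 60 * np.arange(16)
    step = max(np.diff(isample).max(), 1)  # -> 60, per-bar period in seconds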
|     def maxmin( |     def maxmin( | ||||||
|         self, |         self, | ||||||
| 
 | 
 | ||||||
|  | @ -404,6 +436,9 @@ class Viz(Struct): | ||||||
|         i_read_range: tuple[int, int] | None = None, |         i_read_range: tuple[int, int] | None = None, | ||||||
|         use_caching: bool = True, |         use_caching: bool = True, | ||||||
| 
 | 
 | ||||||
|  |         # XXX: internal debug | ||||||
|  |         _do_print: bool = False | ||||||
|  | 
 | ||||||
|     ) -> tuple[float, float] | None: |     ) -> tuple[float, float] | None: | ||||||
|         ''' |         ''' | ||||||
|         Compute the cached max and min y-range values for a given |         Compute the cached max and min y-range values for a given | ||||||
|  | @ -423,15 +458,14 @@ class Viz(Struct): | ||||||
|         if shm is None: |         if shm is None: | ||||||
|             return None |             return None | ||||||
| 
 | 
 | ||||||
|         do_print: bool = False |         arr: ndarray = shm.array | ||||||
|         arr = shm.array |  | ||||||
| 
 | 
 | ||||||
|         if i_read_range is not None: |         if i_read_range is not None: | ||||||
|             read_slc = slice(*i_read_range) |             read_slc = slice(*i_read_range) | ||||||
|             index = arr[read_slc][self.index_field] |             index: ndarray = arr[read_slc][self.index_field] | ||||||
|             if not index.size: |             if not index.size: | ||||||
|                 return None |                 return None | ||||||
|             ixrng = (index[0], index[-1]) |             ixrng: tuple[int, int] = (index[0], index[-1]) | ||||||
| 
 | 
 | ||||||
|         else: |         else: | ||||||
|             if x_range is None: |             if x_range is None: | ||||||
|  | @ -449,15 +483,24 @@ class Viz(Struct): | ||||||
| 
 | 
 | ||||||
|             # TODO: hash the slice instead maybe? |             # TODO: hash the slice instead maybe? | ||||||
|             # https://stackoverflow.com/a/29980872 |             # https://stackoverflow.com/a/29980872 | ||||||
|             ixrng = lbar, rbar = round(x_range[0]), round(x_range[1]) |             ixrng = lbar, rbar = ( | ||||||
|  |                 round(x_range[0]), | ||||||
|  |                 round(x_range[1]), | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|         if ( |         if ( | ||||||
|             use_caching |             use_caching | ||||||
|             and self._mxmn_cache_enabled |             and self._mxmn_cache_enabled | ||||||
|         ): |         ): | ||||||
|  |             # TODO: is there a way to ONLY clear ranges containing | ||||||
|  |             # a certain sub-range? | ||||||
|  |             # -[ ] currently we have a problem where a previously | ||||||
|  |             #    cached mxmn will persist even if the viz is "hard | ||||||
|  |             #    re-rendered" (usually bc underlying data was | ||||||
|  |             #    corrected) | ||||||
|             cached_result = self._mxmns.get(ixrng) |             cached_result = self._mxmns.get(ixrng) | ||||||
|             if cached_result: |             if cached_result: | ||||||
|                 if do_print: |                 if _do_print: | ||||||
|                     print( |                     print( | ||||||
|                         f'{self.name} CACHED maxmin\n' |                         f'{self.name} CACHED maxmin\n' | ||||||
|                         f'{ixrng} -> {cached_result}' |                         f'{ixrng} -> {cached_result}' | ||||||
|  | @ -487,7 +530,7 @@ class Viz(Struct): | ||||||
|                     (rbar - ifirst) + 1 |                     (rbar - ifirst) + 1 | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|         slice_view = arr[read_slc] |         slice_view: ndarray = arr[read_slc] | ||||||
| 
 | 
 | ||||||
|         if not slice_view.size: |         if not slice_view.size: | ||||||
|             log.warning( |             log.warning( | ||||||
|  | @ -498,7 +541,7 @@ class Viz(Struct): | ||||||
| 
 | 
 | ||||||
|         elif self.ds_yrange: |         elif self.ds_yrange: | ||||||
|             mxmn = self.ds_yrange |             mxmn = self.ds_yrange | ||||||
|             if do_print: |             if _do_print: | ||||||
|                 print( |                 print( | ||||||
|                     f'{self.name} M4 maxmin:\n' |                     f'{self.name} M4 maxmin:\n' | ||||||
|                     f'{ixrng} -> {mxmn}' |                     f'{ixrng} -> {mxmn}' | ||||||
|  | @ -515,7 +558,7 @@ class Viz(Struct): | ||||||
| 
 | 
 | ||||||
|             mxmn = ylow, yhigh |             mxmn = ylow, yhigh | ||||||
|             if ( |             if ( | ||||||
|                 do_print |                 _do_print | ||||||
|             ): |             ): | ||||||
|                 s = 3 |                 s = 3 | ||||||
|                 print( |                 print( | ||||||
|  | @ -529,14 +572,23 @@ class Viz(Struct): | ||||||
| 
 | 
 | ||||||
|         # cache result for input range |         # cache result for input range | ||||||
|         ylow, yhi = mxmn |         ylow, yhi = mxmn | ||||||
|  |         diff: float = yhi - ylow | ||||||
|  | 
 | ||||||
|  |         # order-of-magnitude check | ||||||
|  |         # TODO: really we should be checking the hi or low | ||||||
|  |         # against the previous sample to catch stuff like, | ||||||
|  |         # - rando stock (reverse-)split | ||||||
|  |         # - null-segments written by some prior | ||||||
|  |         #   crash-during-backfill | ||||||
|  |         if diff > 0: | ||||||
|  |             omg: float = abs(logf(diff, 10)) | ||||||
|  |         else: | ||||||
|  |             omg: float = 0 | ||||||
| 
 | 
 | ||||||
|         try: |         try: | ||||||
|             prolly_anomaly: bool = ( |             prolly_anomaly: bool = ( | ||||||
|                 ( |                 # diff == 0 | ||||||
|                     abs(logf(ylow, 10)) > 16 |                 (ylow and omg > 10) | ||||||
|                     if ylow |  | ||||||
|                     else False |  | ||||||
|                 ) |  | ||||||
|                 or ( |                 or ( | ||||||
|                     isnan(ylow) or isnan(yhi) |                     isnan(ylow) or isnan(yhi) | ||||||
|                 ) |                 ) | ||||||
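The magnitude check above flags y-ranges spanning implausibly many decades; worked through with the module's `logf` (assumed here to be `math.log`):

    from math import log as logf

    ylow, yhi = 0.0001, 1e12          # bogus range, e.g. from a null-segment
    omg = abs(logf(yhi - ylow, 10))   # ~12.0 orders of magnitude
    prolly_anomaly = bool(ylow and omg > 10)  # -> True, don't cache it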
|  | @ -563,7 +615,8 @@ class Viz(Struct): | ||||||
| 
 | 
 | ||||||
|     def view_range(self) -> tuple[int, int]: |     def view_range(self) -> tuple[int, int]: | ||||||
|         ''' |         ''' | ||||||
|         Return the start and stop x-indexes for the managed ``ViewBox``. |         Return the start and stop x-indexes for the managed | ||||||
|  |         ``ViewBox``. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         vr = self.plot.viewRect() |         vr = self.plot.viewRect() | ||||||
|  | @ -576,7 +629,7 @@ class Viz(Struct): | ||||||
|         self, |         self, | ||||||
|         view_range: None | tuple[float, float] = None, |         view_range: None | tuple[float, float] = None, | ||||||
|         index_field: str | None = None, |         index_field: str | None = None, | ||||||
|         array: np.ndarray | None = None, |         array: ndarray | None = None, | ||||||
| 
 | 
 | ||||||
|     ) -> tuple[ |     ) -> tuple[ | ||||||
|         int, int, int, int, int, int |         int, int, int, int, int, int | ||||||
|  | @ -647,8 +700,8 @@ class Viz(Struct): | ||||||
|         profiler: None | Profiler = None, |         profiler: None | Profiler = None, | ||||||
| 
 | 
 | ||||||
|     ) -> tuple[ |     ) -> tuple[ | ||||||
|         int, int, np.ndarray, |         int, int, ndarray, | ||||||
|         int, int, np.ndarray, |         int, int, ndarray, | ||||||
|     ]: |     ]: | ||||||
|         ''' |         ''' | ||||||
|         Read the underlying shm array buffer and |         Read the underlying shm array buffer and | ||||||
|  | @ -818,6 +871,10 @@ class Viz(Struct): | ||||||
|                 graphics, |                 graphics, | ||||||
|                 read, |                 read, | ||||||
|                 profiler, |                 profiler, | ||||||
|  | 
 | ||||||
|  |                 # NOTE: only set when caller says to | ||||||
|  |                 force_redraw=should_redraw, | ||||||
|  | 
 | ||||||
|                 **kwargs, |                 **kwargs, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|  | @ -980,6 +1037,39 @@ class Viz(Struct): | ||||||
|             graphics, |             graphics, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  |     def reset_graphics( | ||||||
|  |         self, | ||||||
|  | 
 | ||||||
|  |         # TODO: allow only resetting within some x-domain range? | ||||||
|  |         # ixrng: tuple[int, int] | None = None, | ||||||
|  | 
 | ||||||
|  |     ) -> None: | ||||||
|  |         ''' | ||||||
|  |         Hard reset all graphics (rendering) layers for this | ||||||
|  |         data viz including clearing the mxmn auto-y-range | ||||||
|  |         cache. | ||||||
|  | 
 | ||||||
|  |         Normally called when the underlying data set is modified | ||||||
|  |         (probably by some `.tsp` correcting/editing routine) and | ||||||
|  |         the (now cached) graphics need to be fully re-rendered from | ||||||
|  |         source. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         log.warning( | ||||||
|  |             f'Forcing hard Viz graphics RESET:\n' | ||||||
|  |             f'.name: {self.name}\n' | ||||||
|  |             f'.index_field: {self.index_field}\n' | ||||||
|  |             f'.index_step(): {self.index_step()}\n' | ||||||
|  |             f'.time_step(): {self.time_step()}\n' | ||||||
|  |         ) | ||||||
|  |         # XXX: always clear the mxmn y-range cache | ||||||
|  |         # to avoid old data (anomalies) from being | ||||||
|  |         # retained in auto-yrange output. | ||||||
|  |         self._mxmn_cache_enabled = False | ||||||
|  |         self._mxmns.clear() | ||||||
|  |         self.update_graphics(force_redraw=True) | ||||||
|  |         self._mxmn_cache_enabled = True | ||||||
|  | 
 | ||||||
|     def draw_last( |     def draw_last( | ||||||
|         self, |         self, | ||||||
|         array_key: str | None = None, |         array_key: str | None = None, | ||||||
|  | @ -1072,7 +1162,7 @@ class Viz(Struct): | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         shm: ShmArray = self.shm |         shm: ShmArray = self.shm | ||||||
|         array: np.ndarray = shm.array |         array: ndarray = shm.array | ||||||
|         view: ChartView = self.plot.vb |         view: ChartView = self.plot.vb | ||||||
|         ( |         ( | ||||||
|             vl, |             vl, | ||||||
|  |  | ||||||
|  | @ -57,6 +57,7 @@ from piker.toolz import ( | ||||||
|     Profiler, |     Profiler, | ||||||
| ) | ) | ||||||
| from piker.log import get_logger | from piker.log import get_logger | ||||||
|  | from piker import config | ||||||
| # from ..data._source import tf_in_1s | # from ..data._source import tf_in_1s | ||||||
| from ._axes import YAxisLabel | from ._axes import YAxisLabel | ||||||
| from ._chart import ( | from ._chart import ( | ||||||
|  | @ -210,9 +211,9 @@ async def increment_history_view( | ||||||
| ): | ): | ||||||
|     hist_chart: ChartPlotWidget = ds.hist_chart |     hist_chart: ChartPlotWidget = ds.hist_chart | ||||||
|     hist_viz: Viz = ds.hist_viz |     hist_viz: Viz = ds.hist_viz | ||||||
|     viz: Viz = ds.viz |     # viz: Viz = ds.viz | ||||||
|     assert 'hist' in hist_viz.shm.token['shm_name'] |     assert 'hist' in hist_viz.shm.token['shm_name'] | ||||||
|     name: str = hist_viz.name |     # name: str = hist_viz.name | ||||||
| 
 | 
 | ||||||
|     # TODO: seems this is more reliable at keeping the slow |     # TODO: seems this is more reliable at keeping the slow | ||||||
|     # chart incremented in view more correctly? |     # chart incremented in view more correctly? | ||||||
|  | @ -225,7 +226,8 @@ async def increment_history_view( | ||||||
|     # draw everything from scratch on first entry! |     # draw everything from scratch on first entry! | ||||||
|     for curve_name, hist_viz in hist_chart._vizs.items(): |     for curve_name, hist_viz in hist_chart._vizs.items(): | ||||||
|         log.info(f'Forcing hard redraw -> {curve_name}') |         log.info(f'Forcing hard redraw -> {curve_name}') | ||||||
|         hist_viz.update_graphics(force_redraw=True) |         hist_viz.reset_graphics() | ||||||
|  |         # hist_viz.update_graphics(force_redraw=True) | ||||||
| 
 | 
 | ||||||
|     async with open_sample_stream(1.) as min_istream: |     async with open_sample_stream(1.) as min_istream: | ||||||
|         async for msg in min_istream: |         async for msg in min_istream: | ||||||
|  | @ -248,17 +250,27 @@ async def increment_history_view( | ||||||
|             # - samplerd could emit the actual update range via |             # - samplerd could emit the actual update range via | ||||||
|             #   tuple and then we only enter the below block if that |             #   tuple and then we only enter the below block if that | ||||||
|             #   range is detected as in-view? |             #   range is detected as in-view? | ||||||
|             if ( |             # match msg: | ||||||
|                 (bf_wut := msg.get('backfilling', False)) |             #     case { | ||||||
|             ): |             #         'backfilling': (viz_name, timeframe), | ||||||
|                 viz_name, timeframe = bf_wut |             #     } if ( | ||||||
|                 if viz_name == name: |             #         viz_name == name | ||||||
|                     log.info(f'Forcing hard redraw -> {name}@{timeframe}') |             #     ): | ||||||
|                     match timeframe: |             #         log.warning( | ||||||
|                         case 60: |             #             f'Forcing HARD REDRAW:\n' | ||||||
|                             hist_viz.update_graphics(force_redraw=True) |             #             f'name: {name}\n' | ||||||
|                         case 1: |             #             f'timeframe: {timeframe}\n' | ||||||
|                             viz.update_graphics(force_redraw=True) |             #         ) | ||||||
|  |             #         # TODO: only allow this when the data is IN VIEW! | ||||||
|  |             #         # also, we probably can do this more efficiently | ||||||
|  |             #         # / smarter by only redrawing the portion of the | ||||||
|  |             #         # path necessary? | ||||||
|  |             #         { | ||||||
|  |             #             60: hist_viz, | ||||||
|  |             #             1: viz, | ||||||
|  |             #         }[timeframe].update_graphics( | ||||||
|  |             #             force_redraw=True | ||||||
|  |             #         ) | ||||||
| 
 | 
 | ||||||
|             # check if slow chart needs an x-domain shift and/or |             # check if slow chart needs an x-domain shift and/or | ||||||
|             # y-range resize. |             # y-range resize. | ||||||
|  | @ -299,6 +311,7 @@ async def increment_history_view( | ||||||
| 
 | 
 | ||||||
| async def graphics_update_loop( | async def graphics_update_loop( | ||||||
| 
 | 
 | ||||||
|  |     dss: dict[str, DisplayState], | ||||||
|     nurse: trio.Nursery, |     nurse: trio.Nursery, | ||||||
|     godwidget: GodWidget, |     godwidget: GodWidget, | ||||||
|     feed: Feed, |     feed: Feed, | ||||||
|  | @ -340,8 +353,6 @@ async def graphics_update_loop( | ||||||
|         'i_last_slow_t':  0,  # multiview-global slow (1m) step index |         'i_last_slow_t':  0,  # multiview-global slow (1m) step index | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     dss: dict[str, DisplayState] = {} |  | ||||||
| 
 |  | ||||||
|     for fqme, flume in feed.flumes.items(): |     for fqme, flume in feed.flumes.items(): | ||||||
|         ohlcv = flume.rt_shm |         ohlcv = flume.rt_shm | ||||||
|         hist_ohlcv = flume.hist_shm |         hist_ohlcv = flume.hist_shm | ||||||
|  | @ -460,10 +471,18 @@ async def graphics_update_loop( | ||||||
|         if ds.hist_vars['i_last'] < ds.hist_vars['i_last_append']: |         if ds.hist_vars['i_last'] < ds.hist_vars['i_last_append']: | ||||||
|             await tractor.pause() |             await tractor.pause() | ||||||
| 
 | 
 | ||||||
|  |     # try: | ||||||
|  | 
 | ||||||
|  |     # XXX TODO: we need to do _dss UPDATE here so that when | ||||||
|  |     # a feed-view is switched you can still remote annotate the | ||||||
|  |     # prior view.. | ||||||
|  |     from . import _remote_ctl | ||||||
|  |     _remote_ctl._dss.update(dss) | ||||||
|  | 
 | ||||||
|     # main real-time quotes update loop |     # main real-time quotes update loop | ||||||
|     stream: tractor.MsgStream |     stream: tractor.MsgStream | ||||||
|     async with feed.open_multi_stream() as stream: |     async with feed.open_multi_stream() as stream: | ||||||
|         assert stream |         # assert stream | ||||||
|         async for quotes in stream: |         async for quotes in stream: | ||||||
|             quote_period = time.time() - last_quote_s |             quote_period = time.time() - last_quote_s | ||||||
|             quote_rate = round( |             quote_rate = round( | ||||||
|  | @ -479,7 +498,7 @@ async def graphics_update_loop( | ||||||
|                 pass |                 pass | ||||||
|                 # log.warning(f'High quote rate {mkt.fqme}: {quote_rate}') |                 # log.warning(f'High quote rate {mkt.fqme}: {quote_rate}') | ||||||
| 
 | 
 | ||||||
|             last_quote_s = time.time() |             last_quote_s: float = time.time() | ||||||
| 
 | 
 | ||||||
|             for fqme, quote in quotes.items(): |             for fqme, quote in quotes.items(): | ||||||
|                 ds = dss[fqme] |                 ds = dss[fqme] | ||||||
|  | @ -509,6 +528,12 @@ async def graphics_update_loop( | ||||||
|                     quote, |                     quote, | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|  |     # finally: | ||||||
|  |     #     # XXX: cancel any remote annotation control ctxs | ||||||
|  |     #     _remote_ctl._dss = None | ||||||
|  |     #     for cid, (ctx, aids) in _remote_ctl._ctxs.items(): | ||||||
|  |     #         await ctx.cancel() | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| def graphics_update_cycle( | def graphics_update_cycle( | ||||||
|     ds: DisplayState, |     ds: DisplayState, | ||||||
|  | @ -1207,6 +1232,8 @@ async def link_views_with_region( | ||||||
|     # region.sigRegionChangeFinished.connect(update_pi_from_region) |     # region.sigRegionChangeFinished.connect(update_pi_from_region) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # NOTE: default is set to 60 FPS until the runtime delivers the | ||||||
|  | # discovered hw value below. | ||||||
| _quote_throttle_rate: int = 60 - 6 | _quote_throttle_rate: int = 60 - 6 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -1225,7 +1252,7 @@ async def display_symbol_data( | ||||||
|     fast from a cached watch-list. |     fast from a cached watch-list. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     sbar = godwidget.window.status_bar |     # sbar = godwidget.window.status_bar | ||||||
|     # historical data fetch |     # historical data fetch | ||||||
|     # brokermod = brokers.get_brokermod(provider) |     # brokermod = brokers.get_brokermod(provider) | ||||||
| 
 | 
 | ||||||
|  | @ -1235,11 +1262,11 @@ async def display_symbol_data( | ||||||
|     #     group_key=loading_sym_key, |     #     group_key=loading_sym_key, | ||||||
|     # ) |     # ) | ||||||
| 
 | 
 | ||||||
|     for fqme in fqmes: |     # for fqme in fqmes: | ||||||
|         loading_sym_key = sbar.open_status( |     #     loading_sym_key = sbar.open_status( | ||||||
|             f'loading {fqme} ->', |     #         f'loading {fqme} ->', | ||||||
|             group_key=True |     #         group_key=True | ||||||
|         ) |     #     ) | ||||||
| 
 | 
 | ||||||
|     # (TODO: make this not so shit XD) |     # (TODO: make this not so shit XD) | ||||||
|     # close group status once a symbol feed fully loads to view. |     # close group status once a symbol feed fully loads to view. | ||||||
|  | @ -1248,26 +1275,54 @@ async def display_symbol_data( | ||||||
|     # TODO: ctl over update loop's maximum frequency. |     # TODO: ctl over update loop's maximum frequency. | ||||||
|     # - load this from a config.toml! |     # - load this from a config.toml! | ||||||
|     # - allow dynamic configuration from chart UI? |     # - allow dynamic configuration from chart UI? | ||||||
|  |     ( | ||||||
|  |         conf, | ||||||
|  |         path, | ||||||
|  |     ) = config.load() | ||||||
|  |     ui_conf: dict = conf['ui'] | ||||||
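The `ui` section consumed here (and by the `.get()` calls just below) is presumably user-provided via the project's `config.toml`; a hypothetical entry matching the keys read in this hunk, with illustrative values:

    [ui]
    max_redraw_rate = 57       # cap on the global redraw/throttle FPS
    per_feed_redraw_rate = 16  # per-feed graphics-cycle cap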
|  | 
 | ||||||
|     global _quote_throttle_rate |     global _quote_throttle_rate | ||||||
|     from ._window import main_window |     from ._window import main_window | ||||||
|     display_rate = main_window().current_screen().refreshRate() | 
 | ||||||
|     _quote_throttle_rate = floor(display_rate) - 6 |     display_rate: int = floor( | ||||||
|  |         main_window().current_screen().refreshRate() | ||||||
|  |     ) - 6 | ||||||
|  | 
 | ||||||
|  |     mx_redraw_rate: int = ui_conf.get( | ||||||
|  |         'max_redraw_rate', | ||||||
|  |         _quote_throttle_rate, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     if mx_redraw_rate < display_rate: | ||||||
|  |         log.info( | ||||||
|  |             'Down-throttling redraw rate to config setting\n' | ||||||
|  |             f'display FPS: {display_rate}\n' | ||||||
|  |             f'max_redraw_rate: {mx_redraw_rate}\n' | ||||||
|  |         ) | ||||||
|  |         # actually apply the configured cap | ||||||
|  |         _quote_throttle_rate = mx_redraw_rate | ||||||
|  |     else: | ||||||
|  |         _quote_throttle_rate = display_rate | ||||||
| 
 | 
 | ||||||
|     # TODO: we should be able to increase this if we use some |     # TODO: we should be able to increase this if we use some | ||||||
|     # `mypyc` speedups elsewhere? 22ish seems to be the sweet |     # `mypyc` speedups elsewhere? 22ish seems to be the sweet | ||||||
|     # spot for single-feed chart. |     # spot for single-feed chart. | ||||||
|     num_of_feeds = len(fqmes) |     num_of_feeds = len(fqmes) | ||||||
|     mx: int = 22 |     # if num_of_feeds > 1: | ||||||
|     if num_of_feeds > 1: | 
 | ||||||
|     # there will be more ctx switches with more than 1 feed so we |     # there will be more ctx switches with more than 1 feed so we | ||||||
|     # max throttle down a bit more. |     # max throttle down a bit more. | ||||||
|         mx = 16 |     mx_per_feed: int = ( | ||||||
|  |         ui_conf.get( | ||||||
|  |             'per_feed_redraw_rate', | ||||||
|  |             mx_redraw_rate, | ||||||
|  |         ) | ||||||
|  |         or 16 | ||||||
|  |     ) | ||||||
| 
 | 
 | ||||||
|     # limit to at least display's FPS |     # limit to at least display's FPS | ||||||
|     # avoiding needless Qt-in-guest-mode context switches |     # avoiding needless Qt-in-guest-mode context switches | ||||||
|     cycles_per_feed = min( |     cycles_per_feed = min( | ||||||
|         round(_quote_throttle_rate/num_of_feeds), |         round(_quote_throttle_rate/num_of_feeds), | ||||||
|         mx, |         mx_per_feed, | ||||||
|     ) |     ) | ||||||
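Concretely, with the fallback per-feed cap of 16 and a ~60Hz display:

    # e.g. _quote_throttle_rate == 54 (60Hz - 6) split across 3 feeds:
    min(round(54 / 3), 16)  # -> min(18, 16) == 16 update cycles per feed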
| 
 | 
 | ||||||
|     feed: Feed |     feed: Feed | ||||||
|  | @ -1412,7 +1467,7 @@ async def display_symbol_data( | ||||||
|                 start_fsp_displays, |                 start_fsp_displays, | ||||||
|                 rt_linked, |                 rt_linked, | ||||||
|                 flume, |                 flume, | ||||||
|                 loading_sym_key, |                 # loading_sym_key, | ||||||
|                 loglevel, |                 loglevel, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|  | @ -1531,8 +1586,10 @@ async def display_symbol_data( | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             # start update loop task |             # start update loop task | ||||||
|  |             dss: dict[str, DisplayState] = {} | ||||||
|             ln.start_soon( |             ln.start_soon( | ||||||
|                 graphics_update_loop, |                 graphics_update_loop, | ||||||
|  |                 dss, | ||||||
|                 ln, |                 ln, | ||||||
|                 godwidget, |                 godwidget, | ||||||
|                 feed, |                 feed, | ||||||
|  | @ -1546,15 +1603,31 @@ async def display_symbol_data( | ||||||
|             order_ctl_fqme: str = fqmes[0] |             order_ctl_fqme: str = fqmes[0] | ||||||
|             mode: OrderMode |             mode: OrderMode | ||||||
|             async with ( |             async with ( | ||||||
|  | 
 | ||||||
|                 open_order_mode( |                 open_order_mode( | ||||||
|                     feed, |                     feed, | ||||||
|                     godwidget, |                     godwidget, | ||||||
|                     order_ctl_fqme, |                     order_ctl_fqme, | ||||||
|                     order_mode_started, |                     order_mode_started, | ||||||
|                     loglevel=loglevel |                     loglevel=loglevel | ||||||
|                 ) as mode |                 ) as mode, | ||||||
|             ): |  | ||||||
| 
 | 
 | ||||||
|  |                 # TODO: maybe have these startup sooner before | ||||||
|  |                 # order mode fully boots? but we gotta, | ||||||
|  |                 # -[ ] decouple the order mode bindings until | ||||||
|  |                 #    the mode has fully booted.. | ||||||
|  |                 #    -[ ] maybe do an Event to sync? | ||||||
|  | 
 | ||||||
|  |                 # start input handling for ``ChartView`` input | ||||||
|  |                 # (i.e. kb + mouse handling loops) | ||||||
|  |                 rt_chart.view.open_async_input_handler( | ||||||
|  |                     dss=dss, | ||||||
|  |                 ), | ||||||
|  |                 hist_chart.view.open_async_input_handler( | ||||||
|  |                     dss=dss, | ||||||
|  |                 ), | ||||||
|  | 
 | ||||||
|  |             ): | ||||||
|                 rt_linked.mode = mode |                 rt_linked.mode = mode | ||||||
| 
 | 
 | ||||||
|                 rt_viz = rt_chart.get_viz(order_ctl_fqme) |                 rt_viz = rt_chart.get_viz(order_ctl_fqme) | ||||||
|  |  | ||||||
|  | @ -21,7 +21,8 @@ Higher level annotation editors. | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
| from typing import ( | from typing import ( | ||||||
|     TYPE_CHECKING |     Sequence, | ||||||
|  |     TYPE_CHECKING, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| import pyqtgraph as pg | import pyqtgraph as pg | ||||||
|  | @ -31,24 +32,34 @@ from pyqtgraph import ( | ||||||
|     QtCore, |     QtCore, | ||||||
|     QtWidgets, |     QtWidgets, | ||||||
| ) | ) | ||||||
| from PyQt5.QtGui import ( |  | ||||||
|     QColor, |  | ||||||
| ) |  | ||||||
| from PyQt5.QtWidgets import ( |  | ||||||
|     QLabel, |  | ||||||
| ) |  | ||||||
| 
 | 
 | ||||||
| from pyqtgraph import functions as fn | from pyqtgraph import functions as fn | ||||||
| from PyQt5.QtCore import QPointF |  | ||||||
| import numpy as np | import numpy as np | ||||||
| 
 | 
 | ||||||
| from piker.types import Struct | from piker.types import Struct | ||||||
| from ._style import hcolor, _font | from piker.ui.qt import ( | ||||||
|  |     Qt, | ||||||
|  |     QPointF, | ||||||
|  |     QRectF, | ||||||
|  |     QGraphicsProxyWidget, | ||||||
|  |     QGraphicsScene, | ||||||
|  |     QLabel, | ||||||
|  |     QColor, | ||||||
|  |     QTransform, | ||||||
|  | ) | ||||||
|  | from ._style import ( | ||||||
|  |     hcolor, | ||||||
|  |     _font, | ||||||
|  | ) | ||||||
| from ._lines import LevelLine | from ._lines import LevelLine | ||||||
| from ..log import get_logger | from ..log import get_logger | ||||||
| 
 | 
 | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
|     from ._chart import GodWidget |     from ._chart import ( | ||||||
|  |         GodWidget, | ||||||
|  |         ChartPlotWidget, | ||||||
|  |     ) | ||||||
|  |     from ._interaction import ChartView | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
|  | @ -65,7 +76,7 @@ class ArrowEditor(Struct): | ||||||
|         uid: str, |         uid: str, | ||||||
|         x: float, |         x: float, | ||||||
|         y: float, |         y: float, | ||||||
|         color='default', |         color: str = 'default', | ||||||
|         pointing: str | None = None, |         pointing: str | None = None, | ||||||
| 
 | 
 | ||||||
|     ) -> pg.ArrowItem: |     ) -> pg.ArrowItem: | ||||||
|  | @ -251,43 +262,75 @@ class LineEditor(Struct): | ||||||
|         return lines |         return lines | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class SelectRect(QtWidgets.QGraphicsRectItem): | def as_point( | ||||||
|  |     pair: Sequence[float] | QPointF, | ||||||
|  | ) -> QPointF: | ||||||
|  |     ''' | ||||||
|  |     Cast any input pair of floats to a `QPointF` object | ||||||
|  |     for use in Qt geometry routines. | ||||||
| 
 | 
 | ||||||
|  |     ''' | ||||||
|  |     if isinstance(pair, QPointF): | ||||||
|  |         return pair | ||||||
|  | 
 | ||||||
|  |     return QPointF(pair[0], pair[1]) | ||||||
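Usage is symmetric across both accepted input types:

    as_point((1.0, 2.0))         # float pair -> QPointF(1.0, 2.0)
    as_point(QPointF(1.0, 2.0))  # QPointF passed through unchanged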
|  | 
 | ||||||
|  | 
 | ||||||
+
+
+# TODO: maybe implement better, something something RectItemProxy??
+# -[ ] dig into details of how proxy's work?
+#    https://doc.qt.io/qt-5/qgraphicsscene.html#addWidget
+# -[ ] consider using `.addRect()` maybe?
+
+class SelectRect(QtWidgets.QGraphicsRectItem):
+    '''
+    A data-view "selection rectangle": the most fundamental
+    geometry for annotating data views.
+
+    - https://doc.qt.io/qt-5/qgraphicsrectitem.html
+    - https://doc.qt.io/qt-6/qgraphicsrectitem.html
+
+    '''
     def __init__(
         self,
         viewbox: ViewBox,
-        color: str = 'dad_blue',
+        color: str | None = None,
     ) -> None:
         super().__init__(0, 0, 1, 1)

         # self.rbScaleBox = QGraphicsRectItem(0, 0, 1, 1)
-        self.vb = viewbox
-        self._chart: 'ChartPlotWidget' = None  # noqa
+        self.vb: ViewBox = viewbox

-        # override selection box color
+        self._chart: ChartPlotWidget | None = None  # noqa
+
+        # TODO: maybe allow this to be dynamic via a method?
+        # override selection box color
+        color: str = color or 'dad_blue'
         color = QColor(hcolor(color))
+
         self.setPen(fn.mkPen(color, width=1))
         color.setAlpha(66)
         self.setBrush(fn.mkBrush(color))
         self.setZValue(1e9)
-        self.hide()
-        self._label = None

         label = self._label = QLabel()
-        label.setTextFormat(0)  # markdown
+        label.setTextFormat(
+            Qt.TextFormat.MarkdownText
+        )
         label.setFont(_font.font)
         label.setMargin(0)
         label.setAlignment(
             QtCore.Qt.AlignLeft
             # | QtCore.Qt.AlignVCenter
         )
+        label.hide()  # always right after init

         # proxy is created after containing scene is initialized
-        self._label_proxy = None
-        self._abs_top_right = None
+        self._label_proxy: QGraphicsProxyWidget | None = None
+        self._abs_top_right: Point | None = None

-        # TODO: "swing %" might be handy here (data's max/min # % change)
-        self._contents = [
+        # TODO: "swing %" might be handy here (data's max/min
+        # % change)?
+        self._contents: list[str] = [
             'change: {pchng:.2f} %',
             'range: {rng:.2f}',
             'bars: {nbars}',
@@ -297,12 +340,31 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
             'sigma: {std:.2f}',
         ]

+        self.add_to_view(viewbox)
+        self.hide()
+
+    def add_to_view(
+        self,
+        view: ChartView,
+    ) -> None:
+        '''
+        Self-defined view hookup impl which will
+        also re-assign the internal ref.
+
+        '''
+        view.addItem(
+            self,
+            ignoreBounds=True,
+        )
+        if self.vb is not view:
+            self.vb = view
+
     @property
-    def chart(self) -> 'ChartPlotWidget':  # noqa
+    def chart(self) -> ChartPlotWidget:  # noqa
         return self._chart

     @chart.setter
-    def chart(self, chart: 'ChartPlotWidget') -> None:  # noqa
+    def chart(self, chart: ChartPlotWidget) -> None:  # noqa
         self._chart = chart
         chart.sigRangeChanged.connect(self.update_on_resize)
         palette = self._label.palette()
@@ -315,57 +377,155 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
         )

     def update_on_resize(self, vr, r):
-        """Re-position measure label on view range change.
+        '''
+        Re-position measure label on view range change.

-        """
+        '''
         if self._abs_top_right:
             self._label_proxy.setPos(
                 self.vb.mapFromView(self._abs_top_right)
             )

-    def mouse_drag_released(
+    def set_scen_pos(
         self,
-        p1: QPointF,
-        p2: QPointF
+        scen_p1: QPointF,
+        scen_p2: QPointF,
+
+        update_label: bool = True,
+
     ) -> None:
-        """Called on final button release for mouse drag with start and
-        end positions.
+        '''
+        Set position from scene coords of selection rect (normally
+        from mouse position) and accompanying label, move label to
+        match.

-        """
-        self.set_pos(p1, p2)
+        '''
+        # NOTE XXX: apparently just setting it doesn't work!?
+        # i have no idea why but it's pretty weird we have to do
+        # this transform thing which was basically pulled verbatim
+        # from the `pg.ViewBox.updateScaleBox()` method.
+        view_rect: QRectF = self.vb.childGroup.mapRectFromScene(
+            QRectF(
+                scen_p1,
+                scen_p2,
+            )
+        )
+        self.setPos(view_rect.topLeft())
+        # XXX: does not work..!?!?
+        # https://doc.qt.io/qt-5/qgraphicsrectitem.html#setRect
+        # self.setRect(view_rect)

-    def set_pos(
-        self,
-        p1: QPointF,
-        p2: QPointF
-    ) -> None:
-        """Set position of selection rect and accompanying label, move
-        label to match.
+        tr = QTransform.fromScale(
+            view_rect.width(),
+            view_rect.height(),
+        )
+        self.setTransform(tr)

-        """
-        if self._label_proxy is None:
-            # https://doc.qt.io/qt-5/qgraphicsproxywidget.html
-            self._label_proxy = self.vb.scene().addWidget(self._label)
-
-        start_pos = self.vb.mapToView(p1)
-        end_pos = self.vb.mapToView(p2)
-
-        # map to view coords and update area
-        r = QtCore.QRectF(start_pos, end_pos)
-
-        # old way; don't need right?
-        # lr = QtCore.QRectF(p1, p2)
-        # r = self.vb.childGroup.mapRectFromParent(lr)
-
-        self.setPos(r.topLeft())
-        self.resetTransform()
-        self.setRect(r)
+        # XXX: never got this working, was always offset
+        # / transformed completely wrong (and off to the far right
+        # from the cursor?)
+        # self.set_view_pos(
+        #     view_rect=view_rect,
+        #     # self.vwqpToView(p1),
+        #     # self.vb.mapToView(p2),
+        #     # start_pos=self.vb.mapToScene(p1),
+        #     # end_pos=self.vb.mapToScene(p2),
+        # )
         self.show()

-        y1, y2 = start_pos.y(), end_pos.y()
-        x1, x2 = start_pos.x(), end_pos.x()
+        if update_label:
+            self.init_label(view_rect)

-        # TODO: heh, could probably use a max-min streamin algo here too
+    def set_view_pos(
+        self,
+
+        start_pos: QPointF | Sequence[float] | None = None,
+        end_pos: QPointF | Sequence[float] | None = None,
+        view_rect: QRectF | None = None,
+
+        update_label: bool = True,
+
+    ) -> None:
+        '''
+        Set position from `ViewBox` coords (i.e. from the actual
+        data domain) of rect (and any accompanying label which is
+        moved to match).
+
+        '''
+        if self._chart is None:
+            raise RuntimeError(
+                'You MUST assign a `SelectRect.chart: ChartPlotWidget`!'
+            )
+
+        if view_rect is None:
+            # ensure point casting
+            start_pos: QPointF = as_point(start_pos)
+            end_pos: QPointF = as_point(end_pos)
+
+            # map to view coords and update area
+            view_rect = QtCore.QRectF(
+                start_pos,
+                end_pos,
+            )
+
+        self.setPos(view_rect.topLeft())
+
+        # NOTE: SERIOUSLY NO IDEA WHY THIS WORKS...
+        # but it does and all the other commented stuff above
+        # didn't, dawg..
+
+        # self.resetTransform()
+        # self.setRect(view_rect)
+
+        tr = QTransform.fromScale(
+            view_rect.width(),
+            view_rect.height(),
+        )
+        self.setTransform(tr)
+
+        if update_label:
+            self.init_label(view_rect)
+
+        print(
+            'SelectRect modify:\n'
+            f'QRectF: {view_rect}\n'
+            f'start_pos: {start_pos}\n'
+            f'end_pos: {end_pos}\n'
+        )
+        self.show()
+
+    def init_label(
+        self,
+        view_rect: QRectF,
+    ) -> QLabel:
+
+        # should be init-ed in `.__init__()`
+        label: QLabel = self._label
+        cv: ChartView = self.vb
+
+        # https://doc.qt.io/qt-5/qgraphicsproxywidget.html
+        if self._label_proxy is None:
+            scen: QGraphicsScene = cv.scene()
+            # NOTE: specifically this is passing a widget
+            # pointer to the scene's `.addWidget()` as per,
+            # https://doc.qt.io/qt-5/qgraphicsproxywidget.html#embedding-a-widget-with-qgraphicsproxywidget
+            self._label_proxy: QGraphicsProxyWidget = scen.addWidget(label)
+
+        # get label startup coords
+        tl: QPointF = view_rect.topLeft()
+        br: QPointF = view_rect.bottomRight()
+
+        x1, y1 = tl.x(), tl.y()
+        x2, y2 = br.x(), br.y()
+
+        # TODO: to remove, previous label corner point unpacking
+        # x1, y1 = start_pos.x(), start_pos.y()
+        # x2, y2 = end_pos.x(), end_pos.y()
+        # y1, y2 = start_pos.y(), end_pos.y()
+        # x1, x2 = start_pos.x(), end_pos.x()
+
+        # TODO: heh, could probably use a max-min streaming algo
+        # here too?
         _, xmn = min(y1, y2), min(x1, x2)
         ymx, xmx = max(y1, y2), max(x1, x2)

@@ -375,26 +535,35 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
         ixmn, ixmx = round(xmn), round(xmx)
         nbars = ixmx - ixmn + 1

-        chart = self._chart
-        data = chart.get_viz(chart.name).shm.array[ixmn:ixmx]
+        chart: ChartPlotWidget = self._chart
+        data: np.ndarray = chart.get_viz(
+            chart.name
+        ).shm.array[ixmn:ixmx]

         if len(data):
-            std = data['close'].std()
-            dmx = data['high'].max()
-            dmn = data['low'].min()
+            std: float = data['close'].std()
+            dmx: float = data['high'].max()
+            dmn: float = data['low'].min()
         else:
             dmn = dmx = std = np.nan

         # update label info
-        self._label.setText('\n'.join(self._contents).format(
-            pchng=pchng, rng=rng, nbars=nbars,
-            std=std, dmx=dmx, dmn=dmn,
+        label.setText('\n'.join(self._contents).format(
+            pchng=pchng,
+            rng=rng,
+            nbars=nbars,
+            std=std,
+            dmx=dmx,
+            dmn=dmn,
         ))

         # print(f'x2, y2: {(x2, y2)}')
         # print(f'xmn, ymn: {(xmn, ymx)}')

-        label_anchor = Point(xmx + 2, ymx)
+        label_anchor = Point(
+            xmx + 2,
+            ymx,
+        )

         # XXX: in the drag bottom-right -> top-left case we don't
         # want the label to overlay the box.

@@ -403,13 +572,40 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
         #     # label_anchor = Point(x2, y2 + self._label.height())
         #     label_anchor = Point(xmn, ymn)

-        self._abs_top_right = label_anchor
-        self._label_proxy.setPos(self.vb.mapFromView(label_anchor))
-        # self._label.show()
+        self._abs_top_right: Point = label_anchor
+        self._label_proxy.setPos(
+            cv.mapFromView(label_anchor)
+        )
+        label.show()

-    def clear(self):
-        """Clear the selection box from view.
+    def hide(self):
+        '''
+        Clear the selection box from its graphics scene but
+        don't delete it permanently.

-        """
+        '''
+        super().hide()
         self._label.hide()
-        self.hide()
+
+    # TODO: ensure no one else is using this..
+    clear = hide
+
+    def delete(self) -> None:
+        '''
+        De-allocate this rect from its rendering graphics scene.
+
+        Like a permanent hide.
+
+        '''
+        scen: QGraphicsScene = self.scene()
+        if scen is None:
+            return
+
+        scen.removeItem(self)
+        if (
+            self._label
+            and
+            self._label_proxy
+        ):
+            scen.removeItem(self._label_proxy)
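Both `set_scen_pos()` and `set_view_pos()` above lean on the same trick lifted from `pg.ViewBox.updateScaleBox()`: pin the unit rect at the target's top-left and scale it into shape with a `QTransform`, instead of calling `.setRect()` (which, per the inline comments, never rendered correctly). A minimal standalone sketch of that pattern, assuming only a `pg.ViewBox` and a unit-sized `QGraphicsRectItem`:

    from pyqtgraph.Qt import QtCore, QtGui, QtWidgets

    def place_unit_rect(
        rect_item: QtWidgets.QGraphicsRectItem,  # QGraphicsRectItem(0, 0, 1, 1)
        vb,                                      # the hosting pg.ViewBox
        scene_p1: QtCore.QPointF,
        scene_p2: QtCore.QPointF,
    ) -> None:
        # map the scene-space drag corners into the viewbox's
        # internal (data domain) coordinate system,
        view_rect = vb.childGroup.mapRectFromScene(
            QtCore.QRectF(scene_p1, scene_p2)
        )
        # then anchor the unit rect at the top-left corner and
        # stretch it to size via a scale transform.
        rect_item.setPos(view_rect.topLeft())
        rect_item.setTransform(
            QtGui.QTransform.fromScale(
                view_rect.width(),
                view_rect.height(),
            )
        )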
---
@@ -23,28 +23,29 @@ from typing import Callable

 import trio
 from tractor.trionics import gather_contexts
-from PyQt5 import QtCore
-from PyQt5.QtCore import QEvent, pyqtBoundSignal
-from PyQt5.QtWidgets import QWidget
-from PyQt5.QtWidgets import (
-    QGraphicsSceneMouseEvent as gs_mouse,
-)

+from piker.ui.qt import (
+    QtCore,
+    QWidget,
+    QEvent,
+    keys,
+    gs_keys,
+    pyqtBoundSignal,
+)
 from piker.types import Struct


 MOUSE_EVENTS = {
-    gs_mouse.GraphicsSceneMousePress,
-    gs_mouse.GraphicsSceneMouseRelease,
-    QEvent.MouseButtonPress,
-    QEvent.MouseButtonRelease,
+    gs_keys.GraphicsSceneMousePress,
+    gs_keys.GraphicsSceneMouseRelease,
+    keys.MouseButtonPress,
+    keys.MouseButtonRelease,
     # QtGui.QMouseEvent,
 }


 # TODO: maybe consider some constrained ints down the road?
 # https://pydantic-docs.helpmanual.io/usage/types/#constrained-types
-
 class KeyboardMsg(Struct):
     '''Unpacked Qt keyboard event data.

@@ -114,7 +115,10 @@ class EventRelay(QtCore.QObject):
         # something to do with Qt internals and calling the
         # parent handler?

-        if etype in {QEvent.KeyPress, QEvent.KeyRelease}:
+        if etype in {
+            QEvent.Type.KeyPress,
+            QEvent.Type.KeyRelease,
+        }:

             msg = KeyboardMsg(
                 event=ev,

@@ -160,7 +164,9 @@ class EventRelay(QtCore.QObject):
 async def open_event_stream(

     source_widget: QWidget,
-    event_types: set[QEvent] = {QEvent.KeyPress},
+    event_types: set[QEvent] = {
+        QEvent.Type.KeyPress,
+    },
     filter_auto_repeats: bool = True,

 ) -> trio.abc.ReceiveChannel:

@@ -201,8 +207,8 @@ async def open_signal_handler(
         async for args in recv:
             await async_handler(*args)

-    async with trio.open_nursery() as n:
-        n.start_soon(proxy_to_handler)
+    async with trio.open_nursery() as tn:
+        tn.start_soon(proxy_to_handler)
         async with send:
             yield

@@ -212,18 +218,48 @@ async def open_handlers(

     source_widgets: list[QWidget],
     event_types: set[QEvent],
-    async_handler: Callable[[QWidget, trio.abc.ReceiveChannel], None],
-    **kwargs,
+
+    # NOTE: if you want to bind in additional kwargs to the handler
+    # pass in a `partial()` instead!
+    async_handler: Callable[
+        [QWidget, trio.abc.ReceiveChannel],  # required handler args
+        None
+    ],
+
+    # XXX: these are the ONLY inputs available to the
+    # `open_event_stream()` event-relay to mem-chan factory above!
+    **open_ev_stream_kwargs,

 ) -> None:
+    '''
+    Connect and schedule an async handler function to receive an
+    arbitrary `QWidget`'s events with kb/mouse msgs repacked into
+    structs (see above) and shuttled over a mem-chan to the input
+    `async_handler` to allow interaction-IO processing from
+    a `trio` func-as-task.
+
+    '''
+    widget: QWidget
+    streams: list[trio.abc.ReceiveChannel]
     async with (
-        trio.open_nursery() as n,
+        trio.open_nursery() as tn,
         gather_contexts([
-            open_event_stream(widget, event_types, **kwargs)
+            open_event_stream(
+                widget,
+                event_types,
+                **open_ev_stream_kwargs,
+            )
             for widget in source_widgets
         ]) as streams,
     ):
-        for widget, event_recv_stream in zip(source_widgets, streams):
-            n.start_soon(async_handler, widget, event_recv_stream)
+        for widget, event_recv_stream in zip(
+            source_widgets,
+            streams,
+        ):
+            tn.start_soon(
+                async_handler,
+                widget,
+                event_recv_stream,
+            )

         yield
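A hypothetical wiring of `open_handlers()` from a `trio` task; the `partial()` trick from the NOTE above is how extra kwargs (here a made-up `verbose` flag) get bound into the handler:

    from functools import partial

    async def log_keys(
        widget: QWidget,
        recv_chan: trio.abc.ReceiveChannel,
        verbose: bool = False,  # extra kwarg bound via partial()
    ) -> None:
        async for msg in recv_chan:
            if verbose:
                print(f'{widget}: key={msg.key} txt={msg.txt!r}')

    async def watch(widgets: list[QWidget]) -> None:
        async with open_handlers(
            widgets,
            event_types={QEvent.Type.KeyPress},
            async_handler=partial(log_keys, verbose=True),
        ):
            await trio.sleep_forever()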
---
@@ -30,34 +30,35 @@
 import platform
 import traceback

-# Qt specific
-import PyQt5  # noqa
-from PyQt5.QtWidgets import (
-    QWidget,
-    QMainWindow,
-    QApplication,
-)
-from PyQt5 import QtCore
-from PyQt5.QtCore import (
-    pyqtRemoveInputHook,
-    Qt,
-    QCoreApplication,
-)
 import qdarkstyle
 from qdarkstyle import DarkPalette
 # import qdarkgraystyle  # TODO: play with it
 import trio
 from outcome import Error

+# Qt version-agnostic
+from .qt import (
+    QWidget,
+    QMainWindow,
+    QApplication,
+    QtCore,
+    pyqtRemoveInputHook,
+    Qt,
+    QCoreApplication,
+)
 from ..service import (
     maybe_open_pikerd,
-    get_tractor_runtime_kwargs,
+    get_runtime_vars,
 )
 from ..log import get_logger
 from ._pg_overrides import _do_overrides
 from . import _style


+if TYPE_CHECKING:
+    from ._chart import GodWidget
+
+
 log = get_logger(__name__)

 # pyqtgraph global config

@@ -146,7 +147,7 @@ def run_qtractor(

     # load dark theme
     stylesheet = qdarkstyle.load_stylesheet(
-        qt_api='pyqt5',
+        qt_api='pyqt6',
         palette=DarkPalette,
     )
     app.setStyleSheet(stylesheet)

@@ -173,7 +174,9 @@ def run_qtractor(
     instance.window = window

     # override tractor's defaults
-    tractor_kwargs.update(get_tractor_runtime_kwargs())
+    tractor_kwargs.update(
+        get_runtime_vars()
+    )

     # define tractor entrypoint
     async def main():
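The `.qt` shim module itself is not shown in this diff, so the following is only an inferred sketch of what such a version-agnostic compat layer might look like, based on the names imported from it above (`Qt`, `QWidget`, `keys`, `size_policy`, and friends):

    # piker/ui/qt.py (hypothetical reconstruction, not the real file)
    from PyQt6 import QtCore, QtGui, QtWidgets  # or PySide6
    from PyQt6.QtCore import (
        Qt,
        QEvent,
        QSize,
        QPointF,
        QRectF,
        QCoreApplication,
        pyqtRemoveInputHook,
    )
    from PyQt6.QtWidgets import (
        QWidget,
        QMainWindow,
        QApplication,
        QLabel,
        QSizePolicy,
    )

    # short aliases for the strictly-scoped Qt6 enums
    keys = QEvent.Type
    gs_keys = QEvent.Type  # graphics-scene event types live here too
    size_policy = QSizePolicy.Policy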
---
@@ -28,9 +28,15 @@ from typing import (
 )

 import trio
-from PyQt5 import QtGui
-from PyQt5.QtCore import QSize, QModelIndex, Qt, QEvent
-from PyQt5.QtWidgets import (
+
+from piker.ui.qt import (
+    keys,
+    size_policy,
+    QtGui,
+    QSize,
+    QModelIndex,
+    Qt,
+    QEvent,
     QWidget,
     QLabel,
     QComboBox,

@@ -39,7 +45,6 @@ from PyQt5.QtWidgets import (
     QVBoxLayout,
     QFormLayout,
     QProgressBar,
-    QSizePolicy,
     QStyledItemDelegate,
     QStyleOptionViewItem,
 )

@@ -71,14 +76,14 @@ class Edit(QLineEdit):

         if width_in_chars:
             self._chars = int(width_in_chars)
-            x_size_policy = QSizePolicy.Fixed
+            x_size_policy = size_policy.Fixed

         else:
             # chart count which will be used to calculate
             # width of input field.
             self._chars: int = 6
             # fit to surrounding frame width
-            x_size_policy = QSizePolicy.Expanding
+            x_size_policy = size_policy.Expanding

         super().__init__(parent)

@@ -86,7 +91,7 @@ class Edit(QLineEdit):
         # https://doc.qt.io/qt-5/qsizepolicy.html#Policy-enum
         self.setSizePolicy(
             x_size_policy,
-            QSizePolicy.Fixed,
+            size_policy.Fixed,
         )
         self.setFont(font.font)

@@ -180,11 +185,13 @@ class Selection(QComboBox):

         self._items: dict[str, int] = {}
         super().__init__(parent=parent)
-        self.setSizeAdjustPolicy(QComboBox.AdjustToContents)
+        self.setSizeAdjustPolicy(
+            QComboBox.SizeAdjustPolicy.AdjustToContents,
+        )
         # make line edit expand to surrounding frame
         self.setSizePolicy(
-            QSizePolicy.Expanding,
-            QSizePolicy.Fixed,
+            size_policy.Expanding,
+            size_policy.Fixed,
         )
         view = self.view()
         view.setUniformItemSizes(True)

@@ -308,8 +315,8 @@ class FieldsForm(QWidget):

         # size it as we specify
         self.setSizePolicy(
-            QSizePolicy.Expanding,
-            QSizePolicy.Expanding,
+            size_policy.Expanding,
+            size_policy.Expanding,
         )

         # XXX: not sure why we have to create this here exactly

@@ -416,8 +423,8 @@ class FieldsForm(QWidget):
         select.set_items(values)

         self.setSizePolicy(
-            QSizePolicy.Fixed,
-            QSizePolicy.Fixed,
+            size_policy.Fixed,
+            size_policy.Fixed,
         )
         select.show()
         self.form.addRow(label, select)

@@ -437,7 +444,10 @@ async def handle_field_input(

     async for kbmsg in recv_chan:

-        if kbmsg.etype in {QEvent.KeyPress, QEvent.KeyRelease}:
+        if kbmsg.etype in {
+            keys.KeyPress,
+            keys.KeyRelease,
+        }:
             event, etype, key, mods, txt = kbmsg.to_tuple()
             print(f'key: {kbmsg.key}, mods: {kbmsg.mods}, txt: {kbmsg.txt}')

@@ -703,7 +713,8 @@ def mk_fill_status_bar(
     )

     bottom_label = form.add_field_label(
-        'x: {step_size}',
+        # 'x: {step_size}',
+        '{unit_prefix}: {step_size}',
        font_size=bar_label_font_size,
         font_color='gunmetal',
     )
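The motivation for the `size_policy` alias used throughout this file: Qt6 made enum access strictly scoped, so the old Qt5 spellings no longer resolve (the same change explains `QComboBox.SizeAdjustPolicy.AdjustToContents` above). A hedged before/after sketch:

    from PyQt6.QtWidgets import QSizePolicy, QWidget

    size_policy = QSizePolicy.Policy  # the shim's alias, presumably

    def expand_horizontally(widget: QWidget) -> None:
        # Qt5 allowed `QSizePolicy.Expanding`; under Qt6 only the
        # fully-scoped `QSizePolicy.Policy.Expanding` exists, which
        # the alias keeps terse at call sites:
        widget.setSizePolicy(
            size_policy.Expanding,
            size_policy.Fixed,
        )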
---
@@ -181,7 +181,10 @@ async def open_fsp_sidepane(
 async def open_fsp_actor_cluster(
     names: list[str] = ['fsp_0', 'fsp_1'],

-) -> AsyncGenerator[int, dict[str, tractor.Portal]]:
+) -> AsyncGenerator[
+    int,
+    dict[str, tractor.Portal]
+]:

     from tractor._clustering import open_actor_cluster

@@ -390,7 +393,7 @@ class FspAdmin:
         complete: trio.Event,
         started: trio.Event,
         fqme: str,
-        dst_fsp_flume: Flume,
+        dst_flume: Flume,
         conf: dict,
         target: Fsp,
         loglevel: str,

@@ -408,16 +411,14 @@ class FspAdmin:
                 # chaining entrypoint
                 cascade,

+                # TODO: can't we just drop this and expect the
+                # far end to read the src flume's .mkt.fqme?
                 # data feed key
                 fqme=fqme,

-                # TODO: pass `Flume.to_msg()`s here?
-                # mems
-                src_shm_token=self.flume.rt_shm.token,
-                dst_shm_token=dst_fsp_flume.rt_shm.token,
-
-                # target
-                ns_path=ns_path,
+                src_flume_addr=self.flume.to_msg(),
+                dst_flume_addr=dst_flume.to_msg(),
+                ns_path=ns_path,  # edge-bind-func

                 loglevel=loglevel,
                 zero_on_step=conf.get('zero_on_step', False),

@@ -431,14 +432,14 @@ class FspAdmin:
             ctx.open_stream() as stream,
         ):

-            dst_fsp_flume.stream: tractor.MsgStream = stream
+            dst_flume.stream: tractor.MsgStream = stream

             # register output data
             self._registry[
                 (fqme, ns_path)
             ] = (
                 stream,
-                dst_fsp_flume.rt_shm,
+                dst_flume.rt_shm,
                 complete
             )

@@ -515,7 +516,7 @@ class FspAdmin:
             broker='piker',
             _atype='fsp',
         )
-        dst_fsp_flume = Flume(
+        dst_flume = Flume(
             mkt=mkt,
             _rt_shm_token=dst_shm.token,
             first_quote={},

@@ -543,13 +544,13 @@ class FspAdmin:
             complete,
             started,
             fqme,
-            dst_fsp_flume,
+            dst_flume,
             conf,
             target,
             loglevel,
         )

-        return dst_fsp_flume, started
+        return dst_flume, started

     async def open_fsp_chart(
         self,

@@ -559,7 +560,7 @@ class FspAdmin:
         conf: dict,  # yeah probably dumb..
         loglevel: str = 'error',

-    ) -> (trio.Event, ChartPlotWidget):
+    ) -> trio.Event:

         flume, started = await self.start_engine_task(
             target,

@@ -926,7 +927,7 @@ async def start_fsp_displays(

     linked: LinkedSplits,
     flume: Flume,
-    group_status_key: str,
+    # group_status_key: str,
     loglevel: str,

 ) -> None:

@@ -973,21 +974,23 @@ async def start_fsp_displays(
             flume,
         ) as admin,
     ):
-        statuses = []
+        statuses: list[trio.Event] = []
         for target, conf in fsp_conf.items():
-            started = await admin.open_fsp_chart(
+            started: trio.Event = await admin.open_fsp_chart(
                 target,
                 conf,
             )
-            done = linked.window().status_bar.open_status(
-                f'loading fsp, {target}..',
-                group_key=group_status_key,
-            )
-            statuses.append((started, done))
+            # done = linked.window().status_bar.open_status(
+            #     f'loading fsp, {target}..',
+            #     group_key=group_status_key,
+            # )
+            # statuses.append((started, done))
+            statuses.append(started)

-        for fsp_loaded, status_cb in statuses:
+        # for fsp_loaded, status_cb in statuses:
+        for fsp_loaded in statuses:
             await fsp_loaded.wait()
             profiler(f'attached to fsp portal: {target}')
-            status_cb()
+            # status_cb()

     # blocks on nursery until all fsp actors complete
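The rewritten startup loop above reduces to a simple `trio` pattern: collect one `trio.Event` per spawned engine task, then block until each fires. A self-contained toy version of just that pattern:

    import trio

    async def main() -> None:
        statuses: list[trio.Event] = []

        async def engine_task(started: trio.Event) -> None:
            await trio.sleep(0.1)  # stand-in for fsp engine startup
            started.set()

        async with trio.open_nursery() as tn:
            for _ in range(3):
                started = trio.Event()
                statuses.append(started)
                tn.start_soon(engine_task, started)

            # mirrors the `for fsp_loaded in statuses:` loop above
            for started in statuses:
                await started.wait()

    trio.run(main)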
---
@@ -15,15 +15,18 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 '''
-``QIcon`` hackery.
+`QIcon` hackery.
+
+Mostly dynamically loading pixmaps for use with `QGraphicsScene`.

 '''
-from PyQt5.QtWidgets import QStyle
-from PyQt5.QtGui import (
-    QIcon, QPixmap, QColor
+from piker.ui.qt import (
+    QSize,
+    QStyle,
+    QIcon,
+    QPixmap,
+    QColor,
 )
-from PyQt5.QtCore import QSize
-
 from ._style import hcolor

 # https://www.pythonguis.com/faq/built-in-qicons-pyqt/

@@ -44,7 +47,8 @@ def mk_icons(
     size: QSize,

 ) -> dict[str, QIcon]:
-    '''This helper is indempotent.
+    '''
+    This helper is idempotent.

     '''
     global _icons, _icon_names

@@ -56,7 +60,11 @@ def mk_icons(
     # load account selection using current style
     for name, icon_name in _icon_names.items():

-        stdpixmap = getattr(QStyle, icon_name)
+        stdpixmap = getattr(
+            # https://www.pythonguis.com/faq/built-in-qicons-pyqt/
+            QStyle.StandardPixmap,  # pyqt/pyside6
+            icon_name,
+        )
         stdicon = style.standardIcon(stdpixmap)
         pixmap = stdicon.pixmap(size)
---
@@ -23,6 +23,7 @@ from contextlib import (
     asynccontextmanager,
     ExitStack,
 )
+from functools import partial
 import time
 from typing import (
     Callable,

@@ -30,24 +31,26 @@ from typing import (
 )

 import pyqtgraph as pg
-# from pyqtgraph.GraphicsScene import mouseEvents
-from PyQt5.QtWidgets import QGraphicsSceneMouseEvent as gs_mouse
-from PyQt5.QtGui import (
-    QWheelEvent,
-)
-from PyQt5.QtCore import (
-    Qt,
-    QEvent,
-)
+# NOTE XXX: pg is super annoying and re-implements its own mouse
+# event subsystem.. we should really look into re-working/writing
+# this down the road.. Bo
+from pyqtgraph.GraphicsScene import mouseEvents as mevs
+# from pyqtgraph.GraphicsScene.mouseEvents import MouseDragEvent
 from pyqtgraph import (
     ViewBox,
     Point,
     QtCore,
+    functions as fn,
 )
-from pyqtgraph import functions as fn
 import numpy as np
 import trio

+from piker.ui.qt import (
+    QWheelEvent,
+    QGraphicsSceneMouseEvent as gs_mouse,
+    Qt,
+    QEvent,
+)
 from ..log import get_logger
 from ..toolz import (
     Profiler,

@@ -70,27 +73,28 @@ if TYPE_CHECKING:
     )
     from ._dataviz import Viz
     from .order_mode import OrderMode
+    from ._display import DisplayState


 log = get_logger(__name__)

 NUMBER_LINE = {
-    Qt.Key_1,
-    Qt.Key_2,
-    Qt.Key_3,
-    Qt.Key_4,
-    Qt.Key_5,
-    Qt.Key_6,
-    Qt.Key_7,
-    Qt.Key_8,
-    Qt.Key_9,
-    Qt.Key_0,
+    Qt.Key.Key_1,
+    Qt.Key.Key_2,
+    Qt.Key.Key_3,
+    Qt.Key.Key_4,
+    Qt.Key.Key_5,
+    Qt.Key.Key_6,
+    Qt.Key.Key_7,
+    Qt.Key.Key_8,
+    Qt.Key.Key_9,
+    Qt.Key.Key_0,
 }

 ORDER_MODE = {
-    Qt.Key_A,
-    Qt.Key_F,
-    Qt.Key_D,
+    Qt.Key.Key_A,
+    Qt.Key.Key_F,
+    Qt.Key.Key_D,
 }

@@ -98,6 +102,7 @@ async def handle_viewmode_kb_inputs(

     view: ChartView,
     recv_chan: trio.abc.ReceiveChannel,
+    dss: dict[str, DisplayState],

 ) -> None:

@@ -173,17 +178,42 @@ async def handle_viewmode_kb_inputs(
                     Qt.Key_P,
                 }
             ):
-                import tractor
                 feed = order_mode.feed  # noqa
                 chart = order_mode.chart  # noqa
                 viz = chart.main_viz  # noqa
                 vlm_chart = chart.linked.subplots['volume']  # noqa
                 vlm_viz = vlm_chart.main_viz  # noqa
                 dvlm_pi = vlm_chart._vizs['dolla_vlm'].plot  # noqa
+                import tractor
                 await tractor.pause()
                 view.interact_graphics_cycle()

-            # SEARCH MODE #
+            # FORCE graphics reset-and-render of all currently
+            # shown data `Viz`s for the current chart app.
+            if (
+                ctrl
+                and key in {
+                    Qt.Key_R,
+                }
+            ):
+                fqme: str
+                ds: DisplayState
+                for fqme, ds in dss.items():
+
+                    viz: Viz
+                    for tf, viz in {
+                        60: ds.hist_viz,
+                        1: ds.viz,
+                    }.items():
+                        # TODO: only allow this when the data is IN VIEW!
+                        # also, we probably can do this more efficiently
+                        # / smarter by only redrawing the portion of the
+                        # path necessary?
+                        viz.reset_graphics()
+
+            # ------ - ------
+            # SEARCH MODE
+            # ------ - ------
             # ctrl-<space>/<l> for "lookup", "search" -> open search tree
             if (
                 ctrl

@@ -243,8 +273,10 @@ async def handle_viewmode_kb_inputs(
                     delta=-view.def_delta,
                 )

-            elif key == Qt.Key_R:
-
+            elif (
+                not ctrl
+                and key == Qt.Key_R
+            ):
                 # NOTE: seems that if we don't yield a Qt render
                 # cycle then the m4 downsampled curves will show here
                 # without another reset..
@@ -427,6 +459,7 @@ async def handle_viewmode_mouse(

     view: ChartView,
     recv_chan: trio.abc.ReceiveChannel,
+    dss: dict[str, DisplayState],

 ) -> None:

@@ -466,6 +499,7 @@ class ChartView(ViewBox):
     mode_name: str = 'view'
     def_delta: float = 616 * 6
     def_scale_factor: float = 1.016 ** (def_delta * -1 / 20)
+    # annots: dict[int, GraphicsObject] = {}

     def __init__(
         self,

@@ -486,6 +520,7 @@ class ChartView(ViewBox):
             # defaultPadding=0.,
             **kwargs
         )
+
         # for "known y-range style"
         self._static_yrange = static_yrange

@@ -500,7 +535,11 @@ class ChartView(ViewBox):

         # add our selection box annotator
         self.select_box = SelectRect(self)
-        self.addItem(self.select_box, ignoreBounds=True)
+        # self.select_box.add_to_view(self)
+        # self.addItem(
+        #     self.select_box,
+        #     ignoreBounds=True,
+        # )

         self.mode = None
         self.order_mode: bool = False

@@ -557,6 +596,7 @@ class ChartView(ViewBox):
     @asynccontextmanager
     async def open_async_input_handler(
         self,
+        **handler_kwargs,

     ) -> ChartView:

@@ -567,14 +607,20 @@ class ChartView(ViewBox):
                     QEvent.KeyPress,
                     QEvent.KeyRelease,
                 },
-                async_handler=handle_viewmode_kb_inputs,
+                async_handler=partial(
+                    handle_viewmode_kb_inputs,
+                    **handler_kwargs,
+                ),
             ),
             _event.open_handlers(
                 [self],
                 event_types={
                     gs_mouse.GraphicsSceneMousePress,
                 },
-                async_handler=handle_viewmode_mouse,
+                async_handler=partial(
+                    handle_viewmode_mouse,
+                    **handler_kwargs,
+                ),
             ),
         ):
             yield self
@@ -711,17 +757,18 @@ class ChartView(ViewBox):

     def mouseDragEvent(
         self,
-        ev,
+        ev: mevs.MouseDragEvent,
         axis: int | None = None,

     ) -> None:
-        pos = ev.pos()
-        lastPos = ev.lastPos()
-        dif = pos - lastPos
-        dif = dif * -1
+        pos: Point = ev.pos()
+        lastPos: Point = ev.lastPos()
+        dif: Point = (pos - lastPos) * -1
+        # dif: Point = pos - lastPos
+        # dif: Point = dif * -1

         # NOTE: if axis is specified, event will only affect that axis.
-        button = ev.button()
+        btn = ev.button()

         # Ignore axes if mouse is disabled
         mouseEnabled = np.array(

@@ -733,7 +780,7 @@ class ChartView(ViewBox):
             mask[1-axis] = 0.0

         # Scale or translate based on mouse button
-        if button & (
+        if btn & (
             QtCore.Qt.LeftButton | QtCore.Qt.MidButton
         ):
             # zoom y-axis ONLY when click-n-drag on it

@@ -756,34 +803,55 @@ class ChartView(ViewBox):
                 # XXX: WHY
                 ev.accept()

-                down_pos = ev.buttonDownPos()
+                down_pos: Point = ev.buttonDownPos(
+                    btn=btn,
+                )
+                scen_pos: Point = ev.scenePos()
+                scen_down_pos: Point = ev.buttonDownScenePos(
+                    btn=btn,
+                )

                 # This is the final position in the drag
                 if ev.isFinish():

-                    self.select_box.mouse_drag_released(down_pos, pos)
+                    # import pdbp; pdbp.set_trace()

-                    ax = QtCore.QRectF(down_pos, pos)
-                    ax = self.childGroup.mapRectFromParent(ax)
+                    # NOTE: think of this as a `.mouse_drag_release()`
+                    # (bc HINT that's what i called the shit ass
+                    # method that wrapped this call [yes, as a single
+                    # fucking call] originally.. you bish, guille)
+                    # Bo.. oraleeee
+                    self.select_box.set_scen_pos(
+                        # down_pos,
+                        # pos,
+                        scen_down_pos,
+                        scen_pos,
+                    )

                     # this is the zoom transform cmd
-                    self.showAxRect(ax)
-
+                    ax = QtCore.QRectF(down_pos, pos)
+                    ax = self.childGroup.mapRectFromParent(ax)
+                    # self.showAxRect(ax)
                     # axis history tracking
                     self.axHistoryPointer += 1
                     self.axHistory = self.axHistory[
                         :self.axHistoryPointer] + [ax]

                 else:
-                    print('drag finish?')
-                    self.select_box.set_pos(down_pos, pos)
+                    self.select_box.set_scen_pos(
+                        # down_pos,
+                        # pos,
+                        scen_down_pos,
+                        scen_pos,
+                    )

                     # update shape of scale box
                     # self.updateScaleBox(ev.buttonDownPos(), ev.pos())
-                    self.updateScaleBox(
-                        down_pos,
-                        ev.pos(),
-                    )
+                    # breakpoint()
+                    # self.updateScaleBox(
+                    #     down_pos,
+                    #     ev.pos(),
+                    # )

             # PANNING MODE
             else:

@@ -822,7 +890,7 @@ class ChartView(ViewBox):
                 # ev.accept()

         # WEIRD "RIGHT-CLICK CENTER ZOOM" MODE
-        elif button & QtCore.Qt.RightButton:
+        elif btn & QtCore.Qt.RightButton:

             if self.state['aspectLocked'] is not False:
                 mask[0] = 0
|  |  | ||||||
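Note: the switch from `ev.buttonDownPos()` to the `scenePos()`/`buttonDownScenePos()` pair matters because pyqtgraph drag events expose two coordinate frames: view-local (parent) coords suit the zoom-rect math mapped through `childGroup`, while a scene-level overlay like the select box wants scene coords. A minimal sketch of the pattern (the `select_box` attribute and its `set_scen_pos()` method come from this branch, not stock pyqtgraph):

```python
import pyqtgraph as pg
from pyqtgraph import Point
from pyqtgraph.Qt import QtCore

class SketchView(pg.ViewBox):
    '''Minimal drag handler showing the two coordinate frames.'''

    def mouseDragEvent(self, ev, axis=None) -> None:
        btn = ev.button()
        ev.accept()

        # view-local (parent) coords: zoom-rect math maps these
        # into the view's child coordinate system
        down_pos: Point = ev.buttonDownPos(btn)
        ax = QtCore.QRectF(down_pos, ev.pos())
        ax = self.childGroup.mapRectFromParent(ax)

        # scene coords: what a scene-space overlay item wants
        scen_down_pos: Point = ev.buttonDownScenePos(btn)
        scen_pos: Point = ev.scenePos()
        # this branch feeds them to its overlay, roughly:
        # self.select_box.set_scen_pos(scen_down_pos, scen_pos)
```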
@@ -21,9 +21,12 @@ Double auction top-of-book (L1) graphics.
 from typing import Tuple

 import pyqtgraph as pg
-from PyQt5 import QtCore, QtGui
-from PyQt5.QtCore import QPointF

+from piker.ui.qt import (
+    QPointF,
+    QtCore,
+    QtGui,
+)
 from ._axes import YAxisLabel
 from ._style import hcolor
 from ._pg_overrides import PlotItem

|  | @ -25,10 +25,17 @@ from typing import ( | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| import pyqtgraph as pg | import pyqtgraph as pg | ||||||
| from PyQt5 import QtGui, QtWidgets |  | ||||||
| from PyQt5.QtWidgets import QLabel, QSizePolicy |  | ||||||
| from PyQt5.QtCore import QPointF, QRectF, Qt |  | ||||||
| 
 | 
 | ||||||
|  | from piker.ui.qt import ( | ||||||
|  |     px_cache_mode, | ||||||
|  |     QtGui, | ||||||
|  |     QtWidgets, | ||||||
|  |     QLabel, | ||||||
|  |     size_policy, | ||||||
|  |     QPointF, | ||||||
|  |     QRectF, | ||||||
|  |     Qt, | ||||||
|  | ) | ||||||
| from ._style import ( | from ._style import ( | ||||||
|     DpiAwareFont, |     DpiAwareFont, | ||||||
|     hcolor, |     hcolor, | ||||||
|  | @ -78,7 +85,7 @@ class Label: | ||||||
|         self._x_offset = x_offset |         self._x_offset = x_offset | ||||||
| 
 | 
 | ||||||
|         txt = self.txt = QtWidgets.QGraphicsTextItem(parent=parent) |         txt = self.txt = QtWidgets.QGraphicsTextItem(parent=parent) | ||||||
|         txt.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache) |         txt.setCacheMode(px_cache_mode.DeviceCoordinateCache) | ||||||
| 
 | 
 | ||||||
|         vb.scene().addItem(txt) |         vb.scene().addItem(txt) | ||||||
| 
 | 
 | ||||||
|  | @ -103,7 +110,7 @@ class Label: | ||||||
|         self._anchor_func = self.txt.pos().x |         self._anchor_func = self.txt.pos().x | ||||||
| 
 | 
 | ||||||
|         # not sure if this makes a diff |         # not sure if this makes a diff | ||||||
|         self.txt.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache) |         self.txt.setCacheMode(px_cache_mode.DeviceCoordinateCache) | ||||||
| 
 | 
 | ||||||
|         # TODO: edit and selection support |         # TODO: edit and selection support | ||||||
|         # https://doc.qt.io/qt-5/qt.html#TextInteractionFlag-enum |         # https://doc.qt.io/qt-5/qt.html#TextInteractionFlag-enum | ||||||
|  | @ -299,12 +306,14 @@ class FormatLabel(QLabel): | ||||||
|             """ |             """ | ||||||
|         ) |         ) | ||||||
|         self.setFont(_font.font) |         self.setFont(_font.font) | ||||||
|         self.setTextFormat(Qt.MarkdownText)  # markdown |         self.setTextFormat( | ||||||
|  |             Qt.TextFormat.MarkdownText | ||||||
|  |         ) | ||||||
|         self.setMargin(0) |         self.setMargin(0) | ||||||
| 
 | 
 | ||||||
|         self.setSizePolicy( |         self.setSizePolicy( | ||||||
|             QSizePolicy.Expanding, |             size_policy.Expanding, | ||||||
|             QSizePolicy.Expanding, |             size_policy.Expanding, | ||||||
|         ) |         ) | ||||||
|         self.setAlignment( |         self.setAlignment( | ||||||
|             Qt.AlignVCenter | Qt.AlignLeft |             Qt.AlignVCenter | Qt.AlignLeft | ||||||
|  |  | ||||||
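Note: the `Qt.MarkdownText` → `Qt.TextFormat.MarkdownText` and `QSizePolicy.*` → `size_policy.*` edits reflect PyQt6 dropping PyQt5's unscoped enum shorthand; every member must now be reached through its enum class. A sketch of the two spellings (the `size_policy` alias is presumed to map to `QSizePolicy.Policy`, since its definition falls past the truncated shim listing at the end of this compare):

```python
import sys
from PyQt6.QtCore import Qt
from PyQt6.QtWidgets import QApplication, QLabel, QSizePolicy

size_policy = QSizePolicy.Policy  # assumption: the shim's alias

app = QApplication(sys.argv)
label = QLabel('**markdown**')
# PyQt5 allowed the short form `Qt.MarkdownText`; PyQt6 requires
# the fully scoped enum member:
label.setTextFormat(Qt.TextFormat.MarkdownText)
label.setSizePolicy(
    size_policy.Expanding,
    size_policy.Expanding,
)
```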
@@ -27,10 +27,22 @@ from typing import (
 )

 import pyqtgraph as pg
-from pyqtgraph import Point, functions as fn
-from PyQt5 import QtCore, QtGui, QtWidgets
-from PyQt5.QtCore import QPointF
+from pyqtgraph import (
+    Point,
+    functions as fn,
+)

+from piker.ui.qt import (
+    px_cache_mode,
+    QtCore,
+    QtGui,
+    QGraphicsPathItem,
+    QStyleOptionGraphicsItem,
+    QGraphicsItem,
+    QGraphicsScene,
+    QWidget,
+    QPointF,
+)
 from ._annotate import LevelMarker
 from ._anchors import (
     vbr_left,
@@ -130,7 +142,9 @@ class LevelLine(pg.InfiniteLine):
         self._right_end_sc: float = 0

         # use px caching
-        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+        self.setCacheMode(
+            px_cache_mode.DeviceCoordinateCache
+        )

     def txt_offsets(self) -> tuple[int, int]:
         return 0, 0
@@ -201,7 +215,7 @@ class LevelLine(pg.InfiniteLine):
     ) -> None:

         if not called_from_on_pos_change:
-            last = self.value()
+            last: float = self.value()

             # if the position hasn't changed then ``.update_labels()``
             # will not be called by a non-triggered `.on_pos_change()`,
@@ -308,7 +322,7 @@ class LevelLine(pg.InfiniteLine):
         Remove this line from containing chart/view/scene.

         '''
-        scene = self.scene()
+        scene: QGraphicsScene = self.scene()
         if scene:
             for label in self._labels:
                 label.delete()
@@ -339,8 +353,8 @@ class LevelLine(pg.InfiniteLine):
         self,

         p: QtGui.QPainter,
-        opt: QtWidgets.QStyleOptionGraphicsItem,
-        w: QtWidgets.QWidget
+        opt: QStyleOptionGraphicsItem,
+        w: QWidget

     ) -> None:
         '''
@@ -417,9 +431,9 @@ class LevelLine(pg.InfiniteLine):

     def add_marker(
         self,
-        path: QtWidgets.QGraphicsPathItem,
+        path: QGraphicsPathItem,

-    ) -> QtWidgets.QGraphicsPathItem:
+    ) -> QGraphicsPathItem:

         self._marker = path
         self._marker.setPen(self.currentPen)

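Note: `px_cache_mode` is presumably the shim's alias for the now strictly scoped `QGraphicsItem.CacheMode` enum; its definition also falls outside the truncated `piker.ui.qt` listing below. A sketch of the equivalence and what the cache mode buys:

```python
import sys
from PyQt6.QtWidgets import (
    QApplication,
    QGraphicsItem,
    QGraphicsRectItem,
)

# assumption: this is what the shim binds `px_cache_mode` to.
px_cache_mode = QGraphicsItem.CacheMode

app = QApplication(sys.argv)
item = QGraphicsRectItem()
# rasterize the item once per device transform so repaints blit
# a cached pixmap instead of re-running the paint() path.
item.setCacheMode(px_cache_mode.DeviceCoordinateCache)
```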
@@ -20,16 +20,14 @@ Super fast OHLC sampling graphics types.
 from __future__ import annotations

 import numpy as np
-from PyQt5 import (
+
+from piker.ui.qt import (
     QtGui,
     QtWidgets,
-)
-from PyQt5.QtCore import (
+    QPainterPath,
     QLineF,
     QRectF,
 )
-from PyQt5.QtGui import QPainterPath
-
 from ._curve import FlowGraphic
 from ..toolz import (
     Profiler,

@@ -24,8 +24,6 @@ view transforms.
 """
 import pyqtgraph as pg

-from ._axes import Axis
-

 def invertQTransform(tr):
     """Return a QTransform that is the inverse of *tr*.
@@ -53,6 +51,9 @@ def _do_overrides() -> None:
     pg.functions.invertQTransform = invertQTransform
     pg.PlotItem = PlotItem

+    from ._axes import Axis
+    pg.Axis = Axis
+
     # enable "QPainterPathPrivate for faster arrayToQPath" from
     # https://github.com/pyqtgraph/pyqtgraph/pull/2324
     pg.setConfigOption('enableExperimental', True)
@@ -234,7 +235,7 @@ class PlotItem(pg.PlotItem):
                 # ``ViewBox`` geometry bug.. where a gap for the
                 # 'bottom' axis is somehow left in?
                 # axis = pg.AxisItem(orientation=name, parent=self)
-                axis = Axis(
+                axis = pg.Axis(
                     self,
                     orientation=name,
                     parent=self,

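Note: the hunk above moves the `Axis` import inside `_do_overrides()` and publishes it as `pg.Axis`, a common way to break an import cycle (assuming `._axes` imports back from this module at load time) while still letting call sites resolve the override through the `pg` namespace. The pattern, reduced:

```python
import pyqtgraph as pg

def _do_overrides() -> None:
    # deferred import: by the time this runs both modules are fully
    # initialized, so there is no circular-import blowup at load time.
    from ._axes import Axis  # hypothetical sibling, as in the diff

    # publish the override on the `pg` namespace so consumers can
    # write `pg.Axis(...)` without importing `._axes` themselves.
    pg.Axis = Axis
```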
@@ -344,7 +344,10 @@ class SettingsPane:
         dsize = tracker.live_pp.dsize

         # READ out settings and update the status UI / settings widgets
-        suffix = {'currency': ' $', 'units': ' u'}[alloc.size_unit]
+        unit_char: str = {
+            'currency': '$',
+            'units': 'u',
+        }[alloc.size_unit]
         size_unit, limit = alloc.limit_info()

         step_size, currency_per_slot = alloc.step_sizes()
@@ -358,10 +361,11 @@ class SettingsPane:
             self.apply_setting('limit', limit)

         self.step_label.format(
-            step_size=str(humanize(step_size)) + suffix
+            unit_prefix=unit_char,
+            step_size=str(humanize(step_size))
         )
         self.limit_label.format(
-            limit=str(humanize(limit)) + suffix
+            limit=f'{unit_char}: {str(humanize(limit))}'
         )

         # update size unit in UI

@@ -0,0 +1,426 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Remote control tasks for sending annotations (and maybe more cmds)
+to a chart from some other actor.
+
+'''
+from __future__ import annotations
+from contextlib import (
+    asynccontextmanager as acm,
+    AsyncExitStack,
+)
+from functools import partial
+from pprint import pformat
+from typing import (
+    # Any,
+    AsyncContextManager,
+)
+
+import tractor
+from tractor import trionics
+from tractor import (
+    Portal,
+    Context,
+    MsgStream,
+)
+
+from piker.log import get_logger
+from piker.types import Struct
+from piker.service import find_service
+from piker.brokers import SymbolNotFound
+from piker.ui.qt import (
+    QGraphicsItem,
+)
+from ._display import DisplayState
+from ._interaction import ChartView
+from ._editors import SelectRect
+from ._chart import ChartPlotWidget
+from ._dataviz import Viz
+
+
+log = get_logger(__name__)
+
+# NOTE: this is UPDATED by the `._display.graphics_update_loop()`
+# once all chart widgets / Viz per flume have been initialized
+# allowing for remote annotation (control) of any chart-actor's mkt
+# feed by fqme lookup Bo
+_dss: dict[str, DisplayState] = {}
+
+# stash each and every client connection so that they can all
+# be cancelled on shutdown/error.
+# TODO: make `tractor.Context` hashable via its `.cid: str`?
+# _ctxs: set[Context] = set()
+# TODO: use type statements from 3.12+
+IpcCtxTable = dict[
+    str,  # each `Context.cid`
+    tuple[
+        Context,  # handle for ctx-cancellation
+        set[int]  # set of annotation (instance) ids
+    ]
+]
+
+_ctxs: IpcCtxTable = {}
+
+# XXX: global map of all uniquely created annotation-graphics so
+# that they can be mutated (eventually) by a client.
+# NOTE: this map is only populated on the `chart` actor side (aka
+# the "annotations server" which actually renders to a Qt canvas).
+# type AnnotsTable = dict[int, QGraphicsItem]
+AnnotsTable = dict[int, QGraphicsItem]
+
+_annots: AnnotsTable = {}
+
+
+async def serve_rc_annots(
+    ipc_key: str,
+    annot_req_stream: MsgStream,
+    dss: dict[str, DisplayState],
+    ctxs: IpcCtxTable,
+    annots: AnnotsTable,
+
+) -> None:
+    async for msg in annot_req_stream:
+        match msg:
+            case {
+                'cmd': 'SelectRect',
+                'fqme': fqme,
+                'timeframe': timeframe,
+                'meth': str(meth),
+                'kwargs': dict(kwargs),
+            }:
+
+                ds: DisplayState = _dss[fqme]
+                chart: ChartPlotWidget = {
+                    60: ds.hist_chart,
+                    1: ds.chart,
+                }[timeframe]
+                cv: ChartView = chart.cv
+
+                # annot type lookup from cmd
+                rect = SelectRect(
+                    viewbox=cv,
+
+                    # TODO: make this more dynamic?
+                    # -[ ] pull from conf.toml?
+                    # -[ ] add `.set_color()` method to type?
+                    # -[ ] make a green/red based on direction
+                    #    instead of default static color?
+                    color=kwargs.pop('color', None),
+                )
+                # XXX NOTE: this is REQUIRED to set the rect
+                # resize callback!
+                rect.chart: ChartPlotWidget = chart
+
+                # delegate generically to the requested method
+                getattr(rect, meth)(**kwargs)
+                rect.show()
+                aid: int = id(rect)
+                annots[aid] = rect
+                aids: set[int] = ctxs[ipc_key][1]
+                aids.add(aid)
+                await annot_req_stream.send(aid)
+
+            case {
+                'cmd': 'remove',
+                'aid': int(aid),
+            }:
+                # NOTE: this is normally entered on
+                # a client's annotation de-alloc normally
+                # prior to detach or modify.
+                annot: QGraphicsItem = annots[aid]
+                annot.delete()
+
+                # respond to client indicating annot
+                # was indeed deleted.
+                await annot_req_stream.send(aid)
+
+            case {
+                'cmd': 'redraw',
+                'fqme': fqme,
+                'timeframe': timeframe,
+
+                # TODO: maybe more fields?
+                # 'render': int(aid),
+                # 'viz_name': str(viz_name),
+            }:
+            # NOTE: old match from the 60s display loop task
+            # | {
+            #     'backfilling': (str(viz_name), timeframe),
+            # }:
+                ds: DisplayState = _dss[fqme]
+                viz: Viz = {
+                    60: ds.hist_viz,
+                    1: ds.viz,
+                }[timeframe]
+                log.warning(
+                    f'Forcing VIZ REDRAW:\n'
+                    f'fqme: {fqme}\n'
+                    f'timeframe: {timeframe}\n'
+                )
+                viz.reset_graphics()
+
+            case _:
+                log.error(
+                    'Unknown remote annotation cmd:\n'
+                    f'{pformat(msg)}'
+                )
+
+
+@tractor.context
+async def remote_annotate(
+    ctx: Context,
+) -> None:
+
+    global _dss, _ctxs
+    assert _dss
+
+    _ctxs[ctx.cid] = (ctx, set())
+
+    # send back full fqme symbology to caller
+    await ctx.started(list(_dss))
+
+    # open annot request handler stream
+    async with ctx.open_stream() as annot_req_stream:
+        try:
+            await serve_rc_annots(
+                ipc_key=ctx.cid,
+                annot_req_stream=annot_req_stream,
+                dss=_dss,
+                ctxs=_ctxs,
+                annots=_annots,
+            )
+        finally:
+            # ensure all annots for this connection are deleted
+            # on any final teardown
+            (_ctx, aids) = _ctxs[ctx.cid]
+            assert _ctx is ctx
+            for aid in aids:
+                annot: QGraphicsItem = _annots[aid]
+                annot.delete()
+
+
+class AnnotCtl(Struct):
+    '''
+    A control for remote "data annotations".
+
+    You know those "squares they always show in machine vision
+    UIs.." this API allows you to remotely control stuff like that
+    in some other graphics actor.
+
+    '''
+    ctx2fqmes: dict[str, str]
+    fqme2ipc: dict[str, MsgStream]
+    _annot_stack: AsyncExitStack
+
+    # runtime-populated mapping of all annotation
+    # ids to their equivalent IPC msg-streams.
+    _ipcs: dict[int, MsgStream] = {}
+
+    def _get_ipc(
+        self,
+        fqme: str,
+    ) -> MsgStream:
+        ipc: MsgStream = self.fqme2ipc.get(fqme)
+        if ipc is None:
+            raise SymbolNotFound(
+                'No chart (actor) seems to have mkt feed loaded?\n'
+                f'{fqme}'
+            )
+        return ipc
+
+    async def add_rect(
+        self,
+        fqme: str,
+        timeframe: float,
+        start_pos: tuple[float, float],
+        end_pos: tuple[float, float],
+
+        # TODO: a `Literal['view', 'scene']` for this?
+        domain: str = 'view',  # or 'scene'
+        color: str = 'dad_blue',
+
+        from_acm: bool = False,
+
+    ) -> int:
+        '''
+        Add a `SelectRect` annotation to the target view, return
+        the instances `id(obj)` from the remote UI actor.
+
+        '''
+        ipc: MsgStream = self._get_ipc(fqme)
+        await ipc.send({
+            'fqme': fqme,
+            'cmd': 'SelectRect',
+            'timeframe': timeframe,
+            # 'meth': str(meth),
+            'meth': 'set_view_pos' if domain == 'view' else 'set_scene_pos',
+            'kwargs': {
+                'start_pos': tuple(start_pos),
+                'end_pos': tuple(end_pos),
+                'color': color,
+                'update_label': False,
+            },
+        })
+        aid: int = await ipc.receive()
+        self._ipcs[aid] = ipc
+        if not from_acm:
+            self._annot_stack.push_async_callback(
+                partial(
+                    self.remove,
+                    aid,
+                )
+            )
+        return aid
+
+    async def remove(
+        self,
+        aid: int,
+
+    ) -> bool:
+        '''
+        Remove an existing annotation by instance id.
+
+        '''
+        ipc: MsgStream = self._ipcs[aid]
+        await ipc.send({
+            'cmd': 'remove',
+            'aid': aid,
+        })
+        removed: bool = await ipc.receive()
+        return removed
+
+    @acm
+    async def open_rect(
+        self,
+        **kwargs,
+    ) -> int:
+        try:
+            aid: int = await self.add_rect(
+                from_acm=True,
+                **kwargs,
+            )
+            yield aid
+        finally:
+            await self.remove(aid)
+
+    async def redraw(
+        self,
+        fqme: str,
+        timeframe: float,
+    ) -> None:
+        await self._get_ipc(fqme).send({
+            'cmd': 'redraw',
+            'fqme': fqme,
+            # 'render': int(aid),
+            # 'viz_name': str(viz_name),
+            'timeframe': timeframe,
+        })
+
+    # TODO: do we even need this?
+    # async def modify(
+    #     self,
+    #     aid: int,  # annotation id
+    #     meth: str,  # far end graphics object method to invoke
+    #     params: dict[str, Any],  # far end `meth(**kwargs)`
+    # ) -> bool:
+    #     '''
+    #     Modify an existing (remote) annotation's graphics
+    #     parameters, thus changing its appearance / state in real
+    #     time.
+
+    #     '''
+    #     raise NotImplementedError
+
+
+@acm
+async def open_annot_ctl(
+    uid: tuple[str, str] | None = None,
+
+) -> AnnotCtl:
+    # TODO: load connection to a specific chart actor
+    # -[ ] pull from either service scan or config
+    # -[ ] return some kinda client/proxy thinger?
+    #    -[ ] maybe we should finally just provide this as
+    #       a `tractor.hilevel.CallableProxy` or wtv?
+    # -[ ] use this from the storage.cli stuff to mark up gaps!
+
+    maybe_portals: list[Portal] | None
+    fqmes: list[str]
+    async with find_service(
+        service_name='chart',
+        first_only=False,
+    ) as maybe_portals:
+
+        ctx_mngrs: list[AsyncContextManager] = []
+
+        # TODO: print the current discoverable actor UID set
+        # here as well?
+        if not maybe_portals:
+            raise RuntimeError('No chart UI actors found in service domain?')
+
+        for portal in maybe_portals:
+            ctx_mngrs.append(
+                portal.open_context(remote_annotate)
+            )
+
+        ctx2fqmes: dict[str, set[str]] = {}
+        fqme2ipc: dict[str, MsgStream] = {}
+        stream_ctxs: list[AsyncContextManager] = []
+
+        async with (
+            trionics.gather_contexts(ctx_mngrs) as ctxs,
+        ):
+            for (ctx, fqmes) in ctxs:
+                stream_ctxs.append(ctx.open_stream())
+
+                # fill lookup table of mkt addrs to IPC ctxs
+                for fqme in fqmes:
+                    if other := fqme2ipc.get(fqme):
+                        raise ValueError(
+                            f'More then one chart displays {fqme}!?\n'
+                            'Other UI actor info:\n'
+                            f'channel: {other._ctx.chan}]\n'
+                            f'actor uid: {other._ctx.chan.uid}]\n'
+                            f'ctx id: {other._ctx.cid}]\n'
+                        )
+
+                    ctx2fqmes.setdefault(
+                        ctx.cid,
+                        set(),
+                    ).add(fqme)
+
+            async with trionics.gather_contexts(stream_ctxs) as streams:
+                for stream in streams:
+                    fqmes: set[str] = ctx2fqmes[stream._ctx.cid]
+                    for fqme in fqmes:
+                        fqme2ipc[fqme] = stream
+
+                # NOTE: on graceful teardown we always attempt to
+                # remove all annots that were created by the
+                # entering client.
+                # TODO: should we maybe instead/also do this on the
+                # server-actor side so that when a client
+                # disconnects we always delete all annotations by
+                # default instead of expecting the client to?
+                async with AsyncExitStack() as annots_stack:
+                    client = AnnotCtl(
+                        ctx2fqmes=ctx2fqmes,
+                        fqme2ipc=fqme2ipc,
+                        _annot_stack=annots_stack,
+                    )
+                    yield client
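Note: a client-side usage sketch of this new annotations API (the fqme and coordinates are made up; `open_annot_ctl()`/`AnnotCtl` are as defined above, and the module path matches the `piker.ui._remote_ctl` entry enabled in the CLI diff below):

```python
import trio
from piker.ui._remote_ctl import open_annot_ctl, AnnotCtl

async def mark_up_chart():
    # discovers running `chart` actors via `find_service()`
    async with open_annot_ctl() as actl:
        actl: AnnotCtl
        aid: int = await actl.add_rect(
            fqme='btcusdt.binance',     # made-up market key
            timeframe=1,                # 1s chart per the server lookup
            start_pos=(100., 27_000.),  # view-domain coords
            end_pos=(160., 27_500.),
        )
        await trio.sleep(3)
        # explicit removal; otherwise the exit stack removes it
        # automatically on teardown of this acm.
        await actl.remove(aid)

trio.run(mark_up_chart)
```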
@@ -30,8 +30,8 @@ from typing import (
 import msgspec
 import numpy as np
 import pyqtgraph as pg
-from PyQt5.QtGui import QPainterPath

+from piker.ui.qt import QPainterPath
 from ..data._formatters import (
     IncrementalFormatter,
 )

@@ -15,7 +15,7 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 """
-qompleterz: embeddable search and complete using trio, Qt and fuzzywuzzy.
+qompleterz: embeddable search and complete using trio, Qt and rapidfuzz.

 """

@@ -43,32 +43,29 @@ from typing import (
     Iterator,
 )
 import time
-# from pprint import pformat
+from pprint import pformat

-from fuzzywuzzy import process as fuzzy
+from rapidfuzz import process as fuzzy
 import trio
 from trio_typing import TaskStatus
-from PyQt5 import QtCore
-from PyQt5 import QtWidgets
-from PyQt5.QtCore import (
+
+from piker.ui.qt import (
+    size_policy,
+    align_flag,
     Qt,
+    QtCore,
+    QtWidgets,
     QModelIndex,
     QItemSelectionModel,
-)
-from PyQt5.QtGui import (
     # QLayout,
     QStandardItem,
     QStandardItemModel,
-)
-from PyQt5.QtWidgets import (
     QWidget,
     QTreeView,
     # QListWidgetItem,
     # QAbstractScrollArea,
     # QStyledItemDelegate,
 )
-
-
 from ..log import get_logger
 from ._style import (
     _font,
@@ -129,8 +126,8 @@ class CompleterView(QTreeView):

         # ux settings
         self.setSizePolicy(
-            QtWidgets.QSizePolicy.Expanding,
-            QtWidgets.QSizePolicy.Expanding,
+            size_policy.Expanding,
+            size_policy.Expanding,
         )
         self.setItemsExpandable(True)
         self.setExpandsOnDoubleClick(False)
@@ -567,8 +564,8 @@ class SearchWidget(QtWidgets.QWidget):

         # size it as we specify
         self.setSizePolicy(
-            QtWidgets.QSizePolicy.Fixed,
-            QtWidgets.QSizePolicy.Fixed,
+            size_policy.Fixed,
+            size_policy.Fixed,
         )

         self.godwidget = godwidget
@@ -592,14 +589,16 @@ class SearchWidget(QtWidgets.QWidget):
             }}
             """
         )
-        label.setTextFormat(3)  # markdown
+        label.setTextFormat(
+            Qt.TextFormat.MarkdownText
+        )
         label.setFont(_font.font)
         label.setMargin(4)
         label.setText("search:")
         label.show()
         label.setAlignment(
-            QtCore.Qt.AlignVCenter
-            | QtCore.Qt.AlignLeft
+            align_flag.AlignVCenter
+            | align_flag.AlignLeft
         )

         self.bar_hbox.addWidget(label)
@@ -617,9 +616,17 @@ class SearchWidget(QtWidgets.QWidget):

         self.vbox.addLayout(self.bar_hbox)

-        self.vbox.setAlignment(self.bar, Qt.AlignTop | Qt.AlignRight)
+        self.vbox.setAlignment(
+            self.bar,
+            align_flag.AlignTop
+            | align_flag.AlignRight,
+        )
         self.vbox.addWidget(self.bar.view)
-        self.vbox.setAlignment(self.view, Qt.AlignTop | Qt.AlignLeft)
+        self.vbox.setAlignment(
+            self.view,
+            align_flag.AlignTop
+            | align_flag.AlignLeft,
+        )

     def focus(self) -> None:
         self.show()
@@ -1139,21 +1146,25 @@ async def search_simple_dict(

 ) -> dict[str, Any]:

-    tokens = []
+    tokens: list[str] = []
     for key in source:
-        if not isinstance(key, str):
-            tokens.extend(key)
-        else:
+        match key:
+            case str():
                 tokens.append(key)
+            case []:
+                tokens.extend(key)

     # search routine can be specified as a function such
     # as in the case of the current app's local symbol cache
-    matches = fuzzy.extractBests(
+    matches = fuzzy.extract(
         text,
         tokens,
         score_cutoff=90,
     )
-
+    log.info(
+        'cache search results:\n'
+        f'{pformat(matches)}'
+    )
     return [item[0] for item in matches]

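Note: the fuzzywuzzy → rapidfuzz swap works nearly drop-in since `rapidfuzz.process.extract()` accepts the same `score_cutoff` kwarg, but its result tuples are `(choice, score, index)` where fuzzywuzzy's `extractBests()` returned `(choice, score)` pairs; the `item[0]` indexing above is compatible with both. Quick check:

```python
from rapidfuzz import process as fuzzy

tokens = ['xbtusd.kraken', 'btcusdt.binance', 'ethusdt.binance']

matches = fuzzy.extract(
    'btc',
    tokens,
    score_cutoff=40,  # illustrative; the code above uses 90
)
# each match is a (choice, score, index) triple in rapidfuzz
print([item[0] for item in matches])
```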
@@ -22,10 +22,14 @@ from typing import Dict
 import math

 import pyqtgraph as pg
-from PyQt5 import QtCore, QtGui
-from PyQt5.QtCore import Qt, QCoreApplication
 from qdarkstyle import DarkPalette

+from .qt import (
+    QtCore,
+    QtGui,
+    Qt,
+    QCoreApplication,
+)
 from ..log import get_logger

 from .. import config

@@ -27,16 +27,14 @@ from typing import (
 )
 import uuid

-from PyQt5 import QtCore
-from PyQt5.QtWidgets import (
+from piker.ui.qt import (
+    Qt,
+    QtCore,
     QWidget,
     QMainWindow,
     QApplication,
     QLabel,
     QStatusBar,
-)
-
-from PyQt5.QtGui import (
     QScreen,
     QCloseEvent,
 )
@@ -197,7 +195,9 @@ class MainWindow(QMainWindow):
                 """
                 # font-size : {font_size}px;
             )
-            label.setTextFormat(3)  # markdown
+            label.setTextFormat(
+                Qt.TextFormat.MarkdownText
+            )
             label.setFont(_font_small.font)
             label.setMargin(2)
             label.setAlignment(

@@ -96,9 +96,17 @@ def monitor(config, rate, name, dhost, test, tl):
 @click.option('--rate', '-r', default=1, help='Logging level')
 @click.argument('symbol', required=True)
 @click.pass_obj
-def optschain(config, symbol, date, rate, test):
-    """Start an option chain UI
-    """
+def optschain(
+    config,
+    symbol,
+    date,
+    rate,
+    test,
+):
+    '''
+    Start an option chain UI
+
+    '''
     # global opts
     loglevel = config['loglevel']
     brokername = config['broker']
@@ -132,21 +140,23 @@ def optschain(config, symbol, date, rate, test):
     default=None,
     help='Enable pyqtgraph profiling'
 )
-@click.option(
-    '--pdb',
-    is_flag=True,
-    help='Enable tractor debug mode'
-)
+# @click.option(
+#     '--pdb',
+#     is_flag=True,
+#     help='Enable tractor debug mode'
+# )
 @click.argument('symbols', nargs=-1, required=True)
+# @click.pass_context
 @click.pass_obj
 def chart(
     config,
+    # ctx: click.Context,
     symbols: list[str],
     profile,
-    pdb: bool,
 ):
     '''
-    Start a real-time chartng UI
+    Run chart UI app, spawning service daemons dynamically as
+    needed if not discovered via [network] config.

     '''
     # eg. ``--profile 3`` reports profiling for anything slower then 3 ms.
@@ -173,6 +183,42 @@ def chart(
     tractorloglevel = config['tractorloglevel']
     pikerloglevel = config['loglevel']

+    maddrs: list[tuple[str, int]] = config.get(
+        'maddrs',
+        [],
+    )
+
+    # if maddrs:
+    #     from tractor._multiaddr import parse_maddr
+    #     for addr in maddrs:
+    #         breakpoint()
+    #         layers: dict = parse_maddr(addr)
+
+    regaddrs: list[tuple[str, int]] = config.get(
+        'registry_addrs',
+        [],
+    )
+
+    from ..config import load
+    conf, _ = load(
+        conf_name='conf',
+    )
+    network: dict = conf.get('network')
+    if network:
+        from ..cli import load_trans_eps
+        eps: dict = load_trans_eps(
+            network,
+            maddrs,
+        )
+        for layers in eps['pikerd']:
+            regaddrs.append((
+                layers['ipv4']['addr'],
+                layers['tcp']['port'],
+            ))
+
+    from tractor.devx import maybe_open_crash_handler
+    pdb: bool = config['pdb']
+    with maybe_open_crash_handler(pdb=pdb):
         _main(
             syms=symbols,
             brokermods=brokermods,
@@ -181,6 +227,11 @@ def chart(
                 'debug_mode': pdb,
                 'loglevel': tractorloglevel,
                 'name': 'chart',
-            'registry_addr': config.get('registry_addr'),
+                'registry_addrs': list(set(regaddrs)),
+                'enable_modules': [
+
+                    # remote data-view annotations Bo
+                    'piker.ui._remote_ctl',
+                ],
             },
         )

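Note: the new `[network]`-driven branch reduces each transport-endpoint "layer set" that `load_trans_eps()` yields for the `pikerd` service down to a `(host, port)` pair for tractor's `registry_addrs`. A sketch with a made-up endpoint dict (the real layout is defined by `load_trans_eps()`, which is not shown in this compare):

```python
# made-up example of what `load_trans_eps()` might yield:
eps: dict = {
    'pikerd': [
        {'ipv4': {'addr': '127.0.0.1'}, 'tcp': {'port': 6116}},
    ],
}

# same reduction as the loop in the diff above, as a comprehension:
regaddrs: list[tuple[str, int]] = [
    (layers['ipv4']['addr'], layers['tcp']['port'])
    for layers in eps['pikerd']
]
assert regaddrs == [('127.0.0.1', 6116)]
```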
|  | @ -34,7 +34,6 @@ import uuid | ||||||
| from bidict import bidict | from bidict import bidict | ||||||
| import tractor | import tractor | ||||||
| import trio | import trio | ||||||
| from PyQt5.QtCore import Qt |  | ||||||
| 
 | 
 | ||||||
| from piker import config | from piker import config | ||||||
| from piker.accounting import ( | from piker.accounting import ( | ||||||
|  | @ -59,6 +58,7 @@ from piker.data import ( | ||||||
| ) | ) | ||||||
| from piker.types import Struct | from piker.types import Struct | ||||||
| from piker.log import get_logger | from piker.log import get_logger | ||||||
|  | from piker.ui.qt import Qt | ||||||
| from ._editors import LineEditor, ArrowEditor | from ._editors import LineEditor, ArrowEditor | ||||||
| from ._lines import order_line, LevelLine | from ._lines import order_line, LevelLine | ||||||
| from ._position import ( | from ._position import ( | ||||||
|  | @ -358,7 +358,7 @@ class OrderMode: | ||||||
|         send_msg: bool = True, |         send_msg: bool = True, | ||||||
|         order: Order | None = None, |         order: Order | None = None, | ||||||
| 
 | 
 | ||||||
|     ) -> Dialog: |     ) -> Dialog|None: | ||||||
|         ''' |         ''' | ||||||
|         Send execution order to EMS return a level line to |         Send execution order to EMS return a level line to | ||||||
|         represent the order on a chart. |         represent the order on a chart. | ||||||
|  | @ -378,6 +378,16 @@ class OrderMode: | ||||||
|                 'oid': oid, |                 'oid': oid, | ||||||
|             }) |             }) | ||||||
| 
 | 
 | ||||||
|  |         if order.price <= 0: | ||||||
|  |             log.error( | ||||||
|  |                 '*!? Invalid `Order.price <= 0` ?!*\n' | ||||||
|  |                 # TODO: make this present multi-line in object form | ||||||
|  |                 # like `ib_insync.contracts.Contract.__repr__()` | ||||||
|  |                 f'{order}\n' | ||||||
|  |             ) | ||||||
|  |             self.cancel_orders([order.oid]) | ||||||
|  |             return None | ||||||
|  | 
 | ||||||
|         lines = self.lines_from_order( |         lines = self.lines_from_order( | ||||||
|             order, |             order, | ||||||
|             show_markers=True, |             show_markers=True, | ||||||
|  | @ -484,7 +494,7 @@ class OrderMode: | ||||||
|         uuid: str, |         uuid: str, | ||||||
|         order: Order | None = None, |         order: Order | None = None, | ||||||
| 
 | 
 | ||||||
|     ) -> Dialog: |     ) -> Dialog | None: | ||||||
|         ''' |         ''' | ||||||
|         Order submitted status event handler. |         Order submitted status event handler. | ||||||
| 
 | 
 | ||||||
|  | @ -505,6 +515,11 @@ class OrderMode: | ||||||
|             # if an order msg is provided update the line |             # if an order msg is provided update the line | ||||||
|             # **from** that msg. |             # **from** that msg. | ||||||
|             if order: |             if order: | ||||||
|  |                 if order.price <= 0: | ||||||
|  |                     log.error(f'Order has 0 price, cancelling..\n{order}') | ||||||
|  |                     self.cancel_orders([order.oid]) | ||||||
|  |                     return None | ||||||
|  | 
 | ||||||
|                 line.set_level(order.price) |                 line.set_level(order.price) | ||||||
|                 self.on_level_change_update_next_order_info( |                 self.on_level_change_update_next_order_info( | ||||||
|                     level=order.price, |                     level=order.price, | ||||||
|  | @ -613,7 +628,7 @@ class OrderMode: | ||||||
| 
 | 
 | ||||||
|         oids: set[str] = set() |         oids: set[str] = set() | ||||||
|         for line in lines: |         for line in lines: | ||||||
|             dialog: Dialog = getattr(line, 'dialog', None) |             if dialog := getattr(line, 'dialog', None): | ||||||
|                 oid: str = dialog.uuid |                 oid: str = dialog.uuid | ||||||
|                 if ( |                 if ( | ||||||
|                     dialog |                     dialog | ||||||
|  | @ -663,7 +678,7 @@ class OrderMode: | ||||||
|         self, |         self, | ||||||
|         msg: Status, |         msg: Status, | ||||||
| 
 | 
 | ||||||
|     ) -> Dialog: |     ) -> Dialog | None: | ||||||
|         # NOTE: the `.order` attr **must** be set with the |         # NOTE: the `.order` attr **must** be set with the | ||||||
|         # equivalent order msg in order to be loaded. |         # equivalent order msg in order to be loaded. | ||||||
|         order = msg.req |         order = msg.req | ||||||
|  | @ -694,12 +709,15 @@ class OrderMode: | ||||||
|             fqsn=fqme, |             fqsn=fqme, | ||||||
|             info={}, |             info={}, | ||||||
|         ) |         ) | ||||||
|         dialog = self.submit_order( |         maybe_dialog: Dialog | None = self.submit_order( | ||||||
|             send_msg=False, |             send_msg=False, | ||||||
|             order=order, |             order=order, | ||||||
|         ) |         ) | ||||||
|         assert self.dialogs[oid] == dialog |         if maybe_dialog is None: | ||||||
|         return dialog |             return None | ||||||
|  | 
 | ||||||
|  |         assert self.dialogs[oid] == maybe_dialog | ||||||
|  |         return maybe_dialog | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @asynccontextmanager | @asynccontextmanager | ||||||
|  | @ -930,13 +948,8 @@ async def open_order_mode( | ||||||
|                     msg, |                     msg, | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|         # start async input handling for chart's view |  | ||||||
|         async with ( |         async with ( | ||||||
| 
 | 
 | ||||||
|             # ``ChartView`` input async handler startup |  | ||||||
|             chart.view.open_async_input_handler(), |  | ||||||
|             hist_chart.view.open_async_input_handler(), |  | ||||||
| 
 |  | ||||||
|             # pp pane kb inputs |             # pp pane kb inputs | ||||||
|             open_form_input_handling( |             open_form_input_handling( | ||||||
|                 form, |                 form, | ||||||
|  | @ -1005,8 +1018,13 @@ async def process_trade_msg( | ||||||
| 
 | 
 | ||||||
| ) -> tuple[Dialog, Status]: | ) -> tuple[Dialog, Status]: | ||||||
| 
 | 
 | ||||||
|     fmsg = pformat(msg) |     # TODO: obvi once we're parsing to native struct instances we can | ||||||
|     log.debug(f'Received order msg:\n{fmsg}') |     # drop the `pformat()` call Bo | ||||||
|  |     fmtmsg: Struct | dict = msg | ||||||
|  |     if not isinstance(msg, Struct): | ||||||
|  |         fmtmsg: str = pformat(msg) | ||||||
|  | 
 | ||||||
|  |     log.debug(f'Received order msg:\n{fmtmsg}') | ||||||
|     name = msg['name'] |     name = msg['name'] | ||||||
| 
 | 
 | ||||||
|     if name in ( |     if name in ( | ||||||
|  | @ -1022,7 +1040,7 @@ async def process_trade_msg( | ||||||
|         ): |         ): | ||||||
|             log.info( |             log.info( | ||||||
                 f'Loading position for `{fqme}`:\n'
-                f'{fmsg}'
+                f'{fmtmsg}'
             )
             tracker = mode.trackers[msg['account']]
             tracker.live_pp.update_from_msg(msg)

@@ -1064,7 +1082,7 @@ async def process_trade_msg(

             elif order.action != 'cancel':
                 log.warning(
-                    f'received msg for untracked dialog:\n{fmsg}'
+                    f'received msg for untracked dialog:\n{fmtmsg}'
                 )
                 assert msg.resp in ('open', 'dark_open'), f'Unknown msg: {msg}'

@@ -1084,7 +1102,24 @@ async def process_trade_msg(
                     )
                 ):
                     msg.req = order
-                    dialog = mode.load_unknown_dialog_from_msg(msg)
+                    dialog: (
+                        Dialog
+                        # NOTE: on an invalid order submission (eg.
+                        # price <=0) the downstream APIs may return
+                        # a null.
+                        | None
+                    ) = mode.load_unknown_dialog_from_msg(msg)
+
+                    # cancel any invalid pre-existing order!
+                    if dialog is None:
+                        log.warning(
+                            'Order was ignored/invalid?\n'
+                            f'{order}'
+                        )
+
+                    # if valid, display the order line the same as if
+                    # it was submitted during this UI session.
+                    else:
                         mode.on_submit(oid)

         case Status(resp='error'):

@@ -1114,7 +1149,7 @@ async def process_trade_msg(
             req={'exec_mode': 'dark'},
         ):
             # TODO: UX for a "pending" clear/live order
-            log.info(f'Dark order triggered for {fmsg}')
+            log.info(f'Dark order triggered for {fmtmsg}')

         case Status(
             resp='triggered',

@@ -1163,7 +1198,10 @@ async def process_trade_msg(

             # XXX TODO: have seen order be a dict here!?
             # that should never happen tho?
-            action: str = order.action
+            action: str = (
+                getattr(order, 'action', None)
+                or order['action']
+            )
             details: dict = msg.brokerd_msg

             # TODO: state tracking:
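The `action` lookup change in the last hunk above is a dict-or-struct fallback: attribute access is tried first, plain-`dict` subscripting second, since the msg's `order` has apparently been seen arriving as either type. A minimal, self-contained sketch of the same pattern; the names `FakeOrder` and `get_action` are hypothetical stand-ins, not piker APIs:

# Sketch of the dict-or-struct fallback used above; `FakeOrder` and
# `get_action` are illustrative only, not from the piker codebase.
from dataclasses import dataclass


@dataclass
class FakeOrder:
    action: str


def get_action(order: 'FakeOrder | dict') -> str:
    # try attribute access first, then fall back to subscripting
    # when the msg decoded to a plain dict.
    return (
        getattr(order, 'action', None)
        or order['action']
    )


assert get_action(FakeOrder(action='buy')) == 'buy'
assert get_action({'action': 'sell'}) == 'sell'

Note the `or` also falls through when the attribute exists but is falsy (an empty string); that is harmless here since a valid `action` is always a non-empty string.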
@@ -0,0 +1,104 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Qt UI framework version shimming.
+
+Allow importing sub-pkgs from this module instead of worrying about
+major version specifics, any enum moves or component renames.
+
+Code in `piker.ui.*` should always explicitly import directly from
+this module like `from piker.ui.qt import ( ..`
+
+'''
+from enum import EnumType
+
+from PyQt6 import (
+    QtCore,
+    QtGui,
+    QtWidgets,
+)
+from PyQt6.QtCore import (
+    Qt,
+    QCoreApplication,
+    QLineF,
+    QRectF,
+    # NOTE: for enums use the `.Type` subattr-space
+    QEvent,
+    QPointF,
+    QSize,
+    QModelIndex,
+    QItemSelectionModel,
+    pyqtBoundSignal,
+    pyqtRemoveInputHook,
+)
+
+align_flag: EnumType = Qt.AlignmentFlag
+txt_flag: EnumType = Qt.TextFlag
+keys: EnumType = QEvent.Type
+scrollbar_policy: EnumType = Qt.ScrollBarPolicy
+
+# ^-NOTE-^: handy snippet to discover enums:
+# import enum
+# [attr for attr_name in dir(QFrame)
+#  if (attr := getattr(QFrame, attr_name))
+#  and isinstance(attr, enum.EnumType)]
+
+from PyQt6.QtGui import (
+    QPainter,
+    QPainterPath,
+    QIcon,
+    QPixmap,
+    QColor,
+    QTransform,
+    QStandardItem,
+    QStandardItemModel,
+    QWheelEvent,
+    QScreen,
+    QCloseEvent,
+)
+
+from PyQt6.QtWidgets import (
+    QMainWindow,
+    QApplication,
+    QLabel,
+    QStatusBar,
+    QLineEdit,
+    QHBoxLayout,
+    QVBoxLayout,
+    QFormLayout,
+    QProgressBar,
+    QSizePolicy,
+    QStyledItemDelegate,
+    QStyleOptionViewItem,
+    QComboBox,
+    QWidget,
+    QFrame,
+    QSplitter,
+    QTreeView,
+    QStyle,
+    QGraphicsItem,
+    QGraphicsPathItem,
+    # QGraphicsView,
+    QStyleOptionGraphicsItem,
+    QGraphicsScene,
+    QGraphicsSceneMouseEvent,
+    QGraphicsProxyWidget,
+)
+
+gs_keys: EnumType = QGraphicsSceneMouseEvent.Type
+size_policy: EnumType = QtWidgets.QSizePolicy.Policy
+px_cache_mode: EnumType = QGraphicsItem.CacheMode
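To show the shim's intended consumption, a minimal sketch of a downstream `piker.ui` module follows. The `StatusPane` widget is hypothetical (and assumes a running `QApplication`), but every imported name comes from the shim above, so no direct `PyQt6` import is needed:

# hypothetical consumer module somewhere under `piker.ui.*`: only
# the shim is imported, never `PyQt6` directly.
from piker.ui.qt import (
    QWidget,
    QLabel,
    QVBoxLayout,
    align_flag,
)


class StatusPane(QWidget):
    '''
    Illustrative-only widget: enum values come from the shim's
    pre-resolved flag-spaces, so any Qt5-vs-Qt6 enum relocation is
    contained entirely inside `piker.ui.qt`.

    '''
    def __init__(self, parent: QWidget | None = None) -> None:
        super().__init__(parent)
        layout = QVBoxLayout(self)
        label = QLabel('ready')
        # same value as `Qt.AlignmentFlag.AlignCenter` in PyQt6
        label.setAlignment(align_flag.AlignCenter)
        layout.addWidget(label)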
@@ -31,7 +31,7 @@ import pendulum
 import pyqtgraph as pg

 from piker.types import Struct
-from ..data._timeseries import slice_from_time
+from ..tsp import slice_from_time
 from ..log import get_logger
 from ..toolz import Profiler

(file diff suppressed because it is too large)
@@ -0,0 +1,133 @@
+# piker: trading gear for hackers
+# Copyright (C) 2018-present (in stewardship of pikers)
+# Tyler Goodlet and the pike pikes
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "piker"
+version = "0.1.0a0dev0"
+description = "trading gear for hackers"
+authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }]
+requires-python = ">=3.12, <3.13"
+license = "AGPL-3.0-or-later"
+readme = "README.rst"
+keywords = [
+    "async",
+    "trading",
+    "finance",
+    "quant",
+    "charting",
+]
+classifiers = [
+    "Development Status :: 3 - Alpha",
+    "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
+    "Operating System :: POSIX :: Linux",
+    "Programming Language :: Python :: Implementation :: CPython",
+    "Programming Language :: Python :: 3 :: Only",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Intended Audience :: Financial and Insurance Industry",
+    "Intended Audience :: Science/Research",
+    "Intended Audience :: Developers",
+    "Intended Audience :: Education",
+]
+dependencies = [
+    "async-generator >=1.10, <2.0.0",
+    "attrs >=23.1.0, <24.0.0",
+    "bidict >=0.22.1, <0.23.0",
+    "colorama >=0.4.6, <0.5.0",
+    "colorlog >=6.7.0, <7.0.0",
+    "ib-insync >=0.9.86, <0.10.0",
+    "numba >=0.59.0, <0.60.0",
+    "numpy >=1.25, <2.0",
+    "polars >=0.18.13, <0.19.0",
+    "pygments >=2.16.1, <3.0.0",
+    "rich >=13.5.2, <14.0.0",
+    "tomli >=2.0.1, <3.0.0",
+    "tomli-w >=1.0.0, <2.0.0",
+    "trio-util >=0.7.0, <0.8.0",
+    "trio-websocket >=0.10.3, <0.11.0",
+    "typer >=0.9.0, <1.0.0",
+    "rapidfuzz >=3.5.2, <4.0.0",
+    "pdbp >=1.5.0, <2.0.0",
+    "trio >=0.24, <0.25",
+    "pendulum >=3.0.0, <4.0.0",
+    "httpx >=0.27.0, <0.28.0",
+    "cryptofeed >=2.4.0, <3.0.0",
+    "pyarrow >=17.0.0, <18.0.0",
+    "websockets ==12.0",
+    "msgspec",
+    "tractor",
+    "asyncvnc",
+    "tomlkit",
+]
+
+[project.optional-dependencies]
+uis = [
+    # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
+    # TODO: make sure the levenshtein shit compiles on nix..
+    # rapidfuzz = {extras = ["speedup"], version = "^0.18.0"}
+    "rapidfuzz >=3.2.0, <4.0.0",
+    "qdarkstyle >=3.0.2, <4.0.0",
+    "pyqt6 >=6.7.0, <7.0.0",
+    "pyqtgraph",
+
+    # for consideration,
+    # - 'visidata'
+
+    # TODO: add an `--only daemon` group for running non-ui / pikerd
+    # service tree in distributed mode B)
+    # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
+]
+
+[dependency-groups]
+# TODO: a toolset that makes debugging a `pikerd` service (tree) easy
+# to hack on directly using more or less the local env:
+# - xonsh + xxh
+# - rsyscall + pdbp
+# - actor runtime control console like BEAM/OTP
+#
+# console enhancements and eventually remote debugging extras/helpers.
+# use `uv --dev` to enable
+dev = [
+    "pytest >=6.0.0, <7.0.0",
+    "elasticsearch >=8.9.0, <9.0.0",
+    "xonsh >=0.14.2, <0.15.0",
+    "prompt-toolkit ==3.0.40",
+    "cython >=3.0.0, <4.0.0",
+    "greenback >=1.1.1, <2.0.0",
+    "ruff >=0.9.6",
+]
+
+[project.scripts]
+piker = "piker.cli:cli"
+pikerd = "piker.cli:pikerd"
+ledger = "piker.accounting.cli:ledger"
+
+[tool.hatch.build.targets.sdist]
+include = ["piker"]
+
+[tool.hatch.build.targets.wheel]
+include = ["piker"]
+
+[tool.uv.sources]
+pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
+asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" }
+tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch = "piker_pin" }
+msgspec = { git = "https://github.com/jcrist/msgspec.git" }
+tractor = { path = "../tractor", editable = true }
@@ -1,3 +0,0 @@
-pytest
-docker
-elasticsearch
@@ -1,20 +0,0 @@
-# we require a pinned dev branch to get some edge features that
-# are often untested in tractor's CI and/or being tested by us
-# first before committing as core features in tractor's base.
--e git+https://github.com/goodboy/tractor.git@piker_pin#egg=tractor
-
-# `pyqtgraph` peeps keep breaking, fixing, improving so might as well
-# pin this to a dev branch that we have more control over especially
-# as more graphics stuff gets hashed out.
--e git+https://github.com/pikers/pyqtgraph.git@master#egg=pyqtgraph
-
-# our async client for ``marketstore`` (the tsdb)
--e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore
-
-# ``asyncvnc`` for sending interactions to ib-gw inside docker
--e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc
-
-
-# ``tomlkit`` for account files and configs; we've
-# added some new features that need to get upstreamed:
--e git+https://github.com/pikers/tomlkit.git@piker_pin#egg=tomlkit
@@ -0,0 +1,93 @@
+# from default `ruff.toml` @
+# https://docs.astral.sh/ruff/configuration/
+
+# Exclude a variety of commonly ignored directories.
+exclude = [
+    ".bzr",
+    ".direnv",
+    ".eggs",
+    ".git",
+    ".git-rewrite",
+    ".hg",
+    ".ipynb_checkpoints",
+    ".mypy_cache",
+    ".nox",
+    ".pants.d",
+    ".pyenv",
+    ".pytest_cache",
+    ".pytype",
+    ".ruff_cache",
+    ".svn",
+    ".tox",
+    ".venv",
+    ".vscode",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "node_modules",
+    "site-packages",
+    "venv",
+]
+
+# Same as Black.
+line-length = 88
+indent-width = 4
+
+# Assume Python 3.12
+target-version = "py312"
+
+# ------ - ------
+# TODO, stop warnings around `anext()` builtin use?
+# tool.ruff.target-version = "py310"
+
+
+[lint]
+# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
+# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
+# McCabe complexity (`C901`) by default.
+select = ["E4", "E7", "E9", "F"]
+ignore = []
+ignore-init-module-imports = false
+
+# Allow fix for all enabled rules (when `--fix` is provided).
+fixable = ["ALL"]
+unfixable = []
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+# NOTE: top-level `[lint]` keys must come before this sub-table since
+# any key after a table header belongs to that table in TOML.
+[lint.per-file-ignores]
+"piker/ui/qt.py" = [
+  "E402",
+  "F401",  # unused imports (without __all__ or blah as blah)
+  # "F841", # unused variable rules
+]
+
+[format]
+# Use single quotes in `ruff format`.
+quote-style = "single"
+
+# Like Black, indent with spaces, rather than tabs.
+indent-style = "space"
+
+# Like Black, respect magic trailing commas.
+skip-magic-trailing-comma = false
+
+# Like Black, automatically detect the appropriate line ending.
+line-ending = "auto"
+
+# Enable auto-formatting of code examples in docstrings. Markdown,
+# reStructuredText code/literal blocks and doctests are all supported.
+#
+# This is currently disabled by default, but it is planned for this
+# to be opt-out in the future.
+docstring-code-format = false
+
+# Set the line length limit used when formatting code snippets in
+# docstrings.
+#
+# This only has an effect when the `docstring-code-format` setting is
+# enabled.
+docstring-code-line-length = "dynamic"
							
								
								
									
setup.py (deleted, 121 lines):

@@ -1,121 +0,0 @@
-#!/usr/bin/env python
-
-# piker: trading gear for hackers
-# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-from setuptools import (
-    setup,
-    find_packages,
-)
-
-with open('README.rst', encoding='utf-8') as f:
-    readme = f.read()
-
-
-setup(
-    name="piker",
-    version='0.1.0.alpha0.dev0',
-    description='trading gear for hackers.',
-    long_description=readme,
-    license='AGPLv3',
-    author='Tyler Goodlet',
-    maintainer='Tyler Goodlet',
-    url='https://github.com/pikers/piker',
-    platforms=['linux'],
-    packages=find_packages(),
-    entry_points={
-        'console_scripts': [
-            'piker = piker.cli:cli',
-            'pikerd = piker.cli:pikerd',
-            'ledger = piker.accounting.cli:ledger',
-            # 'store = piker.storage.cli:store',
-        ]
-    },
-    install_requires=[
-        # 'tomlkit',  # fork & fix for now..
-        'tomli',  # for pre-3.11
-        'tomli-w',  # for fast ledger writing
-        'colorlog',
-        'attrs',
-        'pygments',
-        'colorama',  # numba traceback coloring
-        'msgspec',  # performant IPC messaging and structs
-        'protobuf',
-        'typer',
-        'rich',
-
-        # async
-        'trio',
-        'trio-websocket',
-        'trio-util',
-        'async_generator',
-
-        # from github currently (see requirements.txt)
-        # normally pinned to particular git hashes..
-        # 'tractor',
-        # 'asyncvnc',
-        # 'anyio-marketstore',  # mkts tsdb client
-
-        # brokers
-        'asks',  # for non-ws rest apis
-        'ib_insync',
-
-        # numerics
-        'pendulum',  # easier datetimes
-        'bidict',  # 2 way map
-        'cython',
-        'numpy',
-        'numba',
-        'polars',  # dataframes
-
-        # UI
-        'PyQt5',
-        # 'pyqtgraph',  from our fork see reqs.txt
-        'qdarkstyle >= 3.0.2',  # themeing
-        'fuzzywuzzy[speedup]',  # fuzzy search
-    ],
-    extras_require={
-        'tsdb': [
-            'docker',
-        ],
-        'es': [
-            'docker',
-            'elasticsearch'
-        ]
-    },
-    tests_require=['pytest'],
-    python_requires=">=3.10",
-    keywords=[
-        "async",
-        "trading",
-        "finance",
-        "quant",
-        "charting",
-    ],
-    classifiers=[
-        'Development Status :: 3 - Alpha',
-        'License :: OSI Approved :: ',
-        'Operating System :: POSIX :: Linux',
-        "Programming Language :: Python :: Implementation :: CPython",
-        "Programming Language :: Python :: 3 :: Only",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        'Intended Audience :: Financial and Insurance Industry',
-        'Intended Audience :: Science/Research',
-        'Intended Audience :: Developers',
-        'Intended Audience :: Education',
-    ],
-)
Some files were not shown because too many files have changed in this diff.