Compare commits

44 Commits

2cb7b505e1 ... 18e6b8beaa
| SHA1 |
|---|
| 18e6b8beaa |
| cd592afed8 |
| d475347e53 |
| a4954393f9 |
| 82be0e7935 |
| 62c67163ec |
| 52e45ccabf |
| 0fcf278f8f |
| 33387c4baa |
| 44dcad1745 |
| 3a65f95636 |
| 7fdf6fb5ea |
| 89e0e53e2f |
| 2cdaf1ad94 |
| c933f2ad56 |
| 00108010c9 |
| 8a4901c517 |
| d7f6a5ab63 |
| e0fdabf651 |
| cb88dfc9da |
| bb41dd6d18 |
| 99e90129ad |
| cceb7a37b9 |
| 5382815b2d |
| cb1ba8a05f |
| 6c65ec4d3b |
| 12e371b027 |
| 014bd58db4 |
| 844544ed8e |
| f479252d26 |
| 033ef2e35e |
| 2cdece244c |
| 018694bbdb |
| 128a2d507f |
| 430650a6a7 |
| 1da3cf5698 |
| a348603fc4 |
| 86047824d8 |
| cb92abbc38 |
| 70332e375b |
| 4940aabe05 |
| 4f9998e9fb |
| c92a236196 |
| e4cd1f85f6 |

README.rst
							|  | @ -1,162 +1,161 @@ | ||||||
| piker | piker | ||||||
| ----- | ----- | ||||||
| trading gear for hackers. | trading gear for hackers | ||||||
| 
 | 
 | ||||||
| |gh_actions| | |gh_actions| | ||||||
| 
 | 
 | ||||||
| .. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fpikers%2Fpiker%2Fbadge&style=popout-square | .. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fpikers%2Fpiker%2Fbadge&style=popout-square | ||||||
|     :target: https://actions-badge.atrox.dev/piker/pikers/goto |     :target: https://actions-badge.atrox.dev/piker/pikers/goto | ||||||
| 
 | 
 | ||||||
| ``piker`` is a broker agnostic, next-gen FOSS toolset for real-time | ``piker`` is a broker agnostic, next-gen FOSS toolset and runtime for | ||||||
| computational trading targeted at `hardcore Linux users <comp_trader>`_ . | real-time computational trading targeted at `hardcore Linux users | ||||||
|  | <comp_trader>`_ . | ||||||
| 
 | 
 | ||||||
| we use as much bleeding edge tech as possible including (but not limited to): | we use much bleeding edge tech including (but not limited to): | ||||||
| 
 | 
 | ||||||
| - latest python for glue_ | - latest python for glue_ | ||||||
| - trio_ & tractor_ for our distributed, multi-core, real-time streaming | - uv_ for packaging and distribution | ||||||
|   `structured concurrency`_ runtime B) | - trio_ & tractor_ for our distributed `structured concurrency`_ runtime | ||||||
| - Qt_ for pristine high performance UIs | - Qt_ for pristine low latency UIs | ||||||
| - pyqtgraph_ for real-time charting | - pyqtgraph_ (which we've extended) for real-time charting and graphics | ||||||
| - ``polars`` ``numpy`` and ``numba`` for `fast numerics`_ | - ``polars`` ``numpy`` and ``numba`` for redic `fast numerics`_ | ||||||
| - `apache arrow and parquet`_ for time series history management | - `apache arrow and parquet`_ for time-series storage | ||||||
|   persistence and sharing |  | ||||||
| - (prototyped) techtonicdb_ for L2 book storage |  | ||||||
| 
 | 
 | ||||||
| .. |travis| image:: https://img.shields.io/travis/pikers/piker/master.svg | potential projects we might integrate with soon, | ||||||
|     :target: https://travis-ci.org/pikers/piker | 
 | ||||||
|  | - (already prototyped in ) techtonicdb_ for L2 book storage | ||||||
|  | 
 | ||||||
|  | .. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/ | ||||||
|  | .. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue | ||||||
|  | .. _uv: https://docs.astral.sh/uv/ | ||||||
| .. _trio: https://github.com/python-trio/trio | .. _trio: https://github.com/python-trio/trio | ||||||
| .. _tractor: https://github.com/goodboy/tractor | .. _tractor: https://github.com/goodboy/tractor | ||||||
| .. _structured concurrency: https://trio.discourse.group/ | .. _structured concurrency: https://trio.discourse.group/ | ||||||
| .. _marketstore: https://github.com/alpacahq/marketstore |  | ||||||
| .. _techtonicdb: https://github.com/0b01/tectonicdb |  | ||||||
| .. _Qt: https://www.qt.io/ | .. _Qt: https://www.qt.io/ | ||||||
| .. _pyqtgraph: https://github.com/pyqtgraph/pyqtgraph | .. _pyqtgraph: https://github.com/pyqtgraph/pyqtgraph | ||||||
| .. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue |  | ||||||
| .. _apache arrow and parquet: https://arrow.apache.org/faq/ | .. _apache arrow and parquet: https://arrow.apache.org/faq/ | ||||||
| .. _fast numerics: https://zerowithdot.com/python-numpy-and-pandas-performance/ | .. _fast numerics: https://zerowithdot.com/python-numpy-and-pandas-performance/ | ||||||
| .. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/ | .. _techtonicdb: https://github.com/0b01/tectonicdb | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| focus and features: | focus and feats: | ||||||
| ******************* | **************** | ||||||
| - 100% federated: your code, your hardware, your data feeds, your broker fills. | fitting with these tenets, we're always open to new | ||||||
| - zero web: low latency, native software that doesn't try to re-invent the OS | framework/lib/service interop suggestions and ideas! | ||||||
| - maximal **privacy**: prevent brokers and mms from knowing your |  | ||||||
|   planz; smack their spreads with dark volume. |  | ||||||
| - zero clutter: modal, context oriented UIs that echew minimalism, reduce |  | ||||||
|   thought noise and encourage un-emotion. |  | ||||||
| - first class parallelism: built from the ground up on next-gen structured concurrency |  | ||||||
|   primitives. |  | ||||||
| - traders first: broker/exchange/asset-class agnostic |  | ||||||
| - systems grounded: real-time financial signal processing that will |  | ||||||
|   make any queuing or DSP eng juice their shorts. |  | ||||||
| - non-tina UX: sleek, powerful keyboard driven interaction with expected use in tiling wms |  | ||||||
| - data collaboration: every process and protocol is multi-host scalable. |  | ||||||
| - fight club ready: zero interest in adoption by suits; no corporate friendly license, ever. |  | ||||||
| 
 | 
 | ||||||
| fitting with these tenets, we're always open to new framework suggestions and ideas. | - **100% federated**: | ||||||
|  |   your code, your hardware, your data feeds, your broker fills. | ||||||
| 
 | 
 | ||||||
| building the best looking, most reliable, keyboard friendly trading | - **zero web**: | ||||||
| platform is the dream; join the cause. |   low latency as a prime objective, native UIs and modern IPC | ||||||
|  |   protocols without trying to re-invent the "OS-as-an-app".. | ||||||
|  | 
 | ||||||
|  | - **maximal privacy**: | ||||||
|  |   prevent brokers and mms from knowing your planz; smack their | ||||||
|  |   spreads with dark volume from a VPN tunnel. | ||||||
|  | 
 | ||||||
|  | - **zero clutter**: | ||||||
|  |   modal, context oriented UIs that eschew minimalism, reduce thought | ||||||
|  |   noise and encourage un-emotion. | ||||||
|  | 
 | ||||||
|  | - **first class parallelism**: | ||||||
|  |   built from the ground up on a next-gen structured concurrency | ||||||
|  |   supervision sys. | ||||||
|  | 
 | ||||||
|  | - **traders first**: | ||||||
|  |   broker/exchange/venue/asset-class/money-sys agnostic | ||||||
|  | 
 | ||||||
|  | - **systems grounded**: | ||||||
|  |   real-time financial signal processing (fsp) that will make any | ||||||
|  |   queuing or DSP eng juice their shorts. | ||||||
|  | 
 | ||||||
|  | - **non-tina UX**: | ||||||
|  |   sleek, powerful keyboard driven interaction with expected use in | ||||||
|  |   tiling wms (or maybe even a DDE). | ||||||
|  | 
 | ||||||
|  | - **data collab at scale**: | ||||||
|  |   every actor-process and protocol is multi-host aware. | ||||||
|  | 
 | ||||||
|  | - **fight club ready**: | ||||||
|  |   zero interest in adoption by suits; no corporate friendly license, | ||||||
|  |   ever. | ||||||
|  | 
 | ||||||
|  | building the hottest looking, fastest, most reliable, keyboard | ||||||
|  | friendly FOSS trading platform is the dream; join the cause. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| sane install with `poetry` | a sane install with `uv` | ||||||
| ************************** | ************************ | ||||||
| TODO! | bc why install with `python` when you can go faster with `rust` :: | ||||||
| 
 | 
 | ||||||
| 
 |     uv lock | ||||||
| rigorous install on ``nixos`` using ``poetry2nix`` |  | ||||||
| ************************************************** |  | ||||||
| TODO! |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| hacky install on nixos | hacky install on nixos | ||||||
| ********************** | ********************** | ||||||
| `NixOS` is our core devs' distro of choice for which we offer | ``NixOS`` is our core devs' distro of choice for which we offer | ||||||
| a stringently defined development shell environment that can be loaded with:: | a stringently defined development shell environment that can be loaded with:: | ||||||
| 
 | 
 | ||||||
|     nix-shell develop.nix |     nix-shell default.nix | ||||||
| 
 |  | ||||||
| this will setup the required python environment to run piker, make sure to |  | ||||||
| run:: |  | ||||||
| 
 |  | ||||||
|     pip install -r requirements.txt -e . |  | ||||||
| 
 |  | ||||||
| once after loading the shell |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| install wild-west style via `pip` | start a chart | ||||||
| ********************************* | ************* | ||||||
| ``piker`` is currently under heavy pre-alpha development and as such | run a realtime OHLCV chart stand-alone:: | ||||||
| should be cloned from this repo and hacked on directly. |  | ||||||
| 
 | 
 | ||||||
| for a development install:: |     piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken | ||||||
| 
 | 
 | ||||||
|     git clone git@github.com:pikers/piker.git | this runs a chart UI (with 1m sampled OHLCV) and shows 2 spot markets from 2 diff cexes | ||||||
|     cd piker | overlayed on the same graph. Use of `piker` without first starting | ||||||
|     virtualenv env | a daemon (`pikerd` - see below) means there is an implicit spawning of the | ||||||
|     source ./env/bin/activate | multi-actor-runtime (implemented as a `tractor` app). | ||||||
|     pip install -r requirements.txt -e . | 
 | ||||||
|  | For additional subsystem feats available through our chart UI see the | ||||||
|  | various sub-readmes: | ||||||
|  | 
 | ||||||
|  | - order control using a mouse-n-keyboard UX B) | ||||||
|  | - cross venue market-pair (what most call "symbol") search, select, overlay Bo | ||||||
|  | - financial-signal-processing (`piker.fsp`) write-n-reload to sub-chart BO | ||||||
|  | - src-asset derivatives scan for anal, like the infamous "max pain" XO | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| check out our charts | spawn a daemon standalone | ||||||
| ******************** | ************************* | ||||||
| bet you weren't expecting this from the foss:: | we call the root actor-process the ``pikerd``. it can be (and is | ||||||
| 
 | recommended normally to be) started separately from the ``piker | ||||||
|     piker -l info -b kraken -b binance chart btcusdt.binance --pdb | chart`` program:: | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| this runs the main chart (currently with 1m sampled OHLC) in in debug |  | ||||||
| mode and you can practice paper trading using the following |  | ||||||
| micro-manual: |  | ||||||
| 
 |  | ||||||
| ``order_mode`` ( |  | ||||||
|     edge triggered activation by any of the following keys, |  | ||||||
|     ``mouse-click`` on y-level to submit at that price |  | ||||||
|     ): |  | ||||||
| 
 |  | ||||||
|     - ``f``/ ``ctl-f`` to stage buy |  | ||||||
|     - ``d``/ ``ctl-d`` to stage sell |  | ||||||
|     - ``a`` to stage alert |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| ``search_mode`` ( |  | ||||||
|     ``ctl-l`` or ``ctl-space`` to open, |  | ||||||
|     ``ctl-c`` or ``ctl-space`` to close |  | ||||||
|     ) : |  | ||||||
| 
 |  | ||||||
|     - begin typing to have symbol search automatically lookup |  | ||||||
|       symbols from all loaded backend (broker) providers |  | ||||||
|     - arrow keys and mouse click to navigate selection |  | ||||||
|     - vi-like ``ctl-[hjkl]`` for navigation |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| you can also configure your position allocation limits from the |  | ||||||
| sidepane. |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| run in distributed mode |  | ||||||
| *********************** |  | ||||||
| start the service manager and data feed daemon in the background and |  | ||||||
| connect to it:: |  | ||||||
| 
 | 
 | ||||||
|     pikerd -l info --pdb |     pikerd -l info --pdb | ||||||
| 
 | 
 | ||||||
|  | the daemon does nothing until a ``piker``-client (like ``piker | ||||||
|  | chart``) connects and requests some particular sub-system. for | ||||||
|  | a connecting chart ``pikerd`` will spawn and manage at least, | ||||||
| 
 | 
 | ||||||
| connect your chart:: | - a data-feed daemon: ``datad`` which does all the work of comms with | ||||||
|  |   the backend provider (in this case the ``binance`` cex). | ||||||
|  | - a paper-trading engine instance, ``paperboi.binance``, (if no live | ||||||
|  |   account has been configured) which allows for auto/manual order | ||||||
|  |   control against the live quote stream. | ||||||
| 
 | 
 | ||||||
|     piker -l info -b kraken -b binance chart xmrusdt.binance --pdb | *using* an actor-service (aka micro-daemon) manager which dynamically | ||||||
|  | supervises various sub-subsystems-as-services throughout the ``piker`` | ||||||
|  | runtime-stack. | ||||||
| 
 | 
 | ||||||
|  | now you can (implicitly) connect your chart:: | ||||||
| 
 | 
 | ||||||
| enjoy persistent real-time data feeds tied to daemon lifetime. the next |     piker chart btcusdt.spot.binance | ||||||
| time you spawn a chart it will load much faster since the data feed has | 
 | ||||||
| been cached and is now always running live in the background until you | since ``pikerd`` was started separately you can now enjoy a persistent | ||||||
| kill ``pikerd``. | real-time data stream tied to the daemon-tree's lifetime. i.e. the next | ||||||
|  | time you spawn a chart it will obviously not only load much faster | ||||||
|  | (since the underlying ``datad.binance`` is left running with its | ||||||
|  | in-memory IPC data structures) but also the data-feed and any order | ||||||
|  | mgmt states should be persistent until you finally cancel ``pikerd``. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if anyone asks you what this project is about | if anyone asks you what this project is about | ||||||
| ********************************************* | ********************************************* | ||||||
| you don't talk about it. | you don't talk about it; just use it. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| how do i get involved? | how do i get involved? | ||||||
|  | @ -166,6 +165,15 @@ enter the matrix. | ||||||
| 
 | 
 | ||||||
| how come there ain't that many docs | how come there ain't that many docs | ||||||
| *********************************** | *********************************** | ||||||
| suck it up, learn the code; no one is trying to sell you on anything. | i mean we want/need them but building the core right has been higher | ||||||
| also, we need lotsa help so if you want to start somewhere and can't | prio than marketing (and likely will stay that way Bp). | ||||||
| necessarily write serious code, this might be the place for you! | 
 | ||||||
|  | soo, suck it up bc, | ||||||
|  | 
 | ||||||
|  | - no one is trying to sell you on anything | ||||||
|  | - learning the code base is prolly way more valuable | ||||||
|  | - the UI/UXs are intended to be "intuitive" for any hacker.. | ||||||
|  | 
 | ||||||
|  | we obviously need tonz of help so if you want to start somewhere and | ||||||
|  | can't necessarily write "advanced" concurrent python/rust code, then | ||||||
|  | helping to document literally anything might be the place for you! | ||||||
|  |  | ||||||
|  | @ -0,0 +1,134 @@ | ||||||
|  | with (import <nixpkgs> {}); | ||||||
|  | let | ||||||
|  |   glibStorePath = lib.getLib glib; | ||||||
|  |   zlibStorePath = lib.getLib zlib; | ||||||
|  |   zstdStorePath = lib.getLib zstd; | ||||||
|  |   dbusStorePath = lib.getLib dbus; | ||||||
|  |   libGLStorePath = lib.getLib libGL; | ||||||
|  |   freetypeStorePath = lib.getLib freetype; | ||||||
|  |   qt6baseStorePath = lib.getLib qt6.qtbase; | ||||||
|  |   fontconfigStorePath = lib.getLib fontconfig; | ||||||
|  |   libxkbcommonStorePath = lib.getLib libxkbcommon; | ||||||
|  |   xcbutilcursorStorePath = lib.getLib xcb-util-cursor; | ||||||
|  | 
 | ||||||
|  |   qtpyStorePath = lib.getLib python312Packages.qtpy; | ||||||
|  |   pyqt6StorePath = lib.getLib python312Packages.pyqt6; | ||||||
|  |   pyqt6SipStorePath = lib.getLib python312Packages.pyqt6-sip; | ||||||
|  |   rapidfuzzStorePath = lib.getLib python312Packages.rapidfuzz; | ||||||
|  |   qdarkstyleStorePath = lib.getLib python312Packages.qdarkstyle; | ||||||
|  | 
 | ||||||
|  |   xorgLibX11StorePath = lib.getLib xorg.libX11; | ||||||
|  |   xorgLibxcbStorePath = lib.getLib xorg.libxcb; | ||||||
|  |   xorgxcbutilwmStorePath = lib.getLib xorg.xcbutilwm; | ||||||
|  |   xorgxcbutilimageStorePath = lib.getLib xorg.xcbutilimage; | ||||||
|  |   xorgxcbutilerrorsStorePath = lib.getLib xorg.xcbutilerrors; | ||||||
|  |   xorgxcbutilkeysymsStorePath = lib.getLib xorg.xcbutilkeysyms; | ||||||
|  |   xorgxcbutilrenderutilStorePath = lib.getLib xorg.xcbutilrenderutil; | ||||||
|  | in | ||||||
|  | stdenv.mkDerivation { | ||||||
|  |   name = "piker-qt6-uv"; | ||||||
|  |   buildInputs = [ | ||||||
|  |     # System requirements. | ||||||
|  |     glib | ||||||
|  |     zlib | ||||||
|  |     dbus | ||||||
|  |     zstd | ||||||
|  |     libGL | ||||||
|  |     freetype | ||||||
|  |     qt6.qtbase | ||||||
|  |     libgcc.lib | ||||||
|  |     fontconfig | ||||||
|  |     libxkbcommon | ||||||
|  | 
 | ||||||
|  |     # Xorg requirements | ||||||
|  |     xcb-util-cursor | ||||||
|  |     xorg.libxcb | ||||||
|  |     xorg.libX11 | ||||||
|  |     xorg.xcbutilwm | ||||||
|  |     xorg.xcbutilimage | ||||||
|  |     xorg.xcbutilerrors | ||||||
|  |     xorg.xcbutilkeysyms | ||||||
|  |     xorg.xcbutilrenderutil | ||||||
|  | 
 | ||||||
|  |     # Python requirements. | ||||||
|  |     python312Full | ||||||
|  |     python312Packages.uv | ||||||
|  |     python312Packages.qdarkstyle | ||||||
|  |     python312Packages.rapidfuzz | ||||||
|  |     python312Packages.pyqt6 | ||||||
|  |     python312Packages.qtpy | ||||||
|  |   ]; | ||||||
|  |   src = null; | ||||||
|  |   shellHook = '' | ||||||
|  |     set -e | ||||||
|  | 
 | ||||||
|  |     # Set the Qt plugin path | ||||||
|  |     # export QT_DEBUG_PLUGINS=1 | ||||||
|  | 
 | ||||||
|  |     QTBASE_PATH="${qt6baseStorePath}/lib" | ||||||
|  |     QT_PLUGIN_PATH="$QTBASE_PATH/qt-6/plugins" | ||||||
|  |     QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms" | ||||||
|  | 
 | ||||||
|  |     LIB_GCC_PATH="${libgcc.lib}/lib" | ||||||
|  |     GLIB_PATH="${glibStorePath}/lib" | ||||||
|  |     ZSTD_PATH="${zstdStorePath}/lib" | ||||||
|  |     ZLIB_PATH="${zlibStorePath}/lib" | ||||||
|  |     DBUS_PATH="${dbusStorePath}/lib" | ||||||
|  |     LIBGL_PATH="${libGLStorePath}/lib" | ||||||
|  |     FREETYPE_PATH="${freetypeStorePath}/lib" | ||||||
|  |     FONTCONFIG_PATH="${fontconfigStorePath}/lib" | ||||||
|  |     LIB_XKB_COMMON_PATH="${libxkbcommonStorePath}/lib" | ||||||
|  | 
 | ||||||
|  |     XCB_UTIL_CURSOR_PATH="${xcbutilcursorStorePath}/lib" | ||||||
|  |     XORG_LIB_X11_PATH="${xorgLibX11StorePath}/lib" | ||||||
|  |     XORG_LIB_XCB_PATH="${xorgLibxcbStorePath}/lib" | ||||||
|  |     XORG_XCB_UTIL_IMAGE_PATH="${xorgxcbutilimageStorePath}/lib" | ||||||
|  |     XORG_XCB_UTIL_WM_PATH="${xorgxcbutilwmStorePath}/lib" | ||||||
|  |     XORG_XCB_UTIL_RENDER_UTIL_PATH="${xorgxcbutilrenderutilStorePath}/lib" | ||||||
|  |     XORG_XCB_UTIL_KEYSYMS_PATH="${xorgxcbutilkeysymsStorePath}/lib" | ||||||
|  |     XORG_XCB_UTIL_ERRORS_PATH="${xorgxcbutilerrorsStorePath}/lib" | ||||||
|  | 
 | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QTBASE_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_PLUGIN_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_QPA_PLATFORM_PLUGIN_PATH" | ||||||
|  | 
 | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_GCC_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$DBUS_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GLIB_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZLIB_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZSTD_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIBGL_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FONTCONFIG_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FREETYPE_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_XKB_COMMON_PATH" | ||||||
|  | 
 | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XCB_UTIL_CURSOR_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_X11_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_XCB_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_IMAGE_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_WM_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_RENDER_UTIL_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_KEYSYMS_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_ERRORS_PATH" | ||||||
|  | 
 | ||||||
|  |     export LD_LIBRARY_PATH | ||||||
|  | 
 | ||||||
|  |     RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages" | ||||||
|  |     QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages" | ||||||
|  |     QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages" | ||||||
|  |     PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages" | ||||||
|  |     PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages" | ||||||
|  | 
 | ||||||
|  |     PATCH="$PATCH:$RPDFUZZ_PATH" | ||||||
|  |     PATCH="$PATCH:$QDRKSTYLE_PATH" | ||||||
|  |     PATCH="$PATCH:$QTPY_PATH" | ||||||
|  |     PATCH="$PATCH:$PYQT6_PATH" | ||||||
|  |     PATCH="$PATCH:$PYQT6_SIP_PATH" | ||||||
|  | 
 | ||||||
|  |     export PATCH | ||||||
|  | 
 | ||||||
|  |     # Install deps | ||||||
|  |     uv lock | ||||||
|  | 
 | ||||||
|  |   ''; | ||||||
|  | } | ||||||
|  | @ -0,0 +1,139 @@ | ||||||
|  | #!/usr/bin/env python | ||||||
|  | from decimal import ( | ||||||
|  |     Decimal, | ||||||
|  | ) | ||||||
|  | import trio | ||||||
|  | import tractor | ||||||
|  | from datetime import datetime | ||||||
|  | from pprint import pformat | ||||||
|  | from piker.brokers.deribit.api import ( | ||||||
|  |     get_client, | ||||||
|  |     maybe_open_oi_feed, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | def check_if_complete( | ||||||
|  |         oi: dict[str, dict[str, Decimal | None]] | ||||||
|  |     ) -> bool: | ||||||
|  |     return all( | ||||||
|  |         oi[strike]['C'] is not None | ||||||
|  |         and | ||||||
|  |         oi[strike]['P'] is not None for strike in oi | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def max_pain_daemon( | ||||||
|  | ) -> None: | ||||||
|  |     oi_by_strikes: dict[str, dict[str, Decimal | None]] | ||||||
|  |     instruments: list[Symbol] = [] | ||||||
|  |     expiry_dates: list[str] | ||||||
|  |     expiry_date: str | ||||||
|  |     currency: str = 'btc' | ||||||
|  |     kind: str = 'option' | ||||||
|  | 
 | ||||||
|  |     async with get_client( | ||||||
|  |     ) as client: | ||||||
|  |         expiry_dates: list[str] = await client.get_expiration_dates( | ||||||
|  |             currency=currency, | ||||||
|  |             kind=kind | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         print(f'Available expiration dates for {currency}-{kind}:') | ||||||
|  |         print(f'{expiry_dates}') | ||||||
|  |         expiry_date = input('Please enter a valid expiration date: ').upper() | ||||||
|  |         print('Starting little daemon...') | ||||||
|  | 
 | ||||||
|  |         oi_by_strikes: dict[str, dict[str, Decimal]] | ||||||
|  |         instruments = await client.get_instruments( | ||||||
|  |             expiry_date=expiry_date, | ||||||
|  |         ) | ||||||
|  |         oi_by_strikes = client.get_strikes_dict(instruments) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def update_oi_by_strikes(msg: tuple): | ||||||
|  |         nonlocal oi_by_strikes | ||||||
|  |         if 'oi' == msg[0]: | ||||||
|  |             strike_price = msg[1]['strike_price'] | ||||||
|  |             option_type = msg[1]['option_type'] | ||||||
|  |             open_interest = msg[1]['open_interest'] | ||||||
|  |             oi_by_strikes.setdefault( | ||||||
|  |                 strike_price, {} | ||||||
|  |             ).update( | ||||||
|  |                 {option_type: open_interest} | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |     def get_max_pain( | ||||||
|  |         oi_by_strikes: dict[str, dict[str, Decimal]] | ||||||
|  |     ) -> dict[str, str | Decimal]: | ||||||
|  |         ''' | ||||||
|  |         This method requires only the strike_prices and oi for calls | ||||||
|  |         and puts; the closes list is the same as the strike_prices. | ||||||
|  |         The idea is to sum, for each candidate strike, the cash value | ||||||
|  |         of all ITM calls and puts across the strikes; the strike with | ||||||
|  |         the lowest total intrinsic value is the max pain point. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  | 
 | ||||||
|  |         nonlocal timestamp | ||||||
|  |         # We need to find the lowest value, so we start at | ||||||
|  |         # infinity to ensure that, and the max_pain must be | ||||||
|  |         # an amount greater than zero. | ||||||
|  |         total_intrinsic_value: Decimal = Decimal('Infinity') | ||||||
|  |         max_pain: Decimal = Decimal(0) | ||||||
|  |         call_cash: Decimal = Decimal(0) | ||||||
|  |         put_cash: Decimal = Decimal(0) | ||||||
|  |         intrinsic_values: dict[str, dict[str, Decimal]] = {} | ||||||
|  |         closes: list = sorted(Decimal(close) for close in oi_by_strikes) | ||||||
|  | 
 | ||||||
|  |         for strike, oi in oi_by_strikes.items(): | ||||||
|  |             s = Decimal(strike) | ||||||
|  |             call_cash = sum(max(0, (s - c) * oi_by_strikes[str(c)]['C']) for c in closes) | ||||||
|  |             put_cash = sum(max(0, (c - s) * oi_by_strikes[str(c)]['P']) for c in closes) | ||||||
|  | 
 | ||||||
|  |             intrinsic_values[strike] = { | ||||||
|  |                 'C': call_cash, | ||||||
|  |                 'P': put_cash, | ||||||
|  |                 'total': call_cash + put_cash, | ||||||
|  |             } | ||||||
|  | 
 | ||||||
|  |             if intrinsic_values[strike]['total'] < total_intrinsic_value: | ||||||
|  |                 total_intrinsic_value = intrinsic_values[strike]['total'] | ||||||
|  |                 max_pain = s | ||||||
|  | 
 | ||||||
|  |         return { | ||||||
|  |             'timestamp': timestamp, | ||||||
|  |             'expiry_date': expiry_date, | ||||||
|  |             'total_intrinsic_value': total_intrinsic_value, | ||||||
|  |             'max_pain': max_pain, | ||||||
|  |         } | ||||||
|  | 
 | ||||||
|  |     async with maybe_open_oi_feed( | ||||||
|  |         instruments, | ||||||
|  |     ) as oi_feed: | ||||||
|  |         async for msg in oi_feed: | ||||||
|  | 
 | ||||||
|  |             update_oi_by_strikes(msg) | ||||||
|  |             if check_if_complete(oi_by_strikes): | ||||||
|  |                 if 'oi' == msg[0]: | ||||||
|  |                     timestamp = msg[1]['timestamp'] | ||||||
|  |                     max_pain = get_max_pain(oi_by_strikes) | ||||||
|  |                     print('-----------------------------------------------') | ||||||
|  |                     print(f'timestamp:             {datetime.fromtimestamp(max_pain['timestamp'])}') | ||||||
|  |                     print(f'expiry_date:           {max_pain['expiry_date']}') | ||||||
|  |                     print(f'max_pain:              {max_pain['max_pain']}') | ||||||
|  |                     print(f'total intrinsic value: {max_pain['total_intrinsic_value']}') | ||||||
|  |                     print('-----------------------------------------------') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def main(): | ||||||
|  | 
 | ||||||
|  |     async with tractor.open_nursery() as n: | ||||||
|  | 
 | ||||||
|  |         p: tractor.Portal = await n.start_actor( | ||||||
|  |             'max_pain_daemon', | ||||||
|  |             enable_modules=[__name__], | ||||||
|  |             infect_asyncio=True, | ||||||
|  |         ) | ||||||
|  |         await p.run(max_pain_daemon) | ||||||
|  | 
 | ||||||
|  | if __name__ == '__main__': | ||||||
|  |     trio.run(main) | ||||||
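
For reference, the quantity that ``get_max_pain()`` above scans for can be stated compactly. Writing ``OI_C(c)`` and ``OI_P(c)`` for the call/put open interest at strike ``c``, and letting the candidate expiry price ``K`` range over the same strikes, the loop computes:

```latex
V(K) = \sum_{c} OI_C(c)\,\max(0,\, K - c) \;+\; \sum_{c} OI_P(c)\,\max(0,\, c - K),
\qquad
\text{max\_pain} = \arg\min_{K} V(K)
```

i.e. the strike minimizing the total intrinsic value owed to option holders is reported as the max pain point, alongside the latest feed timestamp.
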
|  | @ -0,0 +1,19 @@ | ||||||
|  | ## Max Pain Calculation for Deribit Options | ||||||
|  | 
 | ||||||
|  | This feature calculates the max pain point for options traded on the Deribit exchange using the cryptofeed library. | ||||||
|  | 
 | ||||||
|  | - Functions in the api module for fetching options data from Deribit. [commit](https://pikers.dev/pikers/piker/commit/da55856dd2876291f55a06eb0561438a912d8241) | ||||||
|  | 
 | ||||||
|  | - Compute the max pain point based on open interest data using deribit's api. [commit](https://pikers.dev/pikers/piker/commit/0d9d6e15ba0edeb662ec97f7599dd66af3046b94) | ||||||
|  | 
 | ||||||
|  | ### How to test it? | ||||||
|  | 
 | ||||||
|  | **Before you start:** in order to get this working with `uv`, you **must** use my `tractor` [fork](https://pikers.dev/ntorres/tractor/src/branch/aio_abandons) and this branch: `aio_abandons`. the reason is that I cherry-picked the `uv_migration` that guille made; for some reason that I didn't dive into, on my system I need tractor using `uv` too. quite hacky I guess. | ||||||
|  | 
 | ||||||
|  | 1. `uv lock` | ||||||
|  | 
 | ||||||
|  | 2. `uv run --no-dev python examples/max_pain.py` | ||||||
|  | 
 | ||||||
|  | 3. A message should be displayed; enter one of the available expiration dates. | ||||||
|  | 
 | ||||||
|  | 4. The script should be up and running. | ||||||
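
For intuition, here is a tiny self-contained sketch (the strikes and open-interest figures are made up, not from the example script) that reproduces the same total-intrinsic-value scan as ``get_max_pain()`` in ``examples/max_pain.py``:

```python
from decimal import Decimal

# hypothetical open interest per strike: {strike: {'C': calls_oi, 'P': puts_oi}}
oi_by_strikes: dict[str, dict[str, Decimal]] = {
    '60000': {'C': Decimal('120'), 'P': Decimal('80')},
    '65000': {'C': Decimal('90'),  'P': Decimal('95')},
    '70000': {'C': Decimal('40'),  'P': Decimal('150')},
}

closes: list[Decimal] = sorted(Decimal(k) for k in oi_by_strikes)
max_pain: Decimal | None = None
lowest_total: Decimal = Decimal('Infinity')

for strike, oi in oi_by_strikes.items():
    s = Decimal(strike)
    # cash owed to call/put holders if price settles exactly at `s`
    call_cash = sum(max(Decimal(0), s - c) * oi_by_strikes[str(c)]['C'] for c in closes)
    put_cash = sum(max(Decimal(0), c - s) * oi_by_strikes[str(c)]['P'] for c in closes)
    total = call_cash + put_cash
    if total < lowest_total:
        lowest_total, max_pain = total, s

print(f'max pain strike: {max_pain}, total intrinsic value: {lowest_total}')
```
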
|  | @ -50,7 +50,8 @@ __brokers__: list[str] = [ | ||||||
|     'binance', |     'binance', | ||||||
|     'ib', |     'ib', | ||||||
|     'kraken', |     'kraken', | ||||||
|     'kucoin' |     'kucoin', | ||||||
|  |     'deribit', | ||||||
| 
 | 
 | ||||||
|     # broken but used to work |     # broken but used to work | ||||||
|     # 'questrade', |     # 'questrade', | ||||||
|  | @ -61,7 +62,6 @@ __brokers__: list[str] = [ | ||||||
|     # wstrade |     # wstrade | ||||||
|     # iex |     # iex | ||||||
| 
 | 
 | ||||||
|     # deribit |  | ||||||
|     # bitso |     # bitso | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  | @ -71,7 +71,7 @@ def get_brokermod(brokername: str) -> ModuleType: | ||||||
|     Return the imported broker module by name. |     Return the imported broker module by name. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     module = import_module('.' + brokername, 'piker.brokers') |     module: ModuleType = import_module('.' + brokername, 'piker.brokers') | ||||||
|     # we only allow monkeying because it's for internal keying |     # we only allow monkeying because it's for internal keying | ||||||
|     module.name = module.__name__.split('.')[-1] |     module.name = module.__name__.split('.')[-1] | ||||||
|     return module |     return module | ||||||
|  |  | ||||||
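
As a quick usage sketch of the registry change above (illustrative only; it assumes both ``__brokers__`` and ``get_brokermod()`` are exported from ``piker.brokers`` as this hunk suggests, and importing every backend requires its deps to be installed):

```python
from piker.brokers import __brokers__, get_brokermod

# dynamically import each registered backend module and show the short
# `.name` that `get_brokermod()` monkey-patches onto it for keying.
for brokername in __brokers__:
    mod = get_brokermod(brokername)
    print(f'{mod.name} -> {mod.__name__}')  # e.g. 'deribit' -> 'piker.brokers.deribit'
```
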
|  | @ -18,10 +18,11 @@ | ||||||
| Handy cross-broker utils. | Handy cross-broker utils. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
|  | from __future__ import annotations | ||||||
| from functools import partial | from functools import partial | ||||||
| 
 | 
 | ||||||
| import json | import json | ||||||
| import asks | import httpx | ||||||
| import logging | import logging | ||||||
| 
 | 
 | ||||||
| from ..log import ( | from ..log import ( | ||||||
|  | @ -60,11 +61,11 @@ class NoData(BrokerError): | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         *args, |         *args, | ||||||
|         info: dict, |         info: dict|None = None, | ||||||
| 
 | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         super().__init__(*args) |         super().__init__(*args) | ||||||
|         self.info: dict = info |         self.info: dict|None = info | ||||||
| 
 | 
 | ||||||
|         # when raised, machinery can check if the backend |         # when raised, machinery can check if the backend | ||||||
|         # set a "frame size" for doing datetime calcs. |         # set a "frame size" for doing datetime calcs. | ||||||
|  | @ -90,16 +91,18 @@ class DataThrottle(BrokerError): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def resproc( | def resproc( | ||||||
|     resp: asks.response_objects.Response, |     resp: httpx.Response, | ||||||
|     log: logging.Logger, |     log: logging.Logger, | ||||||
|     return_json: bool = True, |     return_json: bool = True, | ||||||
|     log_resp: bool = False, |     log_resp: bool = False, | ||||||
| 
 | 
 | ||||||
| ) -> asks.response_objects.Response: | ) -> httpx.Response: | ||||||
|     """Process response and return its json content. |     ''' | ||||||
|  |     Process response and return its json content. | ||||||
| 
 | 
 | ||||||
|     Raise the appropriate error on non-200 OK responses. |     Raise the appropriate error on non-200 OK responses. | ||||||
|     """ | 
 | ||||||
|  |     ''' | ||||||
|     if not resp.status_code == 200: |     if not resp.status_code == 200: | ||||||
|         raise BrokerError(resp.body) |         raise BrokerError(resp.body) | ||||||
|     try: |     try: | ||||||
|  |  | ||||||
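
To show the ``asks`` → ``httpx`` swap in context, a minimal sketch of funneling a response through ``resproc()`` under ``trio``; the endpoint URL and logger wiring are illustrative only, and the import path assumes this hunk is ``piker.brokers._util``:

```python
import logging

import httpx
import trio

from piker.brokers._util import resproc  # assumed module path

log = logging.getLogger(__name__)


async def main() -> None:
    # `httpx.AsyncClient` replaces the old `asks.Session`; it runs
    # under `trio` via anyio.
    async with httpx.AsyncClient(
        base_url='https://api.binance.com',  # hypothetical example host
    ) as client:
        resp: httpx.Response = await client.get('/api/v3/time')
        data: dict = resproc(resp, log)  # raises `BrokerError` on non-200
        print(data)


trio.run(main)
```
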
|  | @ -25,14 +25,13 @@ from __future__ import annotations | ||||||
| from collections import ChainMap | from collections import ChainMap | ||||||
| from contextlib import ( | from contextlib import ( | ||||||
|     asynccontextmanager as acm, |     asynccontextmanager as acm, | ||||||
|  |     AsyncExitStack, | ||||||
| ) | ) | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from pprint import pformat | from pprint import pformat | ||||||
| from typing import ( | from typing import ( | ||||||
|     Any, |     Any, | ||||||
|     Callable, |     Callable, | ||||||
|     Hashable, |  | ||||||
|     Sequence, |  | ||||||
|     Type, |     Type, | ||||||
| ) | ) | ||||||
| import hmac | import hmac | ||||||
|  | @ -43,8 +42,7 @@ import trio | ||||||
| from pendulum import ( | from pendulum import ( | ||||||
|     now, |     now, | ||||||
| ) | ) | ||||||
| import asks | import httpx | ||||||
| from rapidfuzz import process as fuzzy |  | ||||||
| import numpy as np | import numpy as np | ||||||
| 
 | 
 | ||||||
| from piker import config | from piker import config | ||||||
|  | @ -54,6 +52,7 @@ from piker.clearing._messages import ( | ||||||
| from piker.accounting import ( | from piker.accounting import ( | ||||||
|     Asset, |     Asset, | ||||||
|     digits_to_dec, |     digits_to_dec, | ||||||
|  |     MktPair, | ||||||
| ) | ) | ||||||
| from piker.types import Struct | from piker.types import Struct | ||||||
| from piker.data import ( | from piker.data import ( | ||||||
|  | @ -69,7 +68,6 @@ from .venues import ( | ||||||
|     PAIRTYPES, |     PAIRTYPES, | ||||||
|     Pair, |     Pair, | ||||||
|     MarketType, |     MarketType, | ||||||
| 
 |  | ||||||
|     _spot_url, |     _spot_url, | ||||||
|     _futes_url, |     _futes_url, | ||||||
|     _testnet_futes_url, |     _testnet_futes_url, | ||||||
|  | @ -79,19 +77,18 @@ from .venues import ( | ||||||
| log = get_logger('piker.brokers.binance') | log = get_logger('piker.brokers.binance') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def get_config() -> dict: | def get_config() -> dict[str, Any]: | ||||||
| 
 |  | ||||||
|     conf: dict |     conf: dict | ||||||
|     path: Path |     path: Path | ||||||
|     conf, path = config.load( |     conf, path = config.load( | ||||||
|         conf_name='brokers', |         conf_name='brokers', | ||||||
|         touch_if_dne=True, |         touch_if_dne=True, | ||||||
|     ) |     ) | ||||||
| 
 |     section: dict = conf.get('binance') | ||||||
|     section = conf.get('binance') |  | ||||||
| 
 |  | ||||||
|     if not section: |     if not section: | ||||||
|         log.warning(f'No config section found for binance in {path}') |         log.warning( | ||||||
|  |             f'No config section found for binance in {path}' | ||||||
|  |         ) | ||||||
|         return {} |         return {} | ||||||
| 
 | 
 | ||||||
|     return section |     return section | ||||||
|  | @ -147,7 +144,7 @@ def binance_timestamp( | ||||||
| 
 | 
 | ||||||
| class Client: | class Client: | ||||||
|     ''' |     ''' | ||||||
|     Async ReST API client using ``trio`` + ``asks`` B) |     Async ReST API client using `trio` + `httpx` B) | ||||||
| 
 | 
 | ||||||
|     Supports all of the spot, margin and futures endpoints depending |     Supports all of the spot, margin and futures endpoints depending | ||||||
|     on method. |     on method. | ||||||
|  | @ -156,10 +153,17 @@ class Client: | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
| 
 | 
 | ||||||
|  |         venue_sessions: dict[ | ||||||
|  |             str,  # venue key | ||||||
|  |             tuple[httpx.AsyncClient, str]  # session, eps path | ||||||
|  |         ], | ||||||
|  |         conf: dict[str, Any], | ||||||
|         # TODO: change this to `Client.[mkt_]venue: MarketType`? |         # TODO: change this to `Client.[mkt_]venue: MarketType`? | ||||||
|         mkt_mode: MarketType = 'spot', |         mkt_mode: MarketType = 'spot', | ||||||
| 
 | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
|  |         self.conf = conf | ||||||
|  | 
 | ||||||
|         # build out pair info tables for each market type |         # build out pair info tables for each market type | ||||||
|         # and wrap in a chain-map view for search / query. |         # and wrap in a chain-map view for search / query. | ||||||
|         self._spot_pairs: dict[str, Pair] = {}  # spot info table |         self._spot_pairs: dict[str, Pair] = {}  # spot info table | ||||||
|  | @ -186,44 +190,13 @@ class Client: | ||||||
|         # market symbols for use by search. See `.exch_info()`. |         # market symbols for use by search. See `.exch_info()`. | ||||||
|         self._pairs: ChainMap[str, Pair] = ChainMap() |         self._pairs: ChainMap[str, Pair] = ChainMap() | ||||||
| 
 | 
 | ||||||
|         # spot EPs sesh |  | ||||||
|         self._sesh = asks.Session(connections=4) |  | ||||||
|         self._sesh.base_location: str = _spot_url |  | ||||||
|         # spot testnet |  | ||||||
|         self._test_sesh: asks.Session = asks.Session(connections=4) |  | ||||||
|         self._test_sesh.base_location: str = _testnet_spot_url |  | ||||||
| 
 |  | ||||||
|         # margin and extended spot endpoints session. |  | ||||||
|         self._sapi_sesh = asks.Session(connections=4) |  | ||||||
|         self._sapi_sesh.base_location: str = _spot_url |  | ||||||
| 
 |  | ||||||
|         # futes EPs sesh |  | ||||||
|         self._fapi_sesh = asks.Session(connections=4) |  | ||||||
|         self._fapi_sesh.base_location: str = _futes_url |  | ||||||
|         # futes testnet |  | ||||||
|         self._test_fapi_sesh: asks.Session = asks.Session(connections=4) |  | ||||||
|         self._test_fapi_sesh.base_location: str = _testnet_futes_url |  | ||||||
| 
 |  | ||||||
|         # global client "venue selection" mode. |         # global client "venue selection" mode. | ||||||
|         # set this when you want to switch venues and not have to |         # set this when you want to switch venues and not have to | ||||||
|         # specify the venue for the next request. |         # specify the venue for the next request. | ||||||
|         self.mkt_mode: MarketType = mkt_mode |         self.mkt_mode: MarketType = mkt_mode | ||||||
| 
 | 
 | ||||||
|         # per 8 |         # per-mkt-venue API client table | ||||||
|         self.venue_sesh: dict[ |         self.venue_sesh = venue_sessions | ||||||
|             str,  # venue key |  | ||||||
|             tuple[asks.Session, str]  # session, eps path |  | ||||||
|         ] = { |  | ||||||
|             'spot': (self._sesh, '/api/v3/'), |  | ||||||
|             'spot_testnet': (self._test_sesh, '/fapi/v1/'), |  | ||||||
| 
 |  | ||||||
|             'margin': (self._sapi_sesh, '/sapi/v1/'), |  | ||||||
| 
 |  | ||||||
|             'usdtm_futes': (self._fapi_sesh, '/fapi/v1/'), |  | ||||||
|             'usdtm_futes_testnet': (self._test_fapi_sesh, '/fapi/v1/'), |  | ||||||
| 
 |  | ||||||
|             # 'futes_coin': self._dapi,  # TODO |  | ||||||
|         } |  | ||||||
| 
 | 
 | ||||||
|         # lookup for going from `.mkt_mode: str` to the config |         # lookup for going from `.mkt_mode: str` to the config | ||||||
|         # subsection `key: str` |         # subsection `key: str` | ||||||
|  | @ -238,40 +211,6 @@ class Client: | ||||||
|             'futes': ['usdtm_futes'], |             'futes': ['usdtm_futes'], | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         # for creating API keys see, |  | ||||||
|         # https://www.binance.com/en/support/faq/how-to-create-api-keys-on-binance-360002502072 |  | ||||||
|         self.conf: dict = get_config() |  | ||||||
| 
 |  | ||||||
|         for key, subconf in self.conf.items(): |  | ||||||
|             if api_key := subconf.get('api_key', ''): |  | ||||||
|                 venue_keys: list[str] = self.confkey2venuekeys[key] |  | ||||||
| 
 |  | ||||||
|                 venue_key: str |  | ||||||
|                 sesh: asks.Session |  | ||||||
|                 for venue_key in venue_keys: |  | ||||||
|                     sesh, _ = self.venue_sesh[venue_key] |  | ||||||
| 
 |  | ||||||
|                     api_key_header: dict = { |  | ||||||
|                         # taken from official: |  | ||||||
|                         # https://github.com/binance/binance-futures-connector-python/blob/main/binance/api.py#L47 |  | ||||||
|                         "Content-Type": "application/json;charset=utf-8", |  | ||||||
| 
 |  | ||||||
|                         # TODO: prolly should just always query and copy |  | ||||||
|                         # in the real latest ver? |  | ||||||
|                         "User-Agent": "binance-connector/6.1.6smbz6", |  | ||||||
|                         "X-MBX-APIKEY": api_key, |  | ||||||
|                     } |  | ||||||
|                     sesh.headers.update(api_key_header) |  | ||||||
| 
 |  | ||||||
|                     # if `.use_tesnet = true` in the config then |  | ||||||
|                     # also add headers for the testnet session which |  | ||||||
|                     # will be used for all order control |  | ||||||
|                     if subconf.get('use_testnet', False): |  | ||||||
|                         testnet_sesh, _ = self.venue_sesh[ |  | ||||||
|                             venue_key + '_testnet' |  | ||||||
|                         ] |  | ||||||
|                         testnet_sesh.headers.update(api_key_header) |  | ||||||
| 
 |  | ||||||
|     def _mk_sig( |     def _mk_sig( | ||||||
|         self, |         self, | ||||||
|         data: dict, |         data: dict, | ||||||
|  | @ -290,7 +229,6 @@ class Client: | ||||||
|                 'to define the creds for auth-ed endpoints!?' |                 'to define the creds for auth-ed endpoints!?' | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|         # XXX: Info on security and authentification |         # XXX: Info on security and authentification | ||||||
|         # https://binance-docs.github.io/apidocs/#endpoint-security-type |         # https://binance-docs.github.io/apidocs/#endpoint-security-type | ||||||
|         if not (api_secret := subconf.get('api_secret')): |         if not (api_secret := subconf.get('api_secret')): | ||||||
|  | @ -330,8 +268,9 @@ class Client: | ||||||
|         - /fapi/v3/ USD-M FUTURES, or |         - /fapi/v3/ USD-M FUTURES, or | ||||||
|         - /api/v3/ SPOT/MARGIN |         - /api/v3/ SPOT/MARGIN | ||||||
| 
 | 
 | ||||||
|         account/market endpoint request depending on either passed in `venue: str` |         account/market endpoint request depending on either passed in | ||||||
|         or the current setting `.mkt_mode: str` setting, default `'spot'`. |         `venue: str` or the current `.mkt_mode: str` setting, | ||||||
|  |         default `'spot'`. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|         Docs per venue API: |         Docs per venue API: | ||||||
|  | @ -360,9 +299,6 @@ class Client: | ||||||
|                 venue=venue_key, |                 venue=venue_key, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|         sesh: asks.Session |  | ||||||
|         path: str |  | ||||||
| 
 |  | ||||||
|         # Check if we're configured to route order requests to the |         # Check if we're configured to route order requests to the | ||||||
|         # venue equivalent's testnet. |         # venue equivalent's testnet. | ||||||
|         use_testnet: bool = False |         use_testnet: bool = False | ||||||
|  | @ -387,11 +323,12 @@ class Client: | ||||||
|             # ctl machinery B) |             # ctl machinery B) | ||||||
|             venue_key += '_testnet' |             venue_key += '_testnet' | ||||||
| 
 | 
 | ||||||
|         sesh, path = self.venue_sesh[venue_key] |         client: httpx.AsyncClient | ||||||
| 
 |         path: str | ||||||
|         meth: Callable = getattr(sesh, method) |         client, path = self.venue_sesh[venue_key] | ||||||
|  |         meth: Callable = getattr(client, method) | ||||||
|         resp = await meth( |         resp = await meth( | ||||||
|             path=path + endpoint, |             url=path + endpoint, | ||||||
|             params=params, |             params=params, | ||||||
|             timeout=float('inf'), |             timeout=float('inf'), | ||||||
|         ) |         ) | ||||||
|  | @ -433,7 +370,15 @@ class Client: | ||||||
|                 item['filters'] = filters |                 item['filters'] = filters | ||||||
| 
 | 
 | ||||||
|             pair_type: Type = PAIRTYPES[venue] |             pair_type: Type = PAIRTYPES[venue] | ||||||
|  |             try: | ||||||
|                 pair: Pair = pair_type(**item) |                 pair: Pair = pair_type(**item) | ||||||
|  |             except Exception as e: | ||||||
|  |                 e.add_note( | ||||||
|  |                     "\nDon't panic, prolly stupid binance changed their symbology schema again..\n" | ||||||
|  |                     'Check out their API docs here:\n\n' | ||||||
|  |                     'https://binance-docs.github.io/apidocs/spot/en/#exchange-information' | ||||||
|  |                 ) | ||||||
|  |                 raise | ||||||
|             pair_table[pair.symbol.upper()] = pair |             pair_table[pair.symbol.upper()] = pair | ||||||
| 
 | 
 | ||||||
|             # update an additional top-level-cross-venue-table |             # update an additional top-level-cross-venue-table | ||||||
|  | @ -528,7 +473,9 @@ class Client: | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         pair_table: dict[str, Pair] = self._venue2pairs[ |         pair_table: dict[str, Pair] = self._venue2pairs[ | ||||||
|             venue or self.mkt_mode |             venue | ||||||
|  |             or | ||||||
|  |             self.mkt_mode | ||||||
|         ] |         ] | ||||||
|         if ( |         if ( | ||||||
|             expiry |             expiry | ||||||
|  | @ -547,9 +494,9 @@ class Client: | ||||||
|             venues: list[str] = [venue] |             venues: list[str] = [venue] | ||||||
| 
 | 
 | ||||||
|         # batch per-venue download of all exchange infos |         # batch per-venue download of all exchange infos | ||||||
|         async with trio.open_nursery() as rn: |         async with trio.open_nursery() as tn: | ||||||
|             for ven in venues: |             for ven in venues: | ||||||
|                 rn.start_soon( |                 tn.start_soon( | ||||||
|                     self._cache_pairs, |                     self._cache_pairs, | ||||||
|                     ven, |                     ven, | ||||||
|                 ) |                 ) | ||||||
|  | @ -602,11 +549,11 @@ class Client: | ||||||
| 
 | 
 | ||||||
|     ) -> dict[str, Any]: |     ) -> dict[str, Any]: | ||||||
| 
 | 
 | ||||||
|         fq_pairs: dict = await self.exch_info() |         fq_pairs: dict[str, Pair] = await self.exch_info() | ||||||
| 
 | 
 | ||||||
|         # TODO: cache this list like we were in |         # TODO: cache this list like we were in | ||||||
|         # `open_symbol_search()`? |         # `open_symbol_search()`? | ||||||
|         keys: list[str] = list(fq_pairs) |         # keys: list[str] = list(fq_pairs) | ||||||
| 
 | 
 | ||||||
|         return match_from_pairs( |         return match_from_pairs( | ||||||
|             pairs=fq_pairs, |             pairs=fq_pairs, | ||||||
|  | @ -614,9 +561,20 @@ class Client: | ||||||
|             score_cutoff=50, |             score_cutoff=50, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  |     def pair2venuekey( | ||||||
|  |         self, | ||||||
|  |         pair: Pair, | ||||||
|  |     ) -> str: | ||||||
|  |         return { | ||||||
|  |             'USDTM': 'usdtm_futes', | ||||||
|  |             'SPOT': 'spot', | ||||||
|  |             # 'COINM': 'coin_futes', | ||||||
|  |             # ^-TODO-^ bc someone might want it..? | ||||||
|  |         }[pair.venue] | ||||||
|  | 
 | ||||||
|     async def bars( |     async def bars( | ||||||
|         self, |         self, | ||||||
|         symbol: str, |         mkt: MktPair, | ||||||
| 
 | 
 | ||||||
|         start_dt: datetime | None = None, |         start_dt: datetime | None = None, | ||||||
|         end_dt: datetime | None = None, |         end_dt: datetime | None = None, | ||||||
|  | @ -646,16 +604,20 @@ class Client: | ||||||
|         start_time = binance_timestamp(start_dt) |         start_time = binance_timestamp(start_dt) | ||||||
|         end_time = binance_timestamp(end_dt) |         end_time = binance_timestamp(end_dt) | ||||||
| 
 | 
 | ||||||
|  |         bs_pair: Pair = self._pairs[mkt.bs_fqme.upper()] | ||||||
|  | 
 | ||||||
|         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data |         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data | ||||||
|         bars = await self._api( |         bars = await self._api( | ||||||
|             'klines', |             'klines', | ||||||
|             params={ |             params={ | ||||||
|                 'symbol': symbol.upper(), |                 # NOTE: always query using their native symbology! | ||||||
|  |                 'symbol': mkt.bs_mktid.upper(), | ||||||
|                 'interval': '1m', |                 'interval': '1m', | ||||||
|                 'startTime': start_time, |                 'startTime': start_time, | ||||||
|                 'endTime': end_time, |                 'endTime': end_time, | ||||||
|                 'limit': limit |                 'limit': limit | ||||||
|             }, |             }, | ||||||
|  |             venue=self.pair2venuekey(bs_pair), | ||||||
|             allow_testnet=False, |             allow_testnet=False, | ||||||
|         ) |         ) | ||||||
|         new_bars: list[tuple] = [] |         new_bars: list[tuple] = [] | ||||||
|  | @ -972,17 +934,148 @@ class Client: | ||||||
|         await self.close_listen_key(key) |         await self.close_listen_key(key) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | _venue_urls: dict[str, str] = { | ||||||
| async def get_client() -> Client: |     'spot': ( | ||||||
|  |         _spot_url, | ||||||
|  |         '/api/v3/', | ||||||
|  |     ), | ||||||
|  |     'spot_testnet': ( | ||||||
|  |         _testnet_spot_url, | ||||||
|  |         '/fapi/v1/' | ||||||
|  |     ), | ||||||
|  |     # margin and extended spot endpoints session. | ||||||
|  |     # TODO: did this ever get implemented fully? | ||||||
|  |     # 'margin': ( | ||||||
|  |     #     _spot_url, | ||||||
|  |     #     '/sapi/v1/' | ||||||
|  |     # ), | ||||||
| 
 | 
 | ||||||
|     client = Client() |     'usdtm_futes': ( | ||||||
|     await client.exch_info() |         _futes_url, | ||||||
|     log.info( |         '/fapi/v1/', | ||||||
|         f'{client} in {client.mkt_mode} mode: caching exchange infos..\n' |     ), | ||||||
|         'Cached multi-market pairs:\n' | 
 | ||||||
|         f'spot: {len(client._spot_pairs)}\n' |     'usdtm_futes_testnet': ( | ||||||
|         f'usdtm_futes: {len(client._ufutes_pairs)}\n' |         _testnet_futes_url, | ||||||
|         f'Total: {len(client._pairs)}\n' |         '/fapi/v1/', | ||||||
|  |     ), | ||||||
|  | 
 | ||||||
|  |     # TODO: for anyone who actually needs it ;P | ||||||
|  |     # 'coin_futes': () | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def init_api_keys( | ||||||
|  |     client: Client, | ||||||
|  |     conf: dict[str, Any], | ||||||
|  | ) -> None: | ||||||
|  |     ''' | ||||||
|  |     Set up per-venue API keys for each http client according to the user's | ||||||
|  |     `brokers.conf`. | ||||||
|  | 
 | ||||||
|  |     For ex, to use spot-testnet and live usdt futures APIs: | ||||||
|  | 
 | ||||||
|  |     ```toml | ||||||
|  |         [binance] | ||||||
|  |         # spot test net | ||||||
|  |         spot.use_testnet = true | ||||||
|  |         spot.api_key = '<spot_api_key_from_binance_account>' | ||||||
|  |         spot.api_secret = '<spot_api_key_password>' | ||||||
|  | 
 | ||||||
|  |         # futes live | ||||||
|  |         futes.use_testnet = false | ||||||
|  |         accounts.usdtm = 'futes' | ||||||
|  |         futes.api_key = '<futes_api_key_from_binance>' | ||||||
|  |         futes.api_secret = '<futes_api_key_password>' | ||||||
|  | 
 | ||||||
|  |         # if uncommented will use the built-in paper engine and not | ||||||
|  |         # connect to `binance` API servers for order ctl. | ||||||
|  |         # accounts.paper = 'paper' | ||||||
|  |     ``` | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     for key, subconf in conf.items(): | ||||||
|  |         if api_key := subconf.get('api_key', ''): | ||||||
|  |             venue_keys: list[str] = client.confkey2venuekeys[key] | ||||||
|  | 
 | ||||||
|  |             venue_key: str | ||||||
|  |             client: httpx.AsyncClient | ||||||
|  |             for venue_key in venue_keys: | ||||||
|  |                 client, _ = client.venue_sesh[venue_key] | ||||||
|  | 
 | ||||||
|  |                 api_key_header: dict = { | ||||||
|  |                     # taken from official: | ||||||
|  |                     # https://github.com/binance/binance-futures-connector-python/blob/main/binance/api.py#L47 | ||||||
|  |                     "Content-Type": "application/json;charset=utf-8", | ||||||
|  | 
 | ||||||
|  |                     # TODO: prolly should just always query and copy | ||||||
|  |                     # in the real latest ver? | ||||||
|  |                     "User-Agent": "binance-connector/6.1.6smbz6", | ||||||
|  |                     "X-MBX-APIKEY": api_key, | ||||||
|  |                 } | ||||||
|  |                 client.headers.update(api_key_header) | ||||||
|  | 
 | ||||||
|  |                 # if `.use_tesnet = true` in the config then | ||||||
|  |                 # also add headers for the testnet session which | ||||||
|  |                 # will be used for all order control | ||||||
|  |                 if subconf.get('use_testnet', False): | ||||||
|  |                     testnet_sesh, _ = client.venue_sesh[ | ||||||
|  |                         venue_key + '_testnet' | ||||||
|  |                     ] | ||||||
|  |                     testnet_sesh.headers.update(api_key_header) | ||||||
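For reference, the config-key to venue fan-out above relies on a small lookup table on the `Client`; its exact contents are not shown in this hunk, so the mapping below is only an illustrative guess of how `confkey2venuekeys` might be shaped:

```python
# a minimal sketch, NOT the real table: the actual
# `Client.confkey2venuekeys` contents live in the binance `api`
# module and may differ from this guess.
confkey2venuekeys: dict[str, list[str]] = {
    'spot': ['spot'],
    'futes': ['usdtm_futes'],
}

def venues_for(conf_key: str) -> list[str]:
    # resolve which venue sessions should receive the API-key
    # headers configured under a given `brokers.toml` sub-section.
    return confkey2venuekeys.get(conf_key, [])

assert venues_for('futes') == ['usdtm_futes']
assert venues_for('paper') == []
```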
|  | 
 | ||||||
|  | 
 | ||||||
|  | @acm | ||||||
|  | async def get_client( | ||||||
|  |     mkt_mode: MarketType = 'spot', | ||||||
|  | ) -> Client: | ||||||
|  |     ''' | ||||||
|  |     Construct a single `piker` client which composes multiple underlying venue | ||||||
|  |     specific API clients both for live and test networks. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     venue_sessions: dict[ | ||||||
|  |         str,  # venue key | ||||||
|  |         tuple[httpx.AsyncClient, str]  # session, eps path | ||||||
|  |     ] = {} | ||||||
|  |     async with AsyncExitStack() as client_stack: | ||||||
|  |         for name, (base_url, path) in _venue_urls.items(): | ||||||
|  |             api: httpx.AsyncClient = await client_stack.enter_async_context( | ||||||
|  |                 httpx.AsyncClient( | ||||||
|  |                     base_url=base_url, | ||||||
|  |                     # headers={}, | ||||||
|  | 
 | ||||||
|  |                     # TODO: is there a way to numerate this? | ||||||
|  |                     # https://www.python-httpx.org/advanced/clients/#why-use-a-client | ||||||
|  |                     # connections=4 | ||||||
|  |                 ) | ||||||
|  |             ) | ||||||
|  |             venue_sessions[name] = ( | ||||||
|  |                 api, | ||||||
|  |                 path, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|  |         conf: dict[str, Any] = get_config() | ||||||
|  |         # for creating API keys see, | ||||||
|  |         # https://www.binance.com/en/support/faq/how-to-create-api-keys-on-binance-360002502072 | ||||||
|  |         client = Client( | ||||||
|  |             venue_sessions=venue_sessions, | ||||||
|  |             conf=conf, | ||||||
|  |             mkt_mode=mkt_mode, | ||||||
|  |         ) | ||||||
|  |         init_api_keys( | ||||||
|  |             client=client, | ||||||
|  |             conf=conf, | ||||||
|  |         ) | ||||||
|  |         fq_pairs: dict[str, Pair] = await client.exch_info() | ||||||
|  |         assert fq_pairs | ||||||
|  |         log.info( | ||||||
|  |             f'Loaded multi-venue `Client` in mkt_mode={client.mkt_mode!r}\n\n' | ||||||
|  |             f'Symbology Summary:\n' | ||||||
|  |             f'------ - ------\n' | ||||||
|  |             f'spot: {len(client._spot_pairs)}\n' | ||||||
|  |             f'usdtm_futes: {len(client._ufutes_pairs)}\n' | ||||||
|  |             '------ - ------\n' | ||||||
|  |             f'total: {len(client._pairs)}\n' | ||||||
|  |         ) | ||||||
|         yield client |         yield client | ||||||
|  |  | ||||||
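For orientation, the composed client above is normally entered through piker's cached-client layer, but it can also be driven directly. A minimal sketch, assuming only the `get_client()` and `Client.exch_info()` APIs exactly as shown in this hunk:

```python
# sketch only: open the multi-venue client and print the
# symbology counts it caches on startup.
import trio

async def main() -> None:
    async with get_client(mkt_mode='spot') as client:
        pairs = await client.exch_info()  # already cached during setup
        print(
            f'{len(pairs)} total pairs loaded '
            f'(mkt_mode={client.mkt_mode!r})'
        )

if __name__ == '__main__':
    trio.run(main)
```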
|  | @ -264,15 +264,20 @@ async def open_trade_dialog( | ||||||
|     # do an open_symcache() call.. though maybe we can hide |     # do an open_symcache() call.. though maybe we can hide | ||||||
|     # this in a new async version of open_account()? |     # this in a new async version of open_account()? | ||||||
|     async with open_cached_client('binance') as client: |     async with open_cached_client('binance') as client: | ||||||
|         subconf: dict = client.conf[venue_name] |         subconf: dict|None = client.conf.get(venue_name) | ||||||
|         use_testnet = subconf.get('use_testnet', False) |  | ||||||
| 
 | 
 | ||||||
|         # XXX: if no futes.api_key or spot.api_key has been set we |         # XXX: if no futes.api_key or spot.api_key has been set we | ||||||
|         # always fall back to the paper engine! |         # always fall back to the paper engine! | ||||||
|         if not subconf.get('api_key'): |         if ( | ||||||
|  |             not subconf | ||||||
|  |             or | ||||||
|  |             not subconf.get('api_key') | ||||||
|  |         ): | ||||||
|             await ctx.started('paper') |             await ctx.started('paper') | ||||||
|             return |             return | ||||||
| 
 | 
 | ||||||
|  |         use_testnet: bool = subconf.get('use_testnet', False) | ||||||
|  | 
 | ||||||
|     async with ( |     async with ( | ||||||
|         open_cached_client('binance') as client, |         open_cached_client('binance') as client, | ||||||
|     ): |     ): | ||||||
|  |  | ||||||
|  | @ -42,12 +42,12 @@ from trio_typing import TaskStatus | ||||||
| from pendulum import ( | from pendulum import ( | ||||||
|     from_timestamp, |     from_timestamp, | ||||||
| ) | ) | ||||||
| from rapidfuzz import process as fuzzy |  | ||||||
| import numpy as np | import numpy as np | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| from piker.brokers import ( | from piker.brokers import ( | ||||||
|     open_cached_client, |     open_cached_client, | ||||||
|  |     NoData, | ||||||
| ) | ) | ||||||
| from piker._cacheables import ( | from piker._cacheables import ( | ||||||
|     async_lifo_cache, |     async_lifo_cache, | ||||||
|  | @ -110,6 +110,7 @@ class AggTrade(Struct, frozen=True): | ||||||
| 
 | 
 | ||||||
| async def stream_messages( | async def stream_messages( | ||||||
|     ws: NoBsWs, |     ws: NoBsWs, | ||||||
|  | 
 | ||||||
| ) -> AsyncGenerator[NoBsWs, dict]: | ) -> AsyncGenerator[NoBsWs, dict]: | ||||||
| 
 | 
 | ||||||
|     # TODO: match syntax here! |     # TODO: match syntax here! | ||||||
|  | @ -220,6 +221,8 @@ def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]: | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # TODO, why aren't frame resp `log.info()`s showing in upstream | ||||||
|  | # code?! | ||||||
| @acm | @acm | ||||||
| async def open_history_client( | async def open_history_client( | ||||||
|     mkt: MktPair, |     mkt: MktPair, | ||||||
|  | @ -252,24 +255,30 @@ async def open_history_client( | ||||||
|             else: |             else: | ||||||
|                 client.mkt_mode = 'spot' |                 client.mkt_mode = 'spot' | ||||||
| 
 | 
 | ||||||
|             # NOTE: always query using their native symbology! |             array: np.ndarray = await client.bars( | ||||||
|             mktid: str = mkt.bs_mktid |                 mkt=mkt, | ||||||
|             array = await client.bars( |  | ||||||
|                 mktid, |  | ||||||
|                 start_dt=start_dt, |                 start_dt=start_dt, | ||||||
|                 end_dt=end_dt, |                 end_dt=end_dt, | ||||||
|             ) |             ) | ||||||
|  |             if array.size == 0: | ||||||
|  |                 raise NoData( | ||||||
|  |                     f'No frame for {start_dt} -> {end_dt}\n' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|             times = array['time'] |             times = array['time'] | ||||||
|             if ( |             if not times.any(): | ||||||
|                 end_dt is None |                 raise ValueError( | ||||||
|             ): |                     'Bad frame with null-times?\n\n' | ||||||
|                 inow = round(time.time()) |                     f'{times}' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |             if end_dt is None: | ||||||
|  |                 inow: int = round(time.time()) | ||||||
|                 if (inow - times[-1]) > 60: |                 if (inow - times[-1]) > 60: | ||||||
|                     await tractor.pause() |                     await tractor.pause() | ||||||
| 
 | 
 | ||||||
|             start_dt = from_timestamp(times[0]) |             start_dt = from_timestamp(times[0]) | ||||||
|             end_dt = from_timestamp(times[-1]) |             end_dt = from_timestamp(times[-1]) | ||||||
| 
 |  | ||||||
|             return array, start_dt, end_dt |             return array, start_dt, end_dt | ||||||
| 
 | 
 | ||||||
|         yield get_ohlc, {'erlangs': 3, 'rate': 3} |         yield get_ohlc, {'erlangs': 3, 'rate': 3} | ||||||
|  | @ -456,6 +465,8 @@ async def stream_quotes( | ||||||
|     ): |     ): | ||||||
|         init_msgs: list[FeedInit] = [] |         init_msgs: list[FeedInit] = [] | ||||||
|         for sym in symbols: |         for sym in symbols: | ||||||
|  |             mkt: MktPair | ||||||
|  |             pair: Pair | ||||||
|             mkt, pair = await get_mkt_info(sym) |             mkt, pair = await get_mkt_info(sym) | ||||||
| 
 | 
 | ||||||
|             # build out init msgs according to latest spec |             # build out init msgs according to latest spec | ||||||
|  | @ -504,7 +515,6 @@ async def stream_quotes( | ||||||
| 
 | 
 | ||||||
|             # start streaming |             # start streaming | ||||||
|             async for typ, quote in msg_gen: |             async for typ, quote in msg_gen: | ||||||
| 
 |  | ||||||
|                 # period = time.time() - last |                 # period = time.time() - last | ||||||
|                 # hz = 1/period if period else float('inf') |                 # hz = 1/period if period else float('inf') | ||||||
|                 # if hz > 60: |                 # if hz > 60: | ||||||
|  | @ -540,7 +550,7 @@ async def open_symbol_search( | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|                 # repack in fqme-keyed table |                 # repack in fqme-keyed table | ||||||
|                 byfqme: dict[start, Pair] = {} |                 byfqme: dict[str, Pair] = {} | ||||||
|                 for pair in pairs.values(): |                 for pair in pairs.values(): | ||||||
|                     byfqme[pair.bs_fqme] = pair |                     byfqme[pair.bs_fqme] = pair | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -137,10 +137,12 @@ class SpotPair(Pair, frozen=True): | ||||||
|     quoteOrderQtyMarketAllowed: bool |     quoteOrderQtyMarketAllowed: bool | ||||||
|     isSpotTradingAllowed: bool |     isSpotTradingAllowed: bool | ||||||
|     isMarginTradingAllowed: bool |     isMarginTradingAllowed: bool | ||||||
|  |     otoAllowed: bool | ||||||
| 
 | 
 | ||||||
|     defaultSelfTradePreventionMode: str |     defaultSelfTradePreventionMode: str | ||||||
|     allowedSelfTradePreventionModes: list[str] |     allowedSelfTradePreventionModes: list[str] | ||||||
|     permissions: list[str] |     permissions: list[str] | ||||||
|  |     permissionSets: list[list[str]] | ||||||
| 
 | 
 | ||||||
|     # NOTE: see `.data._symcache.SymbologyCache.load()` for why |     # NOTE: see `.data._symcache.SymbologyCache.load()` for why | ||||||
|     ns_path: str = 'piker.brokers.binance:SpotPair' |     ns_path: str = 'piker.brokers.binance:SpotPair' | ||||||
|  | @ -179,7 +181,6 @@ class FutesPair(Pair): | ||||||
|     quoteAsset: str  # 'USDT', |     quoteAsset: str  # 'USDT', | ||||||
|     quotePrecision: int  # 8, |     quotePrecision: int  # 8, | ||||||
|     requiredMarginPercent: float  # '5.0000', |     requiredMarginPercent: float  # '5.0000', | ||||||
|     settlePlan: int  # 0, |  | ||||||
|     timeInForce: list[str]  # ['GTC', 'IOC', 'FOK', 'GTX'], |     timeInForce: list[str]  # ['GTC', 'IOC', 'FOK', 'GTX'], | ||||||
|     triggerProtect: float  # '0.0500', |     triggerProtect: float  # '0.0500', | ||||||
|     underlyingSubType: list[str]  # ['PoW'], |     underlyingSubType: list[str]  # ['PoW'], | ||||||
|  |  | ||||||
|  | @ -25,6 +25,7 @@ from .api import ( | ||||||
|     get_client, |     get_client, | ||||||
| ) | ) | ||||||
| from .feed import ( | from .feed import ( | ||||||
|  |     get_mkt_info, | ||||||
|     open_history_client, |     open_history_client, | ||||||
|     open_symbol_search, |     open_symbol_search, | ||||||
|     stream_quotes, |     stream_quotes, | ||||||
|  | @ -34,15 +35,20 @@ from .feed import ( | ||||||
|     # open_trade_dialog, |     # open_trade_dialog, | ||||||
|     # norm_trade_records, |     # norm_trade_records, | ||||||
| # ) | # ) | ||||||
|  | from .venues import ( | ||||||
|  |     OptionPair, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| __all__ = [ | __all__ = [ | ||||||
|     'get_client', |     'get_client', | ||||||
| #    'trades_dialogue', | #    'trades_dialogue', | ||||||
|  |     'get_mkt_info', | ||||||
|     'open_history_client', |     'open_history_client', | ||||||
|     'open_symbol_search', |     'open_symbol_search', | ||||||
|     'stream_quotes', |     'stream_quotes', | ||||||
|  |     'OptionPair', | ||||||
| #    'norm_trade_records', | #    'norm_trade_records', | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
										
											
File diff suppressed because it is too large
							|  | @ -18,38 +18,59 @@ | ||||||
| Deribit backend. | Deribit backend. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
|  | from __future__ import annotations | ||||||
| from contextlib import asynccontextmanager as acm | from contextlib import asynccontextmanager as acm | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from typing import Any, Optional, Callable | from typing import ( | ||||||
|  |     # Any, | ||||||
|  |     # Optional, | ||||||
|  |     Callable, | ||||||
|  | ) | ||||||
|  | # from pprint import pformat | ||||||
| import time | import time | ||||||
| 
 | 
 | ||||||
|  | import cryptofeed | ||||||
| import trio | import trio | ||||||
| from trio_typing import TaskStatus | from trio_typing import TaskStatus | ||||||
| import pendulum | from pendulum import ( | ||||||
| from rapidfuzz import process as fuzzy |     from_timestamp, | ||||||
|  | ) | ||||||
| import numpy as np | import numpy as np | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| from piker.brokers import open_cached_client | from piker.accounting import ( | ||||||
| from piker.log import get_logger, get_console_log |     Asset, | ||||||
| from piker.data import ShmArray |     MktPair, | ||||||
| from piker.brokers._util import ( |     unpack_fqme, | ||||||
|     BrokerError, | ) | ||||||
|  | from piker.brokers import ( | ||||||
|  |     open_cached_client, | ||||||
|  |     NoData, | ||||||
|     DataUnavailable, |     DataUnavailable, | ||||||
| ) | ) | ||||||
| 
 | from piker._cacheables import ( | ||||||
| from cryptofeed import FeedHandler |     async_lifo_cache, | ||||||
| from cryptofeed.defines import ( |  | ||||||
|     DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT |  | ||||||
| ) | ) | ||||||
| from cryptofeed.symbols import Symbol | from piker.log import ( | ||||||
|  |     get_logger, | ||||||
|  |     mk_repr, | ||||||
|  | ) | ||||||
|  | from piker.data.validate import FeedInit | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| from .api import ( | from .api import ( | ||||||
|     Client, Trade, |     Client, | ||||||
|     get_config, |     # get_config, | ||||||
|     str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst, |     piker_sym_to_cb_sym, | ||||||
|  |     cb_sym_to_deribit_inst, | ||||||
|  |     str_to_cb_sym, | ||||||
|     maybe_open_price_feed |     maybe_open_price_feed | ||||||
| ) | ) | ||||||
|  | from .venues import ( | ||||||
|  |     Pair, | ||||||
|  |     OptionPair, | ||||||
|  |     Trade, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| _spawn_kwargs = { | _spawn_kwargs = { | ||||||
|     'infect_asyncio': True, |     'infect_asyncio': True, | ||||||
|  | @ -64,90 +85,215 @@ async def open_history_client( | ||||||
|     mkt: MktPair, |     mkt: MktPair, | ||||||
| ) -> tuple[Callable, int]: | ) -> tuple[Callable, int]: | ||||||
| 
 | 
 | ||||||
|     fnstrument: str = mkt.bs_fqme |  | ||||||
|     # TODO implement history getter for the new storage layer. |     # TODO implement history getter for the new storage layer. | ||||||
|     async with open_cached_client('deribit') as client: |     async with open_cached_client('deribit') as client: | ||||||
| 
 | 
 | ||||||
|  |         pair: OptionPair = client._pairs[mkt.dst.name] | ||||||
|  |         # XXX NOTE, the cuckers use ms !!! | ||||||
|  |         creation_time_s: int = pair.creation_timestamp/1000 | ||||||
|  | 
 | ||||||
|         async def get_ohlc( |         async def get_ohlc( | ||||||
|             end_dt: Optional[datetime] = None, |             timeframe: float, | ||||||
|             start_dt: Optional[datetime] = None, |             end_dt: datetime | None = None, | ||||||
|  |             start_dt: datetime | None = None, | ||||||
| 
 | 
 | ||||||
|         ) -> tuple[ |         ) -> tuple[ | ||||||
|             np.ndarray, |             np.ndarray, | ||||||
|             datetime,  # start |             datetime,  # start | ||||||
|             datetime,  # end |             datetime,  # end | ||||||
|         ]: |         ]: | ||||||
|  |             if timeframe != 60: | ||||||
|  |                 raise DataUnavailable('Only 1m bars are supported') | ||||||
| 
 | 
 | ||||||
|             array = await client.bars( |             array: np.ndarray = await client.bars( | ||||||
|                 instrument, |                 mkt, | ||||||
|                 start_dt=start_dt, |                 start_dt=start_dt, | ||||||
|                 end_dt=end_dt, |                 end_dt=end_dt, | ||||||
|             ) |             ) | ||||||
|             if len(array) == 0: |             if len(array) == 0: | ||||||
|                 raise DataUnavailable |                 if ( | ||||||
|  |                     end_dt is None | ||||||
|  |                 ): | ||||||
|  |                     raise DataUnavailable( | ||||||
|  |                         'No history seems to exist yet?\n\n' | ||||||
|  |                         f'{mkt}' | ||||||
|  |                     ) | ||||||
|  |                 elif ( | ||||||
|  |                     end_dt | ||||||
|  |                     and | ||||||
|  |                     end_dt.timestamp() < creation_time_s | ||||||
|  |                 ): | ||||||
|  |                     # the contract can't have history | ||||||
|  |                     # before it was created. | ||||||
|  |                     pair_type_str: str = type(pair).__name__ | ||||||
|  |                     create_dt: datetime = from_timestamp(creation_time_s) | ||||||
|  |                     raise DataUnavailable( | ||||||
|  |                         f'No history prior to\n' | ||||||
|  |                         f'`{pair_type_str}.creation_timestamp: int = ' | ||||||
|  |                         f'{pair.creation_timestamp}\n\n' | ||||||
|  |                         f'------ deribit sux ------\n' | ||||||
|  |                         f'WHICH IN "NORMAL PEOPLE WHO USE EPOCH TIME" form is,\n' | ||||||
|  |                         f'creation_time_s: {creation_time_s}\n' | ||||||
|  |                         f'create_dt: {create_dt}\n' | ||||||
|  |                     ) | ||||||
|  |                 raise NoData( | ||||||
|  |                     f'No frame for {start_dt} -> {end_dt}\n' | ||||||
|  |                 ) | ||||||
| 
 | 
 | ||||||
|             start_dt = pendulum.from_timestamp(array[0]['time']) |             start_dt = from_timestamp(array[0]['time']) | ||||||
|             end_dt = pendulum.from_timestamp(array[-1]['time']) |             end_dt = from_timestamp(array[-1]['time']) | ||||||
|  | 
 | ||||||
|  |             times = array['time'] | ||||||
|  |             if not times.any(): | ||||||
|  |                 raise ValueError( | ||||||
|  |                     'Bad frame with null-times?\n\n' | ||||||
|  |                     f'{times}' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |             if end_dt is None: | ||||||
|  |                 inow: int = round(time.time()) | ||||||
|  |                 if (inow - times[-1]) > 60: | ||||||
|  |                     await tractor.pause() | ||||||
| 
 | 
 | ||||||
|             return array, start_dt, end_dt |             return array, start_dt, end_dt | ||||||
| 
 | 
 | ||||||
|         yield get_ohlc, {'erlangs': 3, 'rate': 3} |         yield ( | ||||||
|  |             get_ohlc, | ||||||
|  |             {  # backfill config | ||||||
|  |                 'erlangs': 3, | ||||||
|  |                 'rate': 3, | ||||||
|  |             } | ||||||
|  |         ) | ||||||
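For orientation, the `(get_ohlc, config)` pair yielded above is consumed by piker's backfill engine; a stand-in caller might look roughly like the following sketch (it assumes a resolved `MktPair` instance is passed in and is not the real `piker.data` code path):

```python
from datetime import datetime
import numpy as np

async def backfill_latest_frame(
    mkt,  # a resolved `MktPair` (assumed to be in scope / pre-built)
) -> tuple[np.ndarray, datetime, datetime]:
    # drive the deribit history endpoint for the most-recent 1m frame
    async with open_history_client(mkt) as (get_ohlc, config):
        assert config == {'erlangs': 3, 'rate': 3}
        return await get_ohlc(
            timeframe=60,   # seconds-per-bar; only 1m is supported
            end_dt=None,    # None -> "latest" frame
        )
```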
|  | 
 | ||||||
|  | 
 | ||||||
|  | @async_lifo_cache() | ||||||
|  | async def get_mkt_info( | ||||||
|  |     fqme: str, | ||||||
|  | 
 | ||||||
|  | ) -> tuple[MktPair, Pair|OptionPair] | None: | ||||||
|  | 
 | ||||||
|  |     # ensure the fqme is suffixed with the broker name ('.deribit') | ||||||
|  |     if 'deribit' not in fqme.lower(): | ||||||
|  |         fqme += '.deribit' | ||||||
|  | 
 | ||||||
|  |     mkt_mode: str = '' | ||||||
|  |     broker, mkt_ep, venue, expiry = unpack_fqme(fqme) | ||||||
|  | 
 | ||||||
|  |     # NOTE: we always upper case all tokens to be consistent with | ||||||
|  |     # binance's symbology style for pairs, like `BTCUSDT`, but in | ||||||
|  |     # theory we could also just keep things lower case; as long as | ||||||
|  |     # we're consistent and the symcache matches whatever this func | ||||||
|  |     # returns, always! | ||||||
|  |     expiry: str = expiry.upper() | ||||||
|  |     venue: str = venue.upper() | ||||||
|  |     # venue_lower: str = venue.lower() | ||||||
|  | 
 | ||||||
|  |     mkt_mode: str = 'option' | ||||||
|  | 
 | ||||||
|  |     async with open_cached_client( | ||||||
|  |         'deribit', | ||||||
|  |     ) as client: | ||||||
|  | 
 | ||||||
|  |         assets: dict[str, Asset] = await client.get_assets() | ||||||
|  |         pair_str: str = mkt_ep.lower() | ||||||
|  | 
 | ||||||
|  |         pair: Pair = await client.exch_info( | ||||||
|  |             sym=pair_str, | ||||||
|  |         ) | ||||||
|  |         mkt_mode = pair.venue | ||||||
|  |         client.mkt_mode = mkt_mode | ||||||
|  | 
 | ||||||
|  |         dst: Asset | None = assets.get(pair.bs_dst_asset) | ||||||
|  |         src: Asset | None = assets.get(pair.bs_src_asset) | ||||||
|  | 
 | ||||||
|  |         mkt = MktPair( | ||||||
|  |             dst=dst, | ||||||
|  |             src=src, | ||||||
|  |             price_tick=pair.price_tick, | ||||||
|  |             size_tick=pair.size_tick, | ||||||
|  |             bs_mktid=pair.symbol, | ||||||
|  |             venue=mkt_mode, | ||||||
|  |             broker='deribit', | ||||||
|  |             _atype=mkt_mode, | ||||||
|  |             _fqme_without_src=True, | ||||||
|  | 
 | ||||||
|  |             # expiry=pair.expiry, | ||||||
|  |             # XXX TODO, currently we don't use it since it's | ||||||
|  |             # already "described" in the `OptionPair.symbol: str` | ||||||
|  |             # and if we slap in the ISO repr it's kinda hideous.. | ||||||
|  |             # -[ ] figure out the best either std | ||||||
|  |         ) | ||||||
|  |         return mkt, pair | ||||||
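A usage sketch for the resolver above; the instrument name is borrowed from the `OptionPair` field comments further down and is illustrative only:

```python
# sketch: resolve an option fqme into a (MktPair, OptionPair) tuple
async def show_mkt() -> None:
    mkt, pair = await get_mkt_info('btc-1sep24-55000-c.deribit')
    assert mkt.broker == 'deribit'
    print(
        f'fqme: {mkt.bs_fqme}\n'
        f'expiry: {pair.expiry}\n'
        f'price tick: {pair.price_tick}'
    )
```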
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def stream_quotes( | async def stream_quotes( | ||||||
| 
 |  | ||||||
|     send_chan: trio.abc.SendChannel, |     send_chan: trio.abc.SendChannel, | ||||||
|     symbols: list[str], |     symbols: list[str], | ||||||
|     feed_is_live: trio.Event, |     feed_is_live: trio.Event, | ||||||
|     loglevel: str = None, |  | ||||||
| 
 | 
 | ||||||
|     # startup sync |     # startup sync | ||||||
|     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, |     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     # XXX: required to propagate ``tractor`` loglevel to piker logging |     ''' | ||||||
|     get_console_log(loglevel or tractor.current_actor().loglevel) |     Open a live quote stream for the market set defined by `symbols`. | ||||||
| 
 | 
 | ||||||
|     sym = symbols[0] |     Internally this starts a `cryptofeed.FeedHandler` inside an `asyncio`-side | ||||||
|  |     task and relays the L1 book and `Trade` msgs through to our `trio.Task`. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     sym = symbols[0].split('.')[0] | ||||||
|  |     init_msgs: list[FeedInit] = [] | ||||||
|  | 
 | ||||||
|  |     # multiline nested `dict` formatter (since rn quote-msgs are | ||||||
|  |     # just that). | ||||||
|  |     pfmt: Callable[[str], str] = mk_repr( | ||||||
|  |         # so we can see `deribit`'s delightfully mega-long bs fields.. | ||||||
|  |         maxstring=100, | ||||||
|  |     ) | ||||||
| 
 | 
 | ||||||
|     async with ( |     async with ( | ||||||
|         open_cached_client('deribit') as client, |         open_cached_client('deribit') as client, | ||||||
|         send_chan as send_chan |         send_chan as send_chan | ||||||
|     ): |     ): | ||||||
|  |         mkt: MktPair | ||||||
|  |         pair: Pair | ||||||
|  |         mkt, pair = await get_mkt_info(sym) | ||||||
| 
 | 
 | ||||||
|         init_msgs = { |         # build out init msgs according to latest spec | ||||||
|             # pass back token, and bool, signalling if we're the writer |         init_msgs.append( | ||||||
|             # and that history has been written |             FeedInit( | ||||||
|             sym: { |                 mkt_info=mkt, | ||||||
|                 'symbol_info': { |             ) | ||||||
|                     'asset_type': 'option', |         ) | ||||||
|                     'price_tick_size': 0.0005 |         # build `cryptofeed` feed-handle | ||||||
|                 }, |         cf_sym: cryptofeed.Symbol = piker_sym_to_cb_sym(sym) | ||||||
|                 'shm_write_opts': {'sum_tick_vml': False}, |  | ||||||
|                 'fqsn': sym, |  | ||||||
|             }, |  | ||||||
|         } |  | ||||||
| 
 | 
 | ||||||
|         nsym = piker_sym_to_cb_sym(sym) |         from_cf: tractor.to_asyncio.LinkedTaskChannel | ||||||
|  |         async with maybe_open_price_feed(sym) as from_cf: | ||||||
| 
 | 
 | ||||||
|         async with maybe_open_price_feed(sym) as stream: |             # load the "last trades" summary | ||||||
|  |             last_trades_res: cryptofeed.LastTradesResult = await client.last_trades( | ||||||
|  |                 cb_sym_to_deribit_inst(cf_sym), | ||||||
|  |                 count=1, | ||||||
|  |             ) | ||||||
|  |             last_trades: list[Trade] = last_trades_res.trades | ||||||
| 
 | 
 | ||||||
|             cache = await client.cache_symbols() |             # TODO, do we even need this or will the above always | ||||||
|  |             # work? | ||||||
|  |             # if not last_trades: | ||||||
|  |             #     await tractor.pause() | ||||||
|  |             #     async for typ, quote in from_cf: | ||||||
|  |             #         if typ == 'trade': | ||||||
|  |             #             last_trade = Trade(**(quote['data'])) | ||||||
|  |             #             break | ||||||
| 
 | 
 | ||||||
|             last_trades = (await client.last_trades( |             # else: | ||||||
|                 cb_sym_to_deribit_inst(nsym), count=1)).trades |             last_trade = Trade( | ||||||
|  |                 **(last_trades[0]) | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|             if len(last_trades) == 0: |             first_quote: dict = { | ||||||
|                 last_trade = None |  | ||||||
|                 async for typ, quote in stream: |  | ||||||
|                     if typ == 'trade': |  | ||||||
|                         last_trade = Trade(**(quote['data'])) |  | ||||||
|                         break |  | ||||||
| 
 |  | ||||||
|             else: |  | ||||||
|                 last_trade = Trade(**(last_trades[0])) |  | ||||||
| 
 |  | ||||||
|             first_quote = { |  | ||||||
|                 'symbol': sym, |                 'symbol': sym, | ||||||
|                 'last': last_trade.price, |                 'last': last_trade.price, | ||||||
|                 'brokerd_ts': last_trade.timestamp, |                 'brokerd_ts': last_trade.timestamp, | ||||||
|  | @ -158,13 +304,84 @@ async def stream_quotes( | ||||||
|                     'broker_ts': last_trade.timestamp |                     'broker_ts': last_trade.timestamp | ||||||
|                 }] |                 }] | ||||||
|             } |             } | ||||||
|             task_status.started((init_msgs,  first_quote)) |             task_status.started(( | ||||||
|  |                 init_msgs, | ||||||
|  |                 first_quote, | ||||||
|  |             )) | ||||||
| 
 | 
 | ||||||
|             feed_is_live.set() |             feed_is_live.set() | ||||||
| 
 | 
 | ||||||
|             async for typ, quote in stream: |             # NOTE XXX, static for now! | ||||||
|                 topic = quote['symbol'] |             # => since this only handles ONE mkt feed at a time we | ||||||
|                 await send_chan.send({topic: quote}) |             # don't need a lookup table to map interleaved quotes | ||||||
|  |             # from multiple possible mkt-pairs | ||||||
|  |             topic: str = mkt.bs_fqme | ||||||
|  | 
 | ||||||
|  |             # deliver until cancelled | ||||||
|  |             async for typ, ref in from_cf: | ||||||
|  |                 match typ: | ||||||
|  |                     case 'trade': | ||||||
|  |                         trade: cryptofeed.types.Trade = ref | ||||||
|  | 
 | ||||||
|  |                         # TODO, re-impl this according to the ideal | ||||||
|  |                         # fqme for opts that we choose!! | ||||||
|  |                         bs_fqme: str = cb_sym_to_deribit_inst( | ||||||
|  |                             str_to_cb_sym(trade.symbol) | ||||||
|  |                         ).lower() | ||||||
|  | 
 | ||||||
|  |                         piker_quote: dict = { | ||||||
|  |                             'symbol': bs_fqme, | ||||||
|  |                             'last': trade.price, | ||||||
|  |                             'broker_ts': time.time(), | ||||||
|  |                             # ^TODO, name this `brokerd/datad_ts` and | ||||||
|  |                             # use `time.time_ns()` ?? | ||||||
|  |                             'ticks': [{ | ||||||
|  |                                 'type': 'trade', | ||||||
|  |                                 'price': float(trade.price), | ||||||
|  |                                 'size': float(trade.amount), | ||||||
|  |                                 'broker_ts': trade.timestamp, | ||||||
|  |                             }], | ||||||
|  |                         } | ||||||
|  |                         log.info( | ||||||
|  |                             f'deribit {typ!r} quote for {sym!r}\n\n' | ||||||
|  |                             f'{trade}\n\n' | ||||||
|  |                             f'{pfmt(piker_quote)}\n' | ||||||
|  |                         ) | ||||||
|  | 
 | ||||||
|  |                     case 'l1': | ||||||
|  |                         book: cryptofeed.types.L1Book = ref | ||||||
|  | 
 | ||||||
|  |                         # TODO, so this is where we can possibly change things | ||||||
|  |                         # and instead lever the `MktPair.bs_fqme: str` output? | ||||||
|  |                         bs_fqme: str = cb_sym_to_deribit_inst( | ||||||
|  |                             str_to_cb_sym(book.symbol) | ||||||
|  |                         ).lower() | ||||||
|  | 
 | ||||||
|  |                         piker_quote: dict = { | ||||||
|  |                             'symbol': bs_fqme, | ||||||
|  |                             'ticks': [ | ||||||
|  | 
 | ||||||
|  |                                 {'type': 'bid', | ||||||
|  |                                  'price': float(book.bid_price), | ||||||
|  |                                  'size': float(book.bid_size)}, | ||||||
|  | 
 | ||||||
|  |                                 {'type': 'bsize', | ||||||
|  |                                  'price': float(book.bid_price), | ||||||
|  |                                  'size': float(book.bid_size),}, | ||||||
|  | 
 | ||||||
|  |                                 {'type': 'ask', | ||||||
|  |                                  'price': float(book.ask_price), | ||||||
|  |                                  'size': float(book.ask_size),}, | ||||||
|  | 
 | ||||||
|  |                                 {'type': 'asize', | ||||||
|  |                                  'price': float(book.ask_price), | ||||||
|  |                                  'size': float(book.ask_size),} | ||||||
|  |                             ] | ||||||
|  |                         } | ||||||
|  | 
 | ||||||
|  |                 await send_chan.send({ | ||||||
|  |                     topic: piker_quote, | ||||||
|  |                 }) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
|  | @ -174,12 +391,21 @@ async def open_symbol_search( | ||||||
|     async with open_cached_client('deribit') as client: |     async with open_cached_client('deribit') as client: | ||||||
| 
 | 
 | ||||||
|         # load all symbols locally for fast search |         # load all symbols locally for fast search | ||||||
|         cache = await client.cache_symbols() |         # cache = client._pairs | ||||||
|         await ctx.started() |         await ctx.started() | ||||||
| 
 | 
 | ||||||
|         async with ctx.open_stream() as stream: |         async with ctx.open_stream() as stream: | ||||||
| 
 |             pattern: str | ||||||
|             async for pattern in stream: |             async for pattern in stream: | ||||||
|                 # repack in dict form | 
 | ||||||
|                 await stream.send( |                 # NOTE: pattern fuzzy-matching is done within | ||||||
|                     await client.search_symbols(pattern)) |                 # the method impl. | ||||||
|  |                 pairs: dict[str, Pair] = await client.search_symbols( | ||||||
|  |                     pattern, | ||||||
|  |                 ) | ||||||
|  |                 # repack in fqme-keyed table | ||||||
|  |                 byfqme: dict[str, Pair] = {} | ||||||
|  |                 for pair in pairs.values(): | ||||||
|  |                     byfqme[pair.bs_fqme] = pair | ||||||
|  | 
 | ||||||
|  |                 await stream.send(byfqme) | ||||||
|  |  | ||||||
|  | @ -0,0 +1,196 @@ | ||||||
|  | # piker: trading gear for hackers | ||||||
|  | # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | Per-market data-type definitions and schema types. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | from __future__ import annotations | ||||||
|  | import pendulum | ||||||
|  | from typing import ( | ||||||
|  |     Literal, | ||||||
|  |     Optional, | ||||||
|  | ) | ||||||
|  | from decimal import Decimal | ||||||
|  | 
 | ||||||
|  | from piker.types import Struct | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # API endpoint paths by venue / sub-API | ||||||
|  | _domain: str = 'deribit.com' | ||||||
|  | _url = f'https://www.{_domain}' | ||||||
|  | 
 | ||||||
|  | # WEBsocketz | ||||||
|  | _ws_url: str = f'wss://www.{_domain}/ws/api/v2' | ||||||
|  | 
 | ||||||
|  | # test nets | ||||||
|  | _testnet_ws_url: str = f'wss://test.{_domain}/ws/api/v2' | ||||||
|  | 
 | ||||||
|  | MarketType = Literal[ | ||||||
|  |     'option' | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def get_api_eps(venue: MarketType) -> tuple[str, str]: | ||||||
|  |     ''' | ||||||
|  |     Return API ep root paths per venue. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     return { | ||||||
|  |         'option': ( | ||||||
|  |             _ws_url, | ||||||
|  |         ), | ||||||
|  |     }[venue] | ||||||
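Since only the `option` venue is defined so far, the lookup above yields a 1-tuple holding the main websocket url; a quick check:

```python
# unpack the single ws endpoint for the (only) supported venue
(ws_url,) = get_api_eps('option')
assert ws_url == 'wss://www.deribit.com/ws/api/v2'
```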
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Pair(Struct, frozen=True, kw_only=True): | ||||||
|  | 
 | ||||||
|  |     symbol: str | ||||||
|  | 
 | ||||||
|  |     # src | ||||||
|  |     quote_currency: str # 'BTC' | ||||||
|  | 
 | ||||||
|  |     # dst | ||||||
|  |     base_currency: str # "BTC", | ||||||
|  | 
 | ||||||
|  |     tick_size: float  # 0.0001 | ||||||
|  |     tick_size_steps: list[dict[str, float]]  # [{'above_price': 0.005, 'tick_size': 0.0005}] | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def price_tick(self) -> Decimal: | ||||||
|  |         return Decimal(str(self.tick_size_steps[0]['above_price'])) | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def size_tick(self) -> Decimal: | ||||||
|  |         return Decimal(str(self.tick_size)) | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def bs_fqme(self) -> str: | ||||||
|  |         return f'{self.symbol}' | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def bs_mktid(self) -> str: | ||||||
|  |         return f'{self.symbol}.{self.venue}' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class OptionPair(Pair, frozen=True): | ||||||
|  | 
 | ||||||
|  |     taker_commission: float # 0.0003 | ||||||
|  |     strike: float # 5000.0 | ||||||
|  |     settlement_period: str # 'day' | ||||||
|  |     settlement_currency: str # "BTC", | ||||||
|  |     rfq: bool # false | ||||||
|  |     price_index: str # 'btc_usd' | ||||||
|  |     option_type: str # 'call' | ||||||
|  |     min_trade_amount: float # 0.1 | ||||||
|  |     maker_commission: float # 0.0003 | ||||||
|  |     kind: str # 'option' | ||||||
|  |     is_active: bool # true | ||||||
|  |     instrument_type: str # 'reversed' | ||||||
|  |     instrument_name: str # 'BTC-1SEP24-55000-C' | ||||||
|  |     instrument_id: int # 364671 | ||||||
|  |     expiration_timestamp: int # 1725177600000 | ||||||
|  |     creation_timestamp: int # 1724918461000 | ||||||
|  |     counter_currency: str # 'USD'  | ||||||
|  |     contract_size: float # '1.0' | ||||||
|  |     block_trade_tick_size: float # '0.0001' | ||||||
|  |     block_trade_min_trade_amount: int # '25' | ||||||
|  |     block_trade_commission: float # '0.003' | ||||||
|  | 
 | ||||||
|  |     # NOTE: see `.data._symcache.SymbologyCache.load()` for why | ||||||
|  |     ns_path: str = 'piker.brokers.deribit:OptionPair' | ||||||
|  | 
 | ||||||
|  |     # TODO, impl this without the MM:SS part of | ||||||
|  |     # the `'THH:MM:SS..'` etc.. | ||||||
|  |     @property | ||||||
|  |     def expiry(self) -> str: | ||||||
|  |         iso_date = pendulum.from_timestamp( | ||||||
|  |             self.expiration_timestamp / 1000 | ||||||
|  |         ).isoformat() | ||||||
|  |         return iso_date  | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def venue(self) -> str: | ||||||
|  |         return f'{self.instrument_type}_option' | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def bs_fqme(self) -> str: | ||||||
|  |         return f'{self.symbol}' | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def bs_src_asset(self) -> str: | ||||||
|  |         return f'{self.quote_currency}' | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def bs_dst_asset(self) -> str: | ||||||
|  |         return f'{self.symbol}' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | PAIRTYPES: dict[MarketType, Pair] = { | ||||||
|  |     'option': OptionPair, | ||||||
|  | } | ||||||
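A small sketch of how the venue→struct table and the tick-size properties above behave; the field values are illustrative only (real payloads come from deribit's instrument listing and carry every `OptionPair` field, so constructing a full option struct is elided here):

```python
from decimal import Decimal

# pick the struct type for the only currently supported venue
pair_type: type[Pair] = PAIRTYPES['option']
assert pair_type is OptionPair

# the base `Pair` can be built standalone to show the tick props;
# values below are made up for illustration.
pair = Pair(
    symbol='BTC-1SEP24-55000-C',
    quote_currency='BTC',
    base_currency='BTC',
    tick_size=0.0005,
    tick_size_steps=[{'above_price': 0.005, 'tick_size': 0.0005}],
)
assert pair.size_tick == Decimal('0.0005')
assert pair.price_tick == Decimal('0.005')
```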
|  | 
 | ||||||
|  | 
 | ||||||
|  | class JSONRPCResult(Struct): | ||||||
|  |     id: int | ||||||
|  |     usIn: int | ||||||
|  |     usOut: int | ||||||
|  |     usDiff: int | ||||||
|  |     testnet: bool | ||||||
|  |     jsonrpc: str = '2.0' | ||||||
|  |     error: Optional[dict] = None | ||||||
|  |     result: Optional[list[dict]] = None | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class JSONRPCChannel(Struct): | ||||||
|  |     method: str | ||||||
|  |     params: dict | ||||||
|  |     jsonrpc: str = '2.0' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class KLinesResult(Struct): | ||||||
|  |     low: list[float] | ||||||
|  |     cost: list[float] | ||||||
|  |     high: list[float] | ||||||
|  |     open: list[float] | ||||||
|  |     close: list[float] | ||||||
|  |     ticks: list[int] | ||||||
|  |     status: str | ||||||
|  |     volume: list[float] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Trade(Struct): | ||||||
|  |     iv: float | ||||||
|  |     price: float | ||||||
|  |     amount: float | ||||||
|  |     trade_id: str | ||||||
|  |     contracts: float | ||||||
|  |     direction: str | ||||||
|  |     trade_seq: int | ||||||
|  |     timestamp: int | ||||||
|  |     mark_price: float | ||||||
|  |     index_price: float | ||||||
|  |     tick_direction: int | ||||||
|  |     instrument_name: str | ||||||
|  |     combo_id: Optional[str] = '' | ||||||
|  |     combo_trade_id: Optional[int] = 0 | ||||||
|  |     block_trade_id: Optional[str] = '' | ||||||
|  |     block_trade_leg_count: Optional[int] = 0 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class LastTradesResult(Struct): | ||||||
|  |     trades: list[Trade] | ||||||
|  |     has_more: bool | ||||||
|  | @ -100,7 +100,7 @@ async def data_reset_hack( | ||||||
|         log.warning( |         log.warning( | ||||||
|             no_setup_msg |             no_setup_msg | ||||||
|             + |             + | ||||||
|             f'REQUIRES A `vnc_addrs: array` ENTRY' |             'REQUIRES A `vnc_addrs: array` ENTRY' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|     vnc_host, vnc_port = vnc_sockaddr.get( |     vnc_host, vnc_port = vnc_sockaddr.get( | ||||||
|  |  | ||||||
|  | @ -287,9 +287,31 @@ class Client: | ||||||
|         self.conf = config |         self.conf = config | ||||||
| 
 | 
 | ||||||
|         # NOTE: the ib.client here is "throttled" to 45 rps by default |         # NOTE: the ib.client here is "throttled" to 45 rps by default | ||||||
|         self.ib = ib |         self.ib: IB = ib | ||||||
|         self.ib.RaiseRequestErrors: bool = True |         self.ib.RaiseRequestErrors: bool = True | ||||||
| 
 | 
 | ||||||
|  |         # self._acnt_names: set[str] = {} | ||||||
|  |         self._acnt_names: list[str] = [] | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def acnts(self) -> list[str]: | ||||||
|  |         # return list(self._acnt_names) | ||||||
|  |         return self._acnt_names | ||||||
|  | 
 | ||||||
|  |     def __repr__(self) -> str: | ||||||
|  |         return ( | ||||||
|  |             f'<{type(self).__name__}(' | ||||||
|  |             f'ib={self.ib} ' | ||||||
|  |             f'acnts={self.acnts}' | ||||||
|  | 
 | ||||||
|  |             # TODO: we need to mask out acnt-#s and other private | ||||||
|  |             # infos if we're going to console this! | ||||||
|  |             # f' |_.conf:\n' | ||||||
|  |             # f'    {pformat(self.conf)}\n' | ||||||
|  | 
 | ||||||
|  |             ')>' | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|     async def get_fills(self) -> list[Fill]: |     async def get_fills(self) -> list[Fill]: | ||||||
|         ''' |         ''' | ||||||
|         Return list of recent `Fills` from trading session. |         Return list of recent `Fills` from trading session. | ||||||
|  | @ -376,19 +398,21 @@ class Client: | ||||||
|             # whatToShow='MIDPOINT', |             # whatToShow='MIDPOINT', | ||||||
|             # whatToShow='TRADES', |             # whatToShow='TRADES', | ||||||
|         ) |         ) | ||||||
|         log.info( |  | ||||||
|             f'REQUESTING {ib_duration_str} worth {bar_size} BARS\n' |  | ||||||
|             f'fqme: {fqme}\n' |  | ||||||
|             f'global _enters: {_enters}\n' |  | ||||||
|             f'kwargs: {pformat(kwargs)}\n' |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         bars = await self.ib.reqHistoricalDataAsync( |         bars = await self.ib.reqHistoricalDataAsync( | ||||||
|             **kwargs, |             **kwargs, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  |         query_info: str = ( | ||||||
|  |             f'REQUESTING IB history BARS\n' | ||||||
|  |             f'    ------ - ------\n' | ||||||
|  |             f'dt_duration: {dt_duration}\n' | ||||||
|  |             f'ib_duration_str: {ib_duration_str}\n' | ||||||
|  |             f'bar_size: {bar_size}\n' | ||||||
|  |             f'fqme: {fqme}\n' | ||||||
|  |             f'actor-global _enters: {_enters}\n' | ||||||
|  |             f'kwargs: {pformat(kwargs)}\n' | ||||||
|  |         ) | ||||||
|         # tail case if no history for range or none prior. |         # tail case if no history for range or none prior. | ||||||
|         if not bars: |  | ||||||
|         # NOTE: there's actually 3 cases here to handle (and |         # NOTE: there's actually 3 cases here to handle (and | ||||||
|         # this should be read alongside the implementation of |         # this should be read alongside the implementation of | ||||||
|         # `.reqHistoricalDataAsync()`): |         # `.reqHistoricalDataAsync()`): | ||||||
|  | @ -398,33 +422,39 @@ class Client: | ||||||
|         #   a weekend, holiday or other non-trading period prior to |         #   a weekend, holiday or other non-trading period prior to | ||||||
|         #   ``end_dt`` which exceeds the ``duration``, |         #   ``end_dt`` which exceeds the ``duration``, | ||||||
|         # - LITERALLY this is the start of the mkt's history! |         # - LITERALLY this is the start of the mkt's history! | ||||||
|  |         if not bars: | ||||||
|  |             # TODO: figure out wut's going on here. | ||||||
| 
 | 
 | ||||||
|  |             # TODO: is this handy, a sync requester for tinkering | ||||||
|  |             # with empty frame cases? | ||||||
|  |             # def get_hist(): | ||||||
|  |             #     return self.ib.reqHistoricalData(**kwargs) | ||||||
|  |             # import pdbp | ||||||
|  |             # pdbp.set_trace() | ||||||
| 
 | 
 | ||||||
|             # sync requester for debugging empty frame cases |             log.critical( | ||||||
|             def get_hist(): |                 'STUPID IB SAYS NO HISTORY\n\n' | ||||||
|                 return self.ib.reqHistoricalData(**kwargs) |                 + query_info | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|             assert get_hist |  | ||||||
|             import pdbp |  | ||||||
|             pdbp.set_trace() |  | ||||||
| 
 |  | ||||||
|             return [], np.empty(0), dt_duration |  | ||||||
|             # TODO: we could maybe raise ``NoData`` instead if we |             # TODO: we could maybe raise ``NoData`` instead if we | ||||||
|             # rewrite the method in the first case? right now there's no |             # rewrite the method in the first case? | ||||||
|             # way to detect a timeout. |             # right now there's no way to detect a timeout.. | ||||||
|  |             return [], np.empty(0), dt_duration | ||||||
| 
 | 
 | ||||||
|         # NOTE XXX: ensure minimum duration in bars B) |         log.info(query_info) | ||||||
|         # => we recursively call this method until we get at least |         # NOTE XXX: ensure minimum duration in bars? | ||||||
|         # as many bars such that they sum in aggregate to the the |         # => recursively call this method until we get at least as | ||||||
|  |         #   many bars such that they sum in aggregate to the | ||||||
|         #   desired total time (duration) at most. |         #   desired total time (duration) at most. | ||||||
|         # XXX XXX XXX |  | ||||||
|         # WHY DID WE EVEN NEED THIS ORIGINALLY!? |  | ||||||
|         # XXX XXX XXX |  | ||||||
|         #  - if you query over a gap and get no data |         #  - if you query over a gap and get no data | ||||||
|         #    that may short circuit the history |         #    that may short circuit the history | ||||||
|         if ( |         if ( | ||||||
|             end_dt |             # XXX XXX XXX | ||||||
|             and False |             # => WHY DID WE EVEN NEED THIS ORIGINALLY!? <= | ||||||
|  |             # XXX XXX XXX | ||||||
|  |             False | ||||||
|  |             and end_dt | ||||||
|         ): |         ): | ||||||
|             nparr: np.ndarray = bars_to_np(bars) |             nparr: np.ndarray = bars_to_np(bars) | ||||||
|             times: np.ndarray = nparr['time'] |             times: np.ndarray = nparr['time'] | ||||||
|  | @ -927,7 +957,10 @@ class Client: | ||||||
|                         warnset = True |                         warnset = True | ||||||
| 
 | 
 | ||||||
|             else: |             else: | ||||||
|                 log.info(f'Got first quote for {contract}') |                 log.info( | ||||||
|  |                     'Got first quote for contract\n' | ||||||
|  |                     f'{contract}\n' | ||||||
|  |                 ) | ||||||
|                 break |                 break | ||||||
|         else: |         else: | ||||||
|             if timeouterr and raise_on_timeout: |             if timeouterr and raise_on_timeout: | ||||||
|  | @ -991,8 +1024,12 @@ class Client: | ||||||
|                     outsideRth=True, |                     outsideRth=True, | ||||||
| 
 | 
 | ||||||
|                     optOutSmartRouting=True, |                     optOutSmartRouting=True, | ||||||
|  |                     # TODO: need to understand this setting better as | ||||||
|  |                     # it pertains to shit ass mms.. | ||||||
|                     routeMarketableToBbo=True, |                     routeMarketableToBbo=True, | ||||||
|  | 
 | ||||||
|                     designatedLocation='SMART', |                     designatedLocation='SMART', | ||||||
|  | 
 | ||||||
|                     # TODO: make all orders GTC? |                     # TODO: make all orders GTC? | ||||||
|                     # https://interactivebrokers.github.io/tws-api/classIBApi_1_1Order.html#a95539081751afb9980f4c6bd1655a6ba |                     # https://interactivebrokers.github.io/tws-api/classIBApi_1_1Order.html#a95539081751afb9980f4c6bd1655a6ba | ||||||
|                     # goodTillDate=f"yyyyMMdd-HH:mm:ss", |                     # goodTillDate=f"yyyyMMdd-HH:mm:ss", | ||||||
|  | @ -1120,8 +1157,8 @@ def get_config() -> dict[str, Any]: | ||||||
|     names = list(accounts.keys()) |     names = list(accounts.keys()) | ||||||
|     accts = section['accounts'] = bidict(accounts) |     accts = section['accounts'] = bidict(accounts) | ||||||
|     log.info( |     log.info( | ||||||
|         f'brokers.toml defines {len(accts)} accounts: ' |         f'{path} defines {len(accts)} account aliases:\n' | ||||||
|         f'{pformat(names)}' |         f'{pformat(names)}\n' | ||||||
|     ) |     ) | ||||||
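For context on the alias table: `bidict` gives a two-way mapping which the connection loop later relies on (via `accounts_def.inverse`) to turn raw IB account numbers back into their `brokers.toml` aliases; the alias and account number below are made up:

```python
from bidict import bidict

# hypothetical `[ib.accounts]` content: alias -> account number
accounts = bidict({'margin': 'DU1234567'})

assert accounts['margin'] == 'DU1234567'          # alias -> acct #
assert accounts.inverse['DU1234567'] == 'margin'  # acct # -> alias
```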
| 
 | 
 | ||||||
|     if section is None: |     if section is None: | ||||||
|  | @ -1188,7 +1225,7 @@ async def load_aio_clients( | ||||||
|         try_ports = list(try_ports.values()) |         try_ports = list(try_ports.values()) | ||||||
| 
 | 
 | ||||||
|     _err = None |     _err = None | ||||||
|     accounts_def = config.load_accounts(['ib']) |     accounts_def: dict[str, str] = config.load_accounts(['ib']) | ||||||
|     ports = try_ports if port is None else [port] |     ports = try_ports if port is None else [port] | ||||||
|     combos = list(itertools.product(hosts, ports)) |     combos = list(itertools.product(hosts, ports)) | ||||||
|     accounts_found: dict[str, Client] = {} |     accounts_found: dict[str, Client] = {} | ||||||
|  | @ -1213,6 +1250,12 @@ async def load_aio_clients( | ||||||
| 
 | 
 | ||||||
|         for i in range(connect_retries): |         for i in range(connect_retries): | ||||||
|             try: |             try: | ||||||
|  |                 log.info( | ||||||
|  |                     'Trying `ib_async` connect\n' | ||||||
|  |                     f'{host}: {port}\n' | ||||||
|  |                     f'clientId: {client_id}\n' | ||||||
|  |                     f'timeout: {connect_timeout}\n' | ||||||
|  |                 ) | ||||||
|                 await ib.connectAsync( |                 await ib.connectAsync( | ||||||
|                     host, |                     host, | ||||||
|                     port, |                     port, | ||||||
|  | @ -1227,7 +1270,9 @@ async def load_aio_clients( | ||||||
|                 client = Client(ib=ib, config=conf) |                 client = Client(ib=ib, config=conf) | ||||||
| 
 | 
 | ||||||
|                 # update all actor-global caches |                 # update all actor-global caches | ||||||
|                 log.info(f"Caching client for {sockaddr}") |                 log.runtime( | ||||||
|  |                     f'Connected and caching `Client` @ {sockaddr!r}' | ||||||
|  |                 ) | ||||||
|                 _client_cache[sockaddr] = client |                 _client_cache[sockaddr] = client | ||||||
|                 break |                 break | ||||||
| 
 | 
 | ||||||
|  | @ -1242,32 +1287,54 @@ async def load_aio_clients( | ||||||
|                 OSError, |                 OSError, | ||||||
|             ) as ce: |             ) as ce: | ||||||
|                 _err = ce |                 _err = ce | ||||||
|                 log.warning( |                 message: str = ( | ||||||
|                     f'Failed to connect on {host}:{port} for {i} time with,\n' |                     f'Failed to connect on {host}:{port} after {i} tries with\n' | ||||||
|                     f'{ib.client.apiError.value()}\n' |                     f'{ib.client.apiError.value()!r}\n\n' | ||||||
|                     'retrying with a new client id..') |                     'Retrying with a new client id..\n' | ||||||
|  |                 ) | ||||||
|  |                 log.runtime(message) | ||||||
|  |         else: | ||||||
|  |             # XXX report loudly if we never established after all | ||||||
|  |             # re-tries | ||||||
|  |             log.warning(message) | ||||||
| 
 | 
 | ||||||
|         # Pre-collect all accounts available for this |         # Pre-collect all accounts available for this | ||||||
|         # connection and map account names to this client |         # connection and map account names to this client | ||||||
|         # instance. |         # instance. | ||||||
|         for value in ib.accountValues(): |         for value in ib.accountValues(): | ||||||
|             acct_number = value.account |             acct_number: str = value.account | ||||||
| 
 | 
 | ||||||
|             entry = accounts_def.inverse.get(acct_number) |             acnt_alias: str = accounts_def.inverse.get(acct_number) | ||||||
|             if not entry: |             if not acnt_alias: | ||||||
|  | 
 | ||||||
|  |                 # TODO: should we construct the below reco-ex from | ||||||
|  |                 # the existing config content? | ||||||
|  |                 _, path = config.load( | ||||||
|  |                     conf_name='brokers', | ||||||
|  |                 ) | ||||||
|                 raise ValueError( |                 raise ValueError( | ||||||
|                     'No section in brokers.toml for account:' |                     'No alias in account section for account!\n' | ||||||
|                     f' {acct_number}\n' |                     f'Please add an acnt alias entry to your {path}\n' | ||||||
|                     f'Please add entry to continue using this API client' |                     'For example,\n\n' | ||||||
|  | 
 | ||||||
|  |                     '[ib.accounts]\n' | ||||||
|  |                     f'margin = {acct_number!r}\n' | ||||||
|  |                     '^^^^^^ <- you need this part!\n\n' | ||||||
|  | 
 | ||||||
|  |                     'This ensures `piker` will not leak private acnt info ' | ||||||
|  |                     'to console output by default!\n' | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|             # surjection of account names to operating clients. |             # surjection of account names to operating clients. | ||||||
|             if acct_number not in accounts_found: |             if acnt_alias not in accounts_found: | ||||||
|                 accounts_found[entry] = client |                 accounts_found[acnt_alias] = client | ||||||
|  |                 # client._acnt_names.add(acnt_alias) | ||||||
|  |                 client._acnt_names.append(acnt_alias) | ||||||
| 
 | 
 | ||||||
|  |         if accounts_found: | ||||||
|             log.info( |             log.info( | ||||||
|             f'Loaded accounts for client @ {host}:{port}\n' |                 f'Loaded accounts for api client\n\n' | ||||||
|             f'{pformat(accounts_found)}' |                 f'{pformat(accounts_found)}\n' | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             # XXX: why aren't we just updating this directy above |             # XXX: why aren't we just updating this directy above | ||||||
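The alias lookup in the hunk above leans on ``bidict``'s two-way mapping; a minimal, self-contained sketch of the same pattern (the section values and account numbers below are made up, not real config)::

    # hypothetical `[ib.accounts]` mapping as it might be loaded from
    # a user's brokers.toml; alias -> account-number is what's stored,
    # `.inverse` gives the reverse lookup used above.
    from bidict import bidict

    accounts_def = bidict({
        'margin': 'DU1234567',
        'ira': 'DU7654321',
    })

    acct_number = 'DU1234567'
    acnt_alias = accounts_def.inverse.get(acct_number)
    assert acnt_alias == 'margin'

    # unknown account numbers -> `None`, which is what trips the
    # `ValueError` path shown above.
    assert accounts_def.inverse.get('DU0000000') is None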
|  | @ -1306,7 +1373,9 @@ async def load_clients_for_trio( | ||||||
|     a ``tractor.to_asyncio.open_channel_from()``. |     a ``tractor.to_asyncio.open_channel_from()``. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     async with load_aio_clients() as accts2clients: |     async with load_aio_clients( | ||||||
|  |         disconnect_on_exit=False, | ||||||
|  |     ) as accts2clients: | ||||||
| 
 | 
 | ||||||
|         to_trio.send_nowait(accts2clients) |         to_trio.send_nowait(accts2clients) | ||||||
| 
 | 
 | ||||||
|  | @ -1472,7 +1541,7 @@ async def open_aio_client_method_relay( | ||||||
|         msg: tuple[str, dict] | dict | None = await from_trio.get() |         msg: tuple[str, dict] | dict | None = await from_trio.get() | ||||||
|         match msg: |         match msg: | ||||||
|             case None:  # termination sentinel |             case None:  # termination sentinel | ||||||
|                 print('asyncio PROXY-RELAY SHUTDOWN') |                 log.info('asyncio `Client` method-proxy SHUTDOWN!') | ||||||
|                 break |                 break | ||||||
| 
 | 
 | ||||||
|             case (meth_name, kwargs): |             case (meth_name, kwargs): | ||||||
|  |  | ||||||
|  | @ -1183,7 +1183,14 @@ async def deliver_trade_events( | ||||||
|                         pos |                         pos | ||||||
|                         and fill |                         and fill | ||||||
|                     ): |                     ): | ||||||
|                         assert fill.commissionReport == cr |                         now_cr: CommissionReport = fill.commissionReport | ||||||
|  |                         if (now_cr != cr): | ||||||
|  |                             log.warning( | ||||||
|  |                                 'UhhHh ib updated the commission report mid-fill..?\n' | ||||||
|  |                                 f'was: {pformat(cr)}\n' | ||||||
|  |                                 f'now: {pformat(now_cr)}\n' | ||||||
|  |                             ) | ||||||
|  | 
 | ||||||
|                         await emit_pp_update( |                         await emit_pp_update( | ||||||
|                             ems_stream, |                             ems_stream, | ||||||
|                             accounts_def, |                             accounts_def, | ||||||
|  |  | ||||||
|  | @ -671,8 +671,8 @@ async def _setup_quote_stream( | ||||||
|         # making them mostly useless and explains why the scanner |         # making them mostly useless and explains why the scanner | ||||||
|         # is always slow XD |         # is always slow XD | ||||||
|         # '293',  # Trade count for day |         # '293',  # Trade count for day | ||||||
|         '294',  # Trade rate / minute |         # '294',  # Trade rate / minute | ||||||
|         '295',  # Vlm rate / minute |         # '295',  # Vlm rate / minute | ||||||
|     ), |     ), | ||||||
|     contract: Contract | None = None, |     contract: Contract | None = None, | ||||||
| 
 | 
 | ||||||
|  | @ -915,9 +915,13 @@ async def stream_quotes( | ||||||
| 
 | 
 | ||||||
|         if first_ticker: |         if first_ticker: | ||||||
|             first_quote: dict = normalize(first_ticker) |             first_quote: dict = normalize(first_ticker) | ||||||
|             log.info( | 
 | ||||||
|                 'Rxed init quote:\n' |             # TODO: we need a stack-oriented log levels filters for | ||||||
|                 f'{pformat(first_quote)}' |             # this! | ||||||
|  |             # log.info(message, filter={'stack': 'live_feed'}) ? | ||||||
|  |             log.runtime( | ||||||
|  |                 'Rxed init quote:\n\n' | ||||||
|  |                 f'{pformat(first_quote)}\n' | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|         # NOTE: it might be outside regular trading hours for |         # NOTE: it might be outside regular trading hours for | ||||||
|  | @ -969,7 +973,11 @@ async def stream_quotes( | ||||||
|             raise_on_timeout=True, |             raise_on_timeout=True, | ||||||
|         ) |         ) | ||||||
|         first_quote: dict = normalize(first_ticker) |         first_quote: dict = normalize(first_ticker) | ||||||
|         log.info( | 
 | ||||||
|  |         # TODO: we need a stack-oriented log levels filters for | ||||||
|  |         # this! | ||||||
|  |         # log.info(message, filter={'stack': 'live_feed'}) ? | ||||||
|  |         log.runtime( | ||||||
|             'Rxed init quote:\n' |             'Rxed init quote:\n' | ||||||
|             f'{pformat(first_quote)}' |             f'{pformat(first_quote)}' | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  | @ -31,7 +31,11 @@ from typing import ( | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| from bidict import bidict | from bidict import bidict | ||||||
| import pendulum | from pendulum import ( | ||||||
|  |     DateTime, | ||||||
|  |     parse, | ||||||
|  |     from_timestamp, | ||||||
|  | ) | ||||||
| from ib_insync import ( | from ib_insync import ( | ||||||
|     Contract, |     Contract, | ||||||
|     Commodity, |     Commodity, | ||||||
|  | @ -66,10 +70,11 @@ tx_sort: Callable = partial( | ||||||
|     iter_by_dt, |     iter_by_dt, | ||||||
|     parsers={ |     parsers={ | ||||||
|         'dateTime': parse_flex_dt, |         'dateTime': parse_flex_dt, | ||||||
|         'datetime': pendulum.parse, |         'datetime': parse, | ||||||
|         # for some some fucking 2022 and | 
 | ||||||
|         # back options records...fuck me. |         # XXX: for some fucking 2022 and | ||||||
|         'date': pendulum.parse, |         # back options records.. f@#$ me.. | ||||||
|  |         'date': parse, | ||||||
|     } |     } | ||||||
| ) | ) | ||||||
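For reference, the ``pendulum`` helpers now imported by name behave like so (the timestamps below are made up, and ``parse_flex_dt`` is piker's own helper which isn't shown here)::

    from pendulum import DateTime, from_timestamp, parse

    # ISO-ish strings from API records parse straight to a `DateTime`,
    dt: DateTime = parse('2022-03-04T09:30:00')

    # while epoch floats from api-trade records go through
    # `from_timestamp()`.
    epoch_dt: DateTime = from_timestamp(1646386200)

    assert isinstance(dt, DateTime)
    assert isinstance(epoch_dt, DateTime)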
| 
 | 
 | ||||||
|  | @ -89,15 +94,38 @@ def norm_trade( | ||||||
| 
 | 
 | ||||||
|     conid: int = str(record.get('conId') or record['conid']) |     conid: int = str(record.get('conId') or record['conid']) | ||||||
|     bs_mktid: str = str(conid) |     bs_mktid: str = str(conid) | ||||||
|     comms = record.get('commission') |  | ||||||
|     if comms is None: |  | ||||||
|         comms = -1*record['ibCommission'] |  | ||||||
| 
 | 
 | ||||||
|     price = record.get('price') or record['tradePrice'] |     # NOTE: sometimes weird records (like BTTX?) | ||||||
|  |     # have no field for this? | ||||||
|  |     comms: float = -1 * ( | ||||||
|  |         record.get('commission') | ||||||
|  |         or record.get('ibCommission') | ||||||
|  |         or 0 | ||||||
|  |     ) | ||||||
|  |     if not comms: | ||||||
|  |         log.warning( | ||||||
|  |             'No commissions found for record?\n' | ||||||
|  |             f'{pformat(record)}\n' | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     price: float = ( | ||||||
|  |         record.get('price') | ||||||
|  |         or record.get('tradePrice') | ||||||
|  |     ) | ||||||
|  |     if price is None: | ||||||
|  |         log.warning( | ||||||
|  |             'No `price` field found in record?\n' | ||||||
|  |             'Skipping normalization..\n' | ||||||
|  |             f'{pformat(record)}\n' | ||||||
|  |         ) | ||||||
|  |         return None | ||||||
| 
 | 
 | ||||||
|     # the api doesn't do the -/+ on the quantity for you but flex |     # the api doesn't do the -/+ on the quantity for you but flex | ||||||
|     # records do.. are you fucking serious ib...!? |     # records do.. are you fucking serious ib...!? | ||||||
|     size = record.get('quantity') or record['shares'] * { |     size: float|int = ( | ||||||
|  |         record.get('quantity') | ||||||
|  |         or record['shares'] | ||||||
|  |     ) * { | ||||||
|         'BOT': 1, |         'BOT': 1, | ||||||
|         'SLD': -1, |         'SLD': -1, | ||||||
|     }[record['side']] |     }[record['side']] | ||||||
|  | @ -128,26 +156,31 @@ def norm_trade( | ||||||
|         # otype = tail[6] |         # otype = tail[6] | ||||||
|         # strike = tail[7:] |         # strike = tail[7:] | ||||||
| 
 | 
 | ||||||
|         print(f'skipping opts contract {symbol}') |         log.warning( | ||||||
|  |             f'Skipping option contract -> NO SUPPORT YET!\n' | ||||||
|  |             f'{symbol}\n' | ||||||
|  |         ) | ||||||
|         return None |         return None | ||||||
| 
 | 
 | ||||||
|     # timestamping is way different in API records |     # timestamping is way different in API records | ||||||
|     dtstr = record.get('datetime') |     dtstr: str = record.get('datetime') | ||||||
|     date = record.get('date') |     date: str = record.get('date') | ||||||
|     flex_dtstr = record.get('dateTime') |     flex_dtstr: str = record.get('dateTime') | ||||||
| 
 | 
 | ||||||
|     if dtstr or date: |     if dtstr or date: | ||||||
|         dt = pendulum.parse(dtstr or date) |         dt: DateTime = parse(dtstr or date) | ||||||
| 
 | 
 | ||||||
|     elif flex_dtstr: |     elif flex_dtstr: | ||||||
|         # probably a flex record with a wonky non-std timestamp.. |         # probably a flex record with a wonky non-std timestamp.. | ||||||
|         dt = parse_flex_dt(record['dateTime']) |         dt: DateTime = parse_flex_dt(record['dateTime']) | ||||||
| 
 | 
 | ||||||
|     # special handling of symbol extraction from |     # special handling of symbol extraction from | ||||||
|     # flex records using some ad-hoc schema parsing. |     # flex records using some ad-hoc schema parsing. | ||||||
|     asset_type: str = record.get( |     asset_type: str = ( | ||||||
|         'assetCategory' |         record.get('assetCategory') | ||||||
|     ) or record.get('secType', 'STK') |         or record.get('secType') | ||||||
|  |         or 'STK' | ||||||
|  |     ) | ||||||
| 
 | 
 | ||||||
|     if (expiry := ( |     if (expiry := ( | ||||||
|             record.get('lastTradeDateOrContractMonth') |             record.get('lastTradeDateOrContractMonth') | ||||||
|  | @ -357,6 +390,7 @@ def norm_trade_records( | ||||||
|         if txn is None: |         if txn is None: | ||||||
|             continue |             continue | ||||||
| 
 | 
 | ||||||
|  |         # inject txns sorted by datetime | ||||||
|         insort( |         insort( | ||||||
|             records, |             records, | ||||||
|             txn, |             txn, | ||||||
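``bisect.insort()`` is what keeps the record list datetime-ordered as each normalized txn is injected; a standalone sketch of the pattern (the ``Txn`` stand-in and the ``key=`` usage are illustrative, not piker's actual ``Transaction`` type or exact call)::

    from bisect import insort
    from dataclasses import dataclass
    from datetime import datetime

    @dataclass
    class Txn:  # stand-in for a normalized transaction record
        tid: str
        dt: datetime

    records: list[Txn] = []
    for txn in [
        Txn('b', datetime(2024, 1, 2)),
        Txn('a', datetime(2024, 1, 1)),
    ]:
        # each insert lands in datetime order, no post-hoc sort needed
        insort(records, txn, key=lambda t: t.dt)

    assert [t.tid for t in records] == ['a', 'b']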
|  | @ -405,7 +439,7 @@ def api_trades_to_ledger_entries( | ||||||
|                     txn_dict[attr_name] = val |                     txn_dict[attr_name] = val | ||||||
| 
 | 
 | ||||||
|         tid = str(txn_dict['execId']) |         tid = str(txn_dict['execId']) | ||||||
|         dt = pendulum.from_timestamp(txn_dict['time']) |         dt = from_timestamp(txn_dict['time']) | ||||||
|         txn_dict['datetime'] = str(dt) |         txn_dict['datetime'] = str(dt) | ||||||
|         acctid = accounts[txn_dict['acctNumber']] |         acctid = accounts[txn_dict['acctNumber']] | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -209,7 +209,10 @@ async def open_symbol_search(ctx: tractor.Context) -> None: | ||||||
|                 break |                 break | ||||||
| 
 | 
 | ||||||
|             ib_client = proxy._aio_ns.ib |             ib_client = proxy._aio_ns.ib | ||||||
|             log.info(f'Using {ib_client} for symbol search') |             log.info( | ||||||
|  |                 f'Using API client for symbol-search\n' | ||||||
|  |                 f'{ib_client}\n' | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|             last = time.time() |             last = time.time() | ||||||
|             async for pattern in stream: |             async for pattern in stream: | ||||||
|  | @ -294,7 +297,7 @@ async def open_symbol_search(ctx: tractor.Context) -> None: | ||||||
|                     elif stock_results: |                     elif stock_results: | ||||||
|                         break |                         break | ||||||
|                     # else: |                     # else: | ||||||
|                     await tractor.pause() |                     # await tractor.pause() | ||||||
| 
 | 
 | ||||||
|                     # # match against our ad-hoc set immediately |                     # # match against our ad-hoc set immediately | ||||||
|                     # adhoc_matches = fuzzy.extract( |                     # adhoc_matches = fuzzy.extract( | ||||||
|  | @ -522,7 +525,21 @@ async def get_mkt_info( | ||||||
|         venue = con.primaryExchange or con.exchange |         venue = con.primaryExchange or con.exchange | ||||||
| 
 | 
 | ||||||
|     price_tick: Decimal = Decimal(str(details.minTick)) |     price_tick: Decimal = Decimal(str(details.minTick)) | ||||||
|     # price_tick: Decimal = Decimal('0.01') |     ib_min_tick_gt_2: Decimal = Decimal('0.01') | ||||||
|  |     if ( | ||||||
|  |         price_tick < ib_min_tick_gt_2 | ||||||
|  |     ): | ||||||
|  |         # TODO: we need to add some kinda dynamic rounding sys | ||||||
|  |         # to our MktPair i guess? | ||||||
|  |         # not sure where the logic should sit, but likely inside | ||||||
|  |         # the `.clearing._ems` i suppose... | ||||||
|  |         log.warning( | ||||||
|  |             'IB seems to disallow a min price tick < 0.01 ' | ||||||
|  |             'when the price is > 2.0..?\n' | ||||||
|  |             f'Decreasing min tick precision for {fqme} to 0.01' | ||||||
|  |         ) | ||||||
|  |         # price_tick = ib_min_tick | ||||||
|  |         # await tractor.pause() | ||||||
| 
 | 
 | ||||||
|     if atype == 'stock': |     if atype == 'stock': | ||||||
|         # XXX: GRRRR they don't support fractional share sizes for |         # XXX: GRRRR they don't support fractional share sizes for | ||||||
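The ``Decimal(str(...))`` construction above matters: building the tick size straight from the float would drag in binary representation noise. A quick sketch, using a made up ``0.005`` tick::

    from decimal import Decimal

    min_tick = 0.005  # hypothetical float reported by the API

    # round-tripping through `str()` keeps the human-intended precision,
    assert Decimal(str(min_tick)) == Decimal('0.005')
    # whereas the raw-float ctor does not.
    assert Decimal(min_tick) != Decimal('0.005')

    # the warning branch above then compares against the 0.01 floor:
    ib_min_tick_gt_2 = Decimal('0.01')
    assert Decimal(str(min_tick)) < ib_min_tick_gt_2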
|  |  | ||||||
|  | @ -27,8 +27,8 @@ from typing import ( | ||||||
| ) | ) | ||||||
| import time | import time | ||||||
| 
 | 
 | ||||||
|  | import httpx | ||||||
| import pendulum | import pendulum | ||||||
| import asks |  | ||||||
| import numpy as np | import numpy as np | ||||||
| import urllib.parse | import urllib.parse | ||||||
| import hashlib | import hashlib | ||||||
|  | @ -60,6 +60,11 @@ log = get_logger('piker.brokers.kraken') | ||||||
| 
 | 
 | ||||||
| # <uri>/<version>/ | # <uri>/<version>/ | ||||||
| _url = 'https://api.kraken.com/0' | _url = 'https://api.kraken.com/0' | ||||||
|  | 
 | ||||||
|  | _headers: dict[str, str] = { | ||||||
|  |     'User-Agent': 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)' | ||||||
|  | } | ||||||
|  | 
 | ||||||
| # TODO: this is the only backend providing this right? | # TODO: this is the only backend providing this right? | ||||||
| # in which case we should drop it from the defaults and | # in which case we should drop it from the defaults and | ||||||
| # instead make a custom fields descr in this module! | # instead make a custom fields descr in this module! | ||||||
|  | @ -135,16 +140,15 @@ class Client: | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         config: dict[str, str], |         config: dict[str, str], | ||||||
|  |         httpx_client: httpx.AsyncClient, | ||||||
|  | 
 | ||||||
|         name: str = '', |         name: str = '', | ||||||
|         api_key: str = '', |         api_key: str = '', | ||||||
|         secret: str = '' |         secret: str = '' | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         self._sesh = asks.Session(connections=4) | 
 | ||||||
|         self._sesh.base_location = _url |         self._sesh: httpx.AsyncClient = httpx_client | ||||||
|         self._sesh.headers.update({ | 
 | ||||||
|             'User-Agent': |  | ||||||
|                 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)' |  | ||||||
|         }) |  | ||||||
|         self._name = name |         self._name = name | ||||||
|         self._api_key = api_key |         self._api_key = api_key | ||||||
|         self._secret = secret |         self._secret = secret | ||||||
|  | @ -166,10 +170,9 @@ class Client: | ||||||
|         method: str, |         method: str, | ||||||
|         data: dict, |         data: dict, | ||||||
|     ) -> dict[str, Any]: |     ) -> dict[str, Any]: | ||||||
|         resp = await self._sesh.post( |         resp: httpx.Response = await self._sesh.post( | ||||||
|             path=f'/public/{method}', |             url=f'/public/{method}', | ||||||
|             json=data, |             json=data, | ||||||
|             timeout=float('inf') |  | ||||||
|         ) |         ) | ||||||
|         return resproc(resp, log) |         return resproc(resp, log) | ||||||
| 
 | 
 | ||||||
|  | @ -180,18 +183,18 @@ class Client: | ||||||
|         uri_path: str |         uri_path: str | ||||||
|     ) -> dict[str, Any]: |     ) -> dict[str, Any]: | ||||||
|         headers = { |         headers = { | ||||||
|             'Content-Type': |             'Content-Type': 'application/x-www-form-urlencoded', | ||||||
|                 'application/x-www-form-urlencoded', |             'API-Key': self._api_key, | ||||||
|             'API-Key': |             'API-Sign': get_kraken_signature( | ||||||
|                 self._api_key, |                 uri_path, | ||||||
|             'API-Sign': |                 data, | ||||||
|                 get_kraken_signature(uri_path, data, self._secret) |                 self._secret, | ||||||
|  |             ), | ||||||
|         } |         } | ||||||
|         resp = await self._sesh.post( |         resp: httpx.Response = await self._sesh.post( | ||||||
|             path=f'/private/{method}', |             url=f'/private/{method}', | ||||||
|             data=data, |             data=data, | ||||||
|             headers=headers, |             headers=headers, | ||||||
|             timeout=float('inf') |  | ||||||
|         ) |         ) | ||||||
|         return resproc(resp, log) |         return resproc(resp, log) | ||||||
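The ``get_kraken_signature()`` helper referenced in the ``API-Sign`` header above isn't part of this hunk; for context, the canonical recipe from Kraken's REST API docs looks roughly like the following (a sketch, not necessarily piker's exact implementation)::

    import base64
    import hashlib
    import hmac
    import urllib.parse

    def get_kraken_signature(
        urlpath: str,  # e.g. '/0/private/Balance'
        data: dict,    # must include a 'nonce' field
        secret: str,   # base64-encoded API secret
    ) -> str:
        postdata = urllib.parse.urlencode(data)
        encoded = (str(data['nonce']) + postdata).encode()
        message = urlpath.encode() + hashlib.sha256(encoded).digest()
        mac = hmac.new(
            base64.b64decode(secret),
            message,
            hashlib.sha512,
        )
        return base64.b64encode(mac.digest()).decode()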
| 
 | 
 | ||||||
|  | @ -665,10 +668,19 @@ class Client: | ||||||
| @acm | @acm | ||||||
| async def get_client() -> Client: | async def get_client() -> Client: | ||||||
| 
 | 
 | ||||||
|     conf = get_config() |     conf: dict[str, Any] = get_config() | ||||||
|  |     async with httpx.AsyncClient( | ||||||
|  |         base_url=_url, | ||||||
|  |         headers=_headers, | ||||||
|  | 
 | ||||||
|  |         # TODO: is there a way to numerate this? | ||||||
|  |         # https://www.python-httpx.org/advanced/clients/#why-use-a-client | ||||||
|  |         # connections=4 | ||||||
|  |     ) as trio_client: | ||||||
|         if conf: |         if conf: | ||||||
|             client = Client( |             client = Client( | ||||||
|                 conf, |                 conf, | ||||||
|  |                 httpx_client=trio_client, | ||||||
| 
 | 
 | ||||||
|                 # TODO: don't break these up and just do internal |                 # TODO: don't break these up and just do internal | ||||||
|                 # conf lookups instead.. |                 # conf lookups instead.. | ||||||
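Re the TODO above about capping connections (the old ``asks.Session(connections=4)`` behaviour): ``httpx`` exposes this via ``httpx.Limits``. A sketch only, not what's currently wired in::

    import httpx

    trio_client = httpx.AsyncClient(
        base_url='https://api.kraken.com/0',
        headers={
            'User-Agent':
                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)',
        },
        # cap the connection pool similarly to the old asks session
        limits=httpx.Limits(max_connections=4),
        # the old per-request `timeout=float('inf')` maps to no timeout
        timeout=None,
    )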
|  | @ -677,7 +689,10 @@ async def get_client() -> Client: | ||||||
|                 secret=conf['secret'] |                 secret=conf['secret'] | ||||||
|             ) |             ) | ||||||
|         else: |         else: | ||||||
|         client = Client({}) |             client = Client( | ||||||
|  |                 conf={}, | ||||||
|  |                 httpx_client=trio_client, | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|         # at startup, load all symbols, and asset info in |         # at startup, load all symbols, and asset info in | ||||||
|         # batch requests. |         # batch requests. | ||||||
|  |  | ||||||
|  | @ -612,18 +612,18 @@ async def open_trade_dialog( | ||||||
| 
 | 
 | ||||||
|                 # enter relay loop |                 # enter relay loop | ||||||
|                 await handle_order_updates( |                 await handle_order_updates( | ||||||
|                     client, |                     client=client, | ||||||
|                     ws, |                     ws=ws, | ||||||
|                     stream, |                     ws_stream=stream, | ||||||
|                     ems_stream, |                     ems_stream=ems_stream, | ||||||
|                     apiflows, |                     apiflows=apiflows, | ||||||
|                     ids, |                     ids=ids, | ||||||
|                     reqids2txids, |                     reqids2txids=reqids2txids, | ||||||
|                     acnt, |                     acnt=acnt, | ||||||
|                     api_trans, |                     ledger=ledger, | ||||||
|                     acctid, |                     acctid=acctid, | ||||||
|                     acc_name, |                     acc_name=acc_name, | ||||||
|                     token, |                     token=token, | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -639,7 +639,8 @@ async def handle_order_updates( | ||||||
| 
 | 
 | ||||||
|     # transaction records which will be updated |     # transaction records which will be updated | ||||||
|     # on new trade clearing events (aka order "fills") |     # on new trade clearing events (aka order "fills") | ||||||
|     ledger_trans: dict[str, Transaction], |     ledger: TransactionLedger, | ||||||
|  |     # ledger_trans: dict[str, Transaction], | ||||||
|     acctid: str, |     acctid: str, | ||||||
|     acc_name: str, |     acc_name: str, | ||||||
|     token: str, |     token: str, | ||||||
|  | @ -699,7 +700,8 @@ async def handle_order_updates( | ||||||
|                     # if tid not in ledger_trans |                     # if tid not in ledger_trans | ||||||
|                 } |                 } | ||||||
|                 for tid, trade in trades.items(): |                 for tid, trade in trades.items(): | ||||||
|                     assert tid not in ledger_trans |                     # assert tid not in ledger_trans | ||||||
|  |                     assert tid not in ledger | ||||||
|                     txid = trade['ordertxid'] |                     txid = trade['ordertxid'] | ||||||
|                     reqid = trade.get('userref') |                     reqid = trade.get('userref') | ||||||
| 
 | 
 | ||||||
|  | @ -747,11 +749,17 @@ async def handle_order_updates( | ||||||
|                     client, |                     client, | ||||||
|                     api_name_set='wsname', |                     api_name_set='wsname', | ||||||
|                 ) |                 ) | ||||||
|                 ppmsgs = trades2pps( |                 ppmsgs: list[BrokerdPosition] = trades2pps( | ||||||
|                     acnt, |                     acnt=acnt, | ||||||
|                     acctid, |                     ledger=ledger, | ||||||
|                     new_trans, |                     acctid=acctid, | ||||||
|  |                     new_trans=new_trans, | ||||||
|                 ) |                 ) | ||||||
|  |                 # ppmsgs = trades2pps( | ||||||
|  |                 #     acnt, | ||||||
|  |                 #     acctid, | ||||||
|  |                 #     new_trans, | ||||||
|  |                 # ) | ||||||
|                 for pp_msg in ppmsgs: |                 for pp_msg in ppmsgs: | ||||||
|                     await ems_stream.send(pp_msg) |                     await ems_stream.send(pp_msg) | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -16,10 +16,9 @@ | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| Kucoin broker backend | Kucoin cex API backend. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| 
 |  | ||||||
| from contextlib import ( | from contextlib import ( | ||||||
|     asynccontextmanager as acm, |     asynccontextmanager as acm, | ||||||
|     aclosing, |     aclosing, | ||||||
|  | @ -42,7 +41,7 @@ import wsproto | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
| 
 | 
 | ||||||
| from trio_typing import TaskStatus | from trio_typing import TaskStatus | ||||||
| import asks | import httpx | ||||||
| from bidict import bidict | from bidict import bidict | ||||||
| import numpy as np | import numpy as np | ||||||
| import pendulum | import pendulum | ||||||
|  | @ -63,7 +62,7 @@ from piker._cacheables import ( | ||||||
| ) | ) | ||||||
| from piker.log import get_logger | from piker.log import get_logger | ||||||
| from piker.data.validate import FeedInit | from piker.data.validate import FeedInit | ||||||
| from piker.types import Struct | from piker.types import Struct  # NOTE, this is already a `tractor.msg.Struct` | ||||||
| from piker.data import ( | from piker.data import ( | ||||||
|     def_iohlcv_fields, |     def_iohlcv_fields, | ||||||
|     match_from_pairs, |     match_from_pairs, | ||||||
|  | @ -99,9 +98,18 @@ class KucoinMktPair(Struct, frozen=True): | ||||||
|     def size_tick(self) -> Decimal: |     def size_tick(self) -> Decimal: | ||||||
|         return Decimal(str(self.quoteMinSize)) |         return Decimal(str(self.quoteMinSize)) | ||||||
| 
 | 
 | ||||||
|  |     callauctionFirstStageStartTime: float|None | ||||||
|  |     callauctionIsEnabled: bool | ||||||
|  |     callauctionPriceCeiling: float|None | ||||||
|  |     callauctionPriceFloor: float|None | ||||||
|  |     callauctionSecondStageStartTime: float|None | ||||||
|  |     callauctionThirdStageStartTime: float|None | ||||||
|  | 
 | ||||||
|     enableTrading: bool |     enableTrading: bool | ||||||
|  |     feeCategory: int | ||||||
|     feeCurrency: str |     feeCurrency: str | ||||||
|     isMarginEnabled: bool |     isMarginEnabled: bool | ||||||
|  |     makerFeeCoefficient: float | ||||||
|     market: str |     market: str | ||||||
|     minFunds: float |     minFunds: float | ||||||
|     name: str |     name: str | ||||||
|  | @ -111,7 +119,10 @@ class KucoinMktPair(Struct, frozen=True): | ||||||
|     quoteIncrement: float |     quoteIncrement: float | ||||||
|     quoteMaxSize: float |     quoteMaxSize: float | ||||||
|     quoteMinSize: float |     quoteMinSize: float | ||||||
|  |     st: bool | ||||||
|     symbol: str  # our bs_mktid, kucoin's internal id |     symbol: str  # our bs_mktid, kucoin's internal id | ||||||
|  |     takerFeeCoefficient: float | ||||||
|  |     tradingStartTime: float|None | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class AccountTrade(Struct, frozen=True): | class AccountTrade(Struct, frozen=True): | ||||||
|  | @ -212,7 +223,11 @@ def get_config() -> BrokerConfig | None: | ||||||
| 
 | 
 | ||||||
| class Client: | class Client: | ||||||
| 
 | 
 | ||||||
|     def __init__(self) -> None: |     def __init__( | ||||||
|  |         self, | ||||||
|  |         httpx_client: httpx.AsyncClient, | ||||||
|  |     ) -> None: | ||||||
|  |         self._http: httpx.AsyncClient = httpx_client | ||||||
|         self._config: BrokerConfig|None = get_config() |         self._config: BrokerConfig|None = get_config() | ||||||
|         self._pairs: dict[str, KucoinMktPair] = {} |         self._pairs: dict[str, KucoinMktPair] = {} | ||||||
|         self._fqmes2mktids: bidict[str, str] = bidict() |         self._fqmes2mktids: bidict[str, str] = bidict() | ||||||
|  | @ -227,18 +242,24 @@ class Client: | ||||||
| 
 | 
 | ||||||
|     ) -> dict[str, str | bytes]: |     ) -> dict[str, str | bytes]: | ||||||
|         ''' |         ''' | ||||||
|         Generate authenticated request headers |         Generate authenticated request headers: | ||||||
|  | 
 | ||||||
|         https://docs.kucoin.com/#authentication |         https://docs.kucoin.com/#authentication | ||||||
|  |         https://www.kucoin.com/docs/basic-info/connection-method/authentication/creating-a-request | ||||||
|  |         https://www.kucoin.com/docs/basic-info/connection-method/authentication/signing-a-message | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
| 
 |  | ||||||
|         if not self._config: |         if not self._config: | ||||||
|             raise ValueError( |             raise ValueError( | ||||||
|                 'No config found when trying to send authenticated request') |                 'No config found when trying to send authenticated request' | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|         str_to_sign = ( |         str_to_sign = ( | ||||||
|             str(int(time.time() * 1000)) |             str(int(time.time() * 1000)) | ||||||
|             + action + f'/api/{api}/{endpoint.lstrip("/")}' |             + | ||||||
|  |             action | ||||||
|  |             + | ||||||
|  |             f'/api/{api}/{endpoint.lstrip("/")}' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         signature = base64.b64encode( |         signature = base64.b64encode( | ||||||
|  | @ -249,6 +270,7 @@ class Client: | ||||||
|             ).digest() |             ).digest() | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  |         # TODO: can we cache this between calls? | ||||||
|         passphrase = base64.b64encode( |         passphrase = base64.b64encode( | ||||||
|             hmac.new( |             hmac.new( | ||||||
|                 self._config.key_secret.encode('utf-8'), |                 self._config.key_secret.encode('utf-8'), | ||||||
|  | @ -270,8 +292,10 @@ class Client: | ||||||
|         self, |         self, | ||||||
|         action: Literal['POST', 'GET'], |         action: Literal['POST', 'GET'], | ||||||
|         endpoint: str, |         endpoint: str, | ||||||
|  | 
 | ||||||
|         api: str = 'v2', |         api: str = 'v2', | ||||||
|         headers: dict = {}, |         headers: dict = {}, | ||||||
|  | 
 | ||||||
|     ) -> Any: |     ) -> Any: | ||||||
|         ''' |         ''' | ||||||
|         Generic request wrapper for Kucoin API |         Generic request wrapper for Kucoin API | ||||||
|  | @ -284,14 +308,19 @@ class Client: | ||||||
|                 api, |                 api, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|         api_url = f'https://api.kucoin.com/api/{api}/{endpoint}' |         req_meth: Callable = getattr( | ||||||
| 
 |             self._http, | ||||||
|         res = await asks.request(action, api_url, headers=headers) |             action.lower(), | ||||||
| 
 |         ) | ||||||
|         json = res.json() |         res = await req_meth( | ||||||
|         if 'data' in json: |             url=f'/{api}/{endpoint}', | ||||||
|             return json['data'] |             headers=headers, | ||||||
|  |         ) | ||||||
|  |         json: dict = res.json() | ||||||
|  |         if (data := json.get('data')) is not None: | ||||||
|  |             return data | ||||||
|         else: |         else: | ||||||
|  |             api_url: str = str(self._http.base_url) | ||||||
|             log.error( |             log.error( | ||||||
|                 f'Error making request to {api_url} ->\n' |                 f'Error making request to {api_url} ->\n' | ||||||
|                 f'{pformat(res)}' |                 f'{pformat(res)}' | ||||||
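The new ``_request()`` wrapper simply name-dispatches onto the underlying ``httpx.AsyncClient`` verb methods; a standalone sketch of the same pattern follows (the endpoint and the ``trio.run()`` driver are illustrative, this is not piker's actual wrapper and assumes httpx's trio/anyio support is installed)::

    import trio
    import httpx

    async def request(
        action: str,  # 'GET' | 'POST'
        endpoint: str,
        api: str = 'v2',
    ) -> dict | list:
        async with httpx.AsyncClient(
            base_url='https://api.kucoin.com/api',
        ) as http:
            # pick `.get()`/`.post()`/.. by name, like `_request()` above
            req_meth = getattr(http, action.lower())
            resp: httpx.Response = await req_meth(url=f'/{api}/{endpoint}')
            return resp.json().get('data', {})

    if __name__ == '__main__':
        entries = trio.run(request, 'GET', 'symbols')
        print(f'{len(entries)} kucoin market pairs fetched')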
|  | @ -349,8 +378,8 @@ class Client: | ||||||
|             currencies: dict[str, Currency] = {} |             currencies: dict[str, Currency] = {} | ||||||
|             entries: list[dict] = await self._request( |             entries: list[dict] = await self._request( | ||||||
|                 'GET', |                 'GET', | ||||||
|                 api='v1', |  | ||||||
|                 endpoint='currencies', |                 endpoint='currencies', | ||||||
|  |                 api='v1', | ||||||
|             ) |             ) | ||||||
|             for entry in entries: |             for entry in entries: | ||||||
|                 curr = Currency(**entry).copy() |                 curr = Currency(**entry).copy() | ||||||
|  | @ -366,13 +395,22 @@ class Client: | ||||||
|         dict[str, KucoinMktPair], |         dict[str, KucoinMktPair], | ||||||
|         bidict[str, KucoinMktPair], |         bidict[str, KucoinMktPair], | ||||||
|     ]: |     ]: | ||||||
|         entries = await self._request('GET', 'symbols') |         entries = await self._request( | ||||||
|  |             'GET', | ||||||
|  |             endpoint='symbols', | ||||||
|  |         ) | ||||||
|         log.info(f' {len(entries)} Kucoin market pairs fetched') |         log.info(f' {len(entries)} Kucoin market pairs fetched') | ||||||
| 
 | 
 | ||||||
|         pairs: dict[str, KucoinMktPair] = {} |         pairs: dict[str, KucoinMktPair] = {} | ||||||
|         fqmes2mktids: bidict[str, str] = bidict() |         fqmes2mktids: bidict[str, str] = bidict() | ||||||
|         for item in entries: |         for item in entries: | ||||||
|  |             try: | ||||||
|                 pair = pairs[item['name']] = KucoinMktPair(**item) |                 pair = pairs[item['name']] = KucoinMktPair(**item) | ||||||
|  |             except TypeError as te: | ||||||
|  |                 raise TypeError( | ||||||
|  |                     '`KucoinMktPair` and response fields do not match ??\n' | ||||||
|  |                     f'{KucoinMktPair.fields_diff(item)}\n' | ||||||
|  |                 ) from te | ||||||
|             fqmes2mktids[ |             fqmes2mktids[ | ||||||
|                 item['name'].lower().replace('-', '') |                 item['name'].lower().replace('-', '') | ||||||
|             ] = pair.name |             ] = pair.name | ||||||
|  | @ -567,10 +605,18 @@ def fqme_to_kucoin_sym( | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def get_client() -> AsyncGenerator[Client, None]: | async def get_client() -> AsyncGenerator[Client, None]: | ||||||
|     client = Client() |     ''' | ||||||
|  |     Load an API `Client` preconfigured from user settings | ||||||
| 
 | 
 | ||||||
|     async with trio.open_nursery() as n: |     ''' | ||||||
|         n.start_soon(client.get_mkt_pairs) |     async with ( | ||||||
|  |         httpx.AsyncClient( | ||||||
|  |             base_url='https://api.kucoin.com/api', | ||||||
|  |         ) as trio_client, | ||||||
|  |     ): | ||||||
|  |         client = Client(httpx_client=trio_client) | ||||||
|  |         async with trio.open_nursery() as tn: | ||||||
|  |             tn.start_soon(client.get_mkt_pairs) | ||||||
|             await client.get_currencies() |             await client.get_currencies() | ||||||
| 
 | 
 | ||||||
|         yield client |         yield client | ||||||
|  | @ -609,7 +655,7 @@ async def open_ping_task( | ||||||
|                 await trio.sleep((ping_interval - 1000) / 1000) |                 await trio.sleep((ping_interval - 1000) / 1000) | ||||||
|                 await ws.send_msg({'id': connect_id, 'type': 'ping'}) |                 await ws.send_msg({'id': connect_id, 'type': 'ping'}) | ||||||
| 
 | 
 | ||||||
|         log.info('Starting ping task for kucoin ws connection') |         log.warning('Starting ping task for kucoin ws connection') | ||||||
|         n.start_soon(ping_server) |         n.start_soon(ping_server) | ||||||
| 
 | 
 | ||||||
|         yield |         yield | ||||||
|  | @ -621,9 +667,14 @@ async def open_ping_task( | ||||||
| async def get_mkt_info( | async def get_mkt_info( | ||||||
|     fqme: str, |     fqme: str, | ||||||
| 
 | 
 | ||||||
| ) -> tuple[MktPair, KucoinMktPair]: | ) -> tuple[ | ||||||
|  |     MktPair, | ||||||
|  |     KucoinMktPair, | ||||||
|  | ]: | ||||||
|     ''' |     ''' | ||||||
|     Query for and return a `MktPair` and `KucoinMktPair`. |     Query for and return both a `piker.accounting.MktPair` and | ||||||
|  |     `KucoinMktPair` from provided `fqme: str` | ||||||
|  |     (fully-qualified-market-endpoint). | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     async with open_cached_client('kucoin') as client: |     async with open_cached_client('kucoin') as client: | ||||||
|  | @ -698,6 +749,8 @@ async def stream_quotes( | ||||||
| 
 | 
 | ||||||
|         log.info(f'Starting up quote stream(s) for {symbols}') |         log.info(f'Starting up quote stream(s) for {symbols}') | ||||||
|         for sym_str in symbols: |         for sym_str in symbols: | ||||||
|  |             mkt: MktPair | ||||||
|  |             pair: KucoinMktPair | ||||||
|             mkt, pair = await get_mkt_info(sym_str) |             mkt, pair = await get_mkt_info(sym_str) | ||||||
|             init_msgs.append( |             init_msgs.append( | ||||||
|                 FeedInit(mkt_info=mkt) |                 FeedInit(mkt_info=mkt) | ||||||
|  | @ -705,7 +758,11 @@ async def stream_quotes( | ||||||
| 
 | 
 | ||||||
|         ws: NoBsWs |         ws: NoBsWs | ||||||
|         token, ping_interval = await client._get_ws_token() |         token, ping_interval = await client._get_ws_token() | ||||||
|         connect_id = str(uuid4()) |         log.info(f'API reported ping_interval: {ping_interval}\n') | ||||||
|  | 
 | ||||||
|  |         connect_id: str = str(uuid4()) | ||||||
|  |         typ: str | ||||||
|  |         quote: dict | ||||||
|         async with ( |         async with ( | ||||||
|             open_autorecon_ws( |             open_autorecon_ws( | ||||||
|                 ( |                 ( | ||||||
|  | @ -719,20 +776,37 @@ async def stream_quotes( | ||||||
|                 ), |                 ), | ||||||
|             ) as ws, |             ) as ws, | ||||||
|             open_ping_task(ws, ping_interval, connect_id), |             open_ping_task(ws, ping_interval, connect_id), | ||||||
|             aclosing(stream_messages(ws, sym_str)) as msg_gen, |             aclosing( | ||||||
|  |                 iter_normed_quotes( | ||||||
|  |                     ws, sym_str | ||||||
|  |                 ) | ||||||
|  |             ) as iter_quotes, | ||||||
|         ): |         ): | ||||||
|             typ, quote = await anext(msg_gen) |             typ, quote = await anext(iter_quotes) | ||||||
| 
 | 
 | ||||||
|             while typ != 'trade': |  | ||||||
|             # take care to not unblock here until we get a real |             # take care to not unblock here until we get a real | ||||||
|                 # trade quote |             # trade quote? | ||||||
|                 typ, quote = await anext(msg_gen) |             # ^TODO, remove this right? | ||||||
|  |             # -[ ] what often blocks chart boot/new-feed switching | ||||||
|  |             #   since we're waiting for a live quote instead of just | ||||||
|  |             #   loading history afap.. | ||||||
|  |             #  |_ XXX, not sure if we require a bit of rework to core | ||||||
|  |             #    feed init logic or if backends just gotta be | ||||||
|  |             #    changed up.. feel like there was some causality | ||||||
|  |             #    dilemma prolly only seen with IB too.. | ||||||
|  |             # while typ != 'trade': | ||||||
|  |             #     typ, quote = await anext(iter_quotes) | ||||||
| 
 | 
 | ||||||
|             task_status.started((init_msgs, quote)) |             task_status.started((init_msgs, quote)) | ||||||
|             feed_is_live.set() |             feed_is_live.set() | ||||||
| 
 | 
 | ||||||
|             async for typ, msg in msg_gen: |             # XXX NOTE, DO NOT include the `.<backend>` suffix! | ||||||
|                 await send_chan.send({sym_str: msg}) |             # OW the sampling loop will not broadcast correctly.. | ||||||
|  |             # since `bus._subscribers.setdefault(bs_fqme, set())` | ||||||
|  |             # is used inside `.data.open_feed_bus()` !!! | ||||||
|  |             topic: str = mkt.bs_fqme | ||||||
|  |             async for typ, quote in iter_quotes: | ||||||
|  |                 await send_chan.send({topic: quote}) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
|  | @ -787,7 +861,7 @@ async def subscribe( | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def stream_messages( | async def iter_normed_quotes( | ||||||
|     ws: NoBsWs, |     ws: NoBsWs, | ||||||
|     sym: str, |     sym: str, | ||||||
| 
 | 
 | ||||||
|  | @ -818,6 +892,9 @@ async def stream_messages( | ||||||
| 
 | 
 | ||||||
|                 yield 'trade', { |                 yield 'trade', { | ||||||
|                     'symbol': sym, |                     'symbol': sym, | ||||||
|  |                     # TODO, is 'last' even used elsewhere/a-good | ||||||
|  |                     # semantic? can't we just read the ticks with our | ||||||
|  |                     # .data.ticktools.frame_ticks()`/ | ||||||
|                     'last': trade_data.price, |                     'last': trade_data.price, | ||||||
|                     'brokerd_ts': last_trade_ts, |                     'brokerd_ts': last_trade_ts, | ||||||
|                     'ticks': [ |                     'ticks': [ | ||||||
|  | @ -910,7 +987,7 @@ async def open_history_client( | ||||||
|             if end_dt is None: |             if end_dt is None: | ||||||
|                 inow = round(time.time()) |                 inow = round(time.time()) | ||||||
| 
 | 
 | ||||||
|                 print( |                 log.debug( | ||||||
|                     f'difference in time between load and processing' |                     f'difference in time between load and processing' | ||||||
|                     f'{inow - times[-1]}' |                     f'{inow - times[-1]}' | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|  | @ -0,0 +1,49 @@ | ||||||
|  | piker.clearing | ||||||
|  | ______________ | ||||||
|  | trade execution-n-control subsys for both live and paper trading as | ||||||
|  | well as algo-trading manual override/interaction across any backend | ||||||
|  | broker and data provider. | ||||||
|  | 
 | ||||||
|  | avail UIs | ||||||
|  | ********* | ||||||
|  | 
 | ||||||
|  | order ctl | ||||||
|  | --------- | ||||||
|  | the `piker.clearing` subsys is exposed mainly through | ||||||
|  | the `piker chart` GUI as a "chart trader" style UX and | ||||||
|  | is automatically enabled whenever a chart is opened. | ||||||
|  | 
 | ||||||
|  | .. ^TODO, more prose here! | ||||||
|  | 
 | ||||||
|  | the "manual" order control features are exposed via the | ||||||
|  | `piker.ui.order_mode` API and can pretty much always be | ||||||
|  | used (at least) in simulated-trading mode, aka "paper"-mode, and | ||||||
|  | the micro-manual is as follows: | ||||||
|  | 
 | ||||||
|  | ``order_mode`` ( | ||||||
|  |     edge triggered activation by any of the following keys, | ||||||
|  |     ``mouse-click`` on y-level to submit at that price | ||||||
|  |     ): | ||||||
|  | 
 | ||||||
|  |     - ``f``/ ``ctl-f`` to stage buy | ||||||
|  |     - ``d``/ ``ctl-d`` to stage sell | ||||||
|  |     - ``a`` to stage alert | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | ``search_mode`` ( | ||||||
|  |     ``ctl-l`` or ``ctl-space`` to open, | ||||||
|  |     ``ctl-c`` or ``ctl-space`` to close | ||||||
|  |     ) : | ||||||
|  | 
 | ||||||
|  |     - begin typing to have symbol search automatically look up | ||||||
|  |       symbols from all loaded backend (broker) providers | ||||||
|  |     - arrow keys and mouse click to navigate selection | ||||||
|  |     - vi-like ``ctl-[hjkl]`` for navigation | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | position (pp) mgmt | ||||||
|  | ------------------ | ||||||
|  | you can also configure your position allocation limits from the | ||||||
|  | sidepane. | ||||||
|  | 
 | ||||||
|  | .. ^TODO, explain and provide tut once more refined! | ||||||
|  | @ -104,14 +104,15 @@ def get_app_dir( | ||||||
|     # `tractor`) with the testing dir and check for it whenever we |     # `tractor`) with the testing dir and check for it whenever we | ||||||
|     # detect `pytest` is being used (which it isn't under normal |     # detect `pytest` is being used (which it isn't under normal | ||||||
|     # operation). |     # operation). | ||||||
|     if "pytest" in sys.modules: |     # if "pytest" in sys.modules: | ||||||
|         import tractor |     #     import tractor | ||||||
|         actor = tractor.current_actor(err_on_no_runtime=False) |     #     actor = tractor.current_actor(err_on_no_runtime=False) | ||||||
|         if actor:  # runtime is up |     #     if actor:  # runtime is up | ||||||
|             rvs = tractor._state._runtime_vars |     #         rvs = tractor._state._runtime_vars | ||||||
|             testdirpath = Path(rvs['piker_vars']['piker_test_dir']) |     #         import pdbp; pdbp.set_trace() | ||||||
|             assert testdirpath.exists(), 'piker test harness might be borked!?' |     #         testdirpath = Path(rvs['piker_vars']['piker_test_dir']) | ||||||
|             app_name = str(testdirpath) |     #         assert testdirpath.exists(), 'piker test harness might be borked!?' | ||||||
|  |     #         app_name = str(testdirpath) | ||||||
| 
 | 
 | ||||||
|     if platform.system() == 'Windows': |     if platform.system() == 'Windows': | ||||||
|         key = "APPDATA" if roaming else "LOCALAPPDATA" |         key = "APPDATA" if roaming else "LOCALAPPDATA" | ||||||
|  |  | ||||||
							
								
								
									
203 pyproject.toml
							|  | @ -15,8 +15,8 @@ | ||||||
| # You should have received a copy of the GNU Affero General Public License | # You should have received a copy of the GNU Affero General Public License | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
| [build-system] | [build-system] | ||||||
| requires = ["poetry-core"] | requires = ["hatchling"] | ||||||
| build-backend = "poetry.core.masonry.api" | build-backend = "hatchling.build" | ||||||
| 
 | 
 | ||||||
| # ------ - ------ | # ------ - ------ | ||||||
| 
 | 
 | ||||||
|  | @ -34,104 +34,14 @@ ignore = [] | ||||||
| 
 | 
 | ||||||
| # ------ - ------ | # ------ - ------ | ||||||
| 
 | 
 | ||||||
| [tool.poetry] |  | ||||||
| name = "piker" |  | ||||||
| version = "0.1.0.alpha0.dev0" |  | ||||||
| description = "trading gear for hackers" |  | ||||||
| authors = ["Tyler Goodlet <goodboy_foss@protonmail.com>"] |  | ||||||
| license = "AGPLv3" |  | ||||||
| readme = "README.rst" |  | ||||||
| 
 |  | ||||||
| # ------ - ------ |  | ||||||
| 
 |  | ||||||
| [tool.poetry.dependencies] |  | ||||||
| async-generator = "^1.10" |  | ||||||
| attrs = "^23.1.0" |  | ||||||
| bidict = "^0.22.1" |  | ||||||
| colorama = "^0.4.6" |  | ||||||
| colorlog = "^6.7.0" |  | ||||||
| cython = "^3.0.0" |  | ||||||
| greenback = "^1.1.1" |  | ||||||
| ib-insync = "^0.9.86" |  | ||||||
| msgspec = "^0.18.0" |  | ||||||
| numba = "^0.59.0" |  | ||||||
| numpy = "^1.25" |  | ||||||
| polars = "^0.18.13" |  | ||||||
| pygments = "^2.16.1" |  | ||||||
| python = ">=3.11, <3.13" |  | ||||||
| rich = "^13.5.2" |  | ||||||
| # setuptools = "^68.0.0" |  | ||||||
| tomli = "^2.0.1" |  | ||||||
| tomli-w = "^1.0.0" |  | ||||||
| trio-util = "^0.7.0" |  | ||||||
| trio-websocket = "^0.10.3" |  | ||||||
| typer = "^0.9.0" |  | ||||||
| rapidfuzz = "^3.5.2" |  | ||||||
| pdbp = "^1.5.0" |  | ||||||
| trio = "^0.24" |  | ||||||
| pendulum = "^3.0.0" |  | ||||||
| httpx = "^0.27.0" |  | ||||||
| 
 |  | ||||||
| [tool.poetry.dependencies.tractor] |  | ||||||
| develop = true |  | ||||||
| git = 'https://github.com/goodboy/tractor.git' |  | ||||||
| branch = 'asyncio_debugger_support' |  | ||||||
| # path = "../tractor" |  | ||||||
| 
 |  | ||||||
| [tool.poetry.dependencies.asyncvnc] |  | ||||||
| git = 'https://github.com/pikers/asyncvnc.git' |  | ||||||
| branch = 'main' |  | ||||||
| 
 |  | ||||||
| [tool.poetry.dependencies.tomlkit] |  | ||||||
| develop = true |  | ||||||
| git = 'https://github.com/pikers/tomlkit.git' |  | ||||||
| branch = 'piker_pin' |  | ||||||
| # path = "../tomlkit/" |  | ||||||
| 
 |  | ||||||
| [tool.poetry.group.uis] |  | ||||||
| optional = true |  | ||||||
| [tool.poetry.group.uis.dependencies] |  | ||||||
| # https://python-poetry.org/docs/managing-dependencies/#dependency-groups |  | ||||||
| # TODO: make sure the levenshtein shit compiles on nix.. |  | ||||||
| # rapidfuzz = {extras = ["speedup"], version = "^0.18.0"} |  | ||||||
| rapidfuzz = "^3.2.0" |  | ||||||
| qdarkstyle = ">=3.0.2" |  | ||||||
| pyqtgraph = { git = 'https://github.com/pikers/pyqtgraph.git' } |  | ||||||
| 
 |  | ||||||
| # ------ - ------ |  | ||||||
| pyqt6 = "^6.7.0" |  | ||||||
| 
 |  | ||||||
| [tool.poetry.group.dev] |  | ||||||
| optional = true |  | ||||||
| [tool.poetry.group.dev.dependencies] |  | ||||||
| # testing / CI |  | ||||||
| pytest = "^6.0.0" |  | ||||||
| elasticsearch = "^8.9.0" |  | ||||||
| xonsh = "^0.14.2" |  | ||||||
| prompt-toolkit = "3.0.40" |  | ||||||
| 
 |  | ||||||
| # console ehancements and eventually remote debugging |  | ||||||
| # extras/helpers. |  | ||||||
| # TODO: add a toolset that makes debugging a `pikerd` service |  | ||||||
| # (tree) easy to hack on directly using more or less the local env: |  | ||||||
| # - xonsh + xxh |  | ||||||
| # - rsyscall + pdbp |  | ||||||
| # - actor runtime control console like BEAM/OTP |  | ||||||
| 
 |  | ||||||
| # ------ - ------ |  | ||||||
| 
 |  | ||||||
| # TODO: add an `--only daemon` group for running non-ui / pikerd |  | ||||||
| # service tree in distributed mode B) |  | ||||||
| # https://python-poetry.org/docs/managing-dependencies/#installing-group-dependencies |  | ||||||
| # [tool.poetry.group.daemon.dependencies] |  | ||||||
| 
 |  | ||||||
| [tool.poetry.scripts] |  | ||||||
| piker = 'piker.cli:cli' |  | ||||||
| pikerd = 'piker.cli:pikerd' |  | ||||||
| ledger = 'piker.accounting.cli:ledger' |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| [project] | [project] | ||||||
|  | name = "piker" | ||||||
|  | version = "0.1.0a0dev0" | ||||||
|  | description = "trading gear for hackers" | ||||||
|  | authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }] | ||||||
|  | requires-python = ">=3.12, <3.13" | ||||||
|  | license = "AGPL-3.0-or-later" | ||||||
|  | readme = "README.rst" | ||||||
| keywords = [ | keywords = [ | ||||||
|     "async", |     "async", | ||||||
|     "trading", |     "trading", | ||||||
|  | @ -140,15 +50,98 @@ keywords=[ | ||||||
|     "charting", |     "charting", | ||||||
| ] | ] | ||||||
| classifiers = [ | classifiers = [ | ||||||
|   'Development Status :: 3 - Alpha', |     "Development Status :: 3 - Alpha", | ||||||
|     "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", |     "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", | ||||||
|   'Operating System :: POSIX :: Linux', |     "Operating System :: POSIX :: Linux", | ||||||
|     "Programming Language :: Python :: Implementation :: CPython", |     "Programming Language :: Python :: Implementation :: CPython", | ||||||
|     "Programming Language :: Python :: 3 :: Only", |     "Programming Language :: Python :: 3 :: Only", | ||||||
|     "Programming Language :: Python :: 3.11", |     "Programming Language :: Python :: 3.11", | ||||||
|     "Programming Language :: Python :: 3.12", |     "Programming Language :: Python :: 3.12", | ||||||
|   'Intended Audience :: Financial and Insurance Industry', |     "Intended Audience :: Financial and Insurance Industry", | ||||||
|   'Intended Audience :: Science/Research', |     "Intended Audience :: Science/Research", | ||||||
|   'Intended Audience :: Developers', |     "Intended Audience :: Developers", | ||||||
|   'Intended Audience :: Education', |     "Intended Audience :: Education", | ||||||
| ] | ] | ||||||
|  | dependencies = [ | ||||||
|  |     "async-generator >=1.10, <2.0.0", | ||||||
|  |     "attrs >=23.1.0, <24.0.0", | ||||||
|  |     "bidict >=0.22.1, <0.23.0", | ||||||
|  |     "colorama >=0.4.6, <0.5.0", | ||||||
|  |     "colorlog >=6.7.0, <7.0.0", | ||||||
|  |     "ib-insync >=0.9.86, <0.10.0", | ||||||
|  |     "numba >=0.59.0, <0.60.0", | ||||||
|  |     "numpy >=1.25, <2.0", | ||||||
|  |     "polars >=0.18.13, <0.19.0", | ||||||
|  |     "pygments >=2.16.1, <3.0.0", | ||||||
|  |     "rich >=13.5.2, <14.0.0", | ||||||
|  |     "tomli >=2.0.1, <3.0.0", | ||||||
|  |     "tomli-w >=1.0.0, <2.0.0", | ||||||
|  |     "trio-util >=0.7.0, <0.8.0", | ||||||
|  |     "trio-websocket >=0.10.3, <0.11.0", | ||||||
|  |     "typer >=0.9.0, <1.0.0", | ||||||
|  |     "rapidfuzz >=3.5.2, <4.0.0", | ||||||
|  |     "pdbp >=1.5.0, <2.0.0", | ||||||
|  |     "trio >=0.24, <0.25", | ||||||
|  |     "pendulum >=3.0.0, <4.0.0", | ||||||
|  |     "httpx >=0.27.0, <0.28.0", | ||||||
|  |     "cryptofeed >=2.4.0, <3.0.0", | ||||||
|  |     "pyarrow >=17.0.0, <18.0.0", | ||||||
|  |     "websockets ==12.0", | ||||||
|  |     "msgspec", | ||||||
|  |     "tractor", | ||||||
|  |     "asyncvnc", | ||||||
|  |     "tomlkit", | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | [project.optional-dependencies] | ||||||
|  | uis = [ | ||||||
|  |     # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies | ||||||
|  |     # TODO: make sure the levenshtein shit compiles on nix.. | ||||||
|  |     # rapidfuzz = {extras = ["speedup"], version = "^0.18.0"} | ||||||
|  |     "rapidfuzz >=3.2.0, <4.0.0", | ||||||
|  |     "qdarkstyle >=3.0.2, <4.0.0", | ||||||
|  |     "pyqt6 >=6.7.0, <7.0.0", | ||||||
|  |     "pyqtgraph", | ||||||
|  | 
 | ||||||
|  |     # ------ - ------ | ||||||
|  | 
 | ||||||
|  |     # TODO: add an `--only daemon` group for running non-ui / pikerd | ||||||
|  |     # service tree in distributed mode B) | ||||||
|  |     # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies | ||||||
|  |     # [project.optional-dependencies] | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | [dependency-groups] | ||||||
|  | dev = [ | ||||||
|  |     "pytest >=6.0.0, <7.0.0", | ||||||
|  |     "elasticsearch >=8.9.0, <9.0.0", | ||||||
|  |     "xonsh >=0.14.2, <0.15.0", | ||||||
|  |     "prompt-toolkit ==3.0.40", | ||||||
|  |     "cython >=3.0.0, <4.0.0", | ||||||
|  |     "greenback >=1.1.1, <2.0.0", | ||||||
|  |     # console enhancements and eventually remote debugging | ||||||
|  |     # extras/helpers. | ||||||
|  |     # TODO: add a toolset that makes debugging a `pikerd` service | ||||||
|  |     # (tree) easy to hack on directly using more or less the local env: | ||||||
|  |     # - xonsh + xxh | ||||||
|  |     # - rsyscall + pdbp | ||||||
|  |     # - actor runtime control console like BEAM/OTP | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | [project.scripts] | ||||||
|  | piker = "piker.cli:cli" | ||||||
|  | pikerd = "piker.cli:pikerd" | ||||||
|  | ledger = "piker.accounting.cli:ledger" | ||||||
|  | 
 | ||||||
|  | [tool.hatch.build.targets.sdist] | ||||||
|  | include = ["piker"] | ||||||
|  | 
 | ||||||
|  | [tool.hatch.build.targets.wheel] | ||||||
|  | include = ["piker"] | ||||||
|  | 
 | ||||||
|  | [tool.uv.sources] | ||||||
|  | pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" } | ||||||
|  | asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" } | ||||||
|  | tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch ="piker_pin" } | ||||||
|  | msgspec = { git = "https://github.com/jcrist/msgspec.git" } | ||||||
|  | tractor = { path = "../tractor", editable = true } | ||||||
|  |  | ||||||