Compare commits
No commits in common. "gitea_feats" and "brokers_config" have entirely different histories.
gitea_feats
...
brokers_config

@ -1,60 +1,50 @@

name: CI

on:
  # Triggers the workflow on push or pull request events but only for the master branch
  pull_request:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:


jobs:

  # test that we can generate a software distribution and install it
  # thus avoid missing file issues after packaging.
  sdist-linux:
    name: 'sdist'
  basic_install:
    name: 'pip install'
    runs-on: ubuntu-latest

    steps:

      - name: Checkout
        uses: actions/checkout@v3
        uses: actions/checkout@v2
        with:
          ref: master

      - name: Setup python
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'
          python-version: '3.9'

      - name: Build sdist
        run: python setup.py sdist --formats=zip
      - name: Install dependencies
        run: pip install -e . --upgrade-strategy eager -r requirements.txt

      - name: Install sdist from .zips
        run: python -m pip install dist/*.zip
      - name: Run piker cli
        run: piker

  testing:
    name: 'install + test-suite'
    timeout-minutes: 10
    name: 'test suite'
    runs-on: ubuntu-latest
    steps:

      - name: Checkout
        uses: actions/checkout@v3

      # elastic only
      # - name: Build DB container
      #   run: docker build -t piker:elastic dockering/elastic
        uses: actions/checkout@v2

      - name: Setup python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'

      # elastic only
      # - name: Install dependencies
      #   run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
          python-version: '3.9'

      - name: Install dependencies
        run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager

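
(the sdist job above exists to catch missing-file issues before a user
ever installs; the same check can be run locally with a stdlib sketch,
where the dist/ path assumes ``python setup.py sdist --formats=zip``
was just run)::

    # list the contents of a freshly built sdist zip to eyeball
    # whether any package files went missing during packaging
    import glob
    import zipfile

    path = glob.glob('dist/*.zip')[0]
    with zipfile.ZipFile(path) as z:
        for name in z.namelist():
            print(name)
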
@ -97,9 +97,6 @@ ENV/

# mkdocs documentation
/site

# extra scripts dir
/snippets

# mypy
.mypy_cache/
.vscode/settings.json

README.rst

@ -1,161 +1,133 @@

piker
-----
trading gear for hackers
trading gear for hackers.

|gh_actions|

.. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fpikers%2Fpiker%2Fbadge&style=popout-square
    :target: https://actions-badge.atrox.dev/piker/pikers/goto

``piker`` is a broker agnostic, next-gen FOSS toolset and runtime for
real-time computational trading targeted at `hardcore Linux users
<comp_trader>`_ .
``piker`` is a broker agnostic, next-gen FOSS toolset for real-time
computational trading targeted at `hardcore Linux users <comp_trader>`_ .

we use much bleeding edge tech including (but not limited to):
we use as much bleeding edge tech as possible including (but not limited to):

- latest python for glue_
- uv_ for packaging and distribution
- trio_ & tractor_ for our distributed `structured concurrency`_ runtime
- Qt_ for pristine low latency UIs
- pyqtgraph_ (which we've extended) for real-time charting and graphics
- ``polars`` ``numpy`` and ``numba`` for redic `fast numerics`_
- `apache arrow and parquet`_ for time-series storage
- trio_ for `structured concurrency`_
- tractor_ for distributed, multi-core, real-time streaming
- marketstore_ for historical and real-time tick data persistence and sharing
- techtonicdb_ for L2 book storage
- Qt_ for pristine high performance UIs
- pyqtgraph_ for real-time charting
- ``numpy`` and ``numba`` for `fast numerics`_

potential projects we might integrate with soon,

- (already prototyped in ) techtonicdb_ for L2 book storage

.. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/
.. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue
.. _uv: https://docs.astral.sh/uv/
.. |travis| image:: https://img.shields.io/travis/pikers/piker/master.svg
    :target: https://travis-ci.org/pikers/piker
.. _trio: https://github.com/python-trio/trio
.. _tractor: https://github.com/goodboy/tractor
.. _structured concurrency: https://trio.discourse.group/
.. _marketstore: https://github.com/alpacahq/marketstore
.. _techtonicdb: https://github.com/0b01/tectonicdb
.. _Qt: https://www.qt.io/
.. _pyqtgraph: https://github.com/pyqtgraph/pyqtgraph
.. _apache arrow and parquet: https://arrow.apache.org/faq/
.. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue
.. _fast numerics: https://zerowithdot.com/python-numpy-and-pandas-performance/
.. _techtonicdb: https://github.com/0b01/tectonicdb
.. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/


focus and feats:
focus and features:
*******************
- 100% federated: your code, your hardware, your data feeds, your broker fills.
- zero web: low latency, native software that doesn't try to re-invent the OS
- maximal **privacy**: prevent brokers and mms from knowing your
  planz; smack their spreads with dark volume.
- zero clutter: modal, context oriented UIs that eschew minimalism, reduce
  thought noise and encourage un-emotion.
- first class parallelism: built from the ground up on next-gen structured concurrency
  primitives.
- traders first: broker/exchange/asset-class agnostic
- systems grounded: real-time financial signal processing that will
  make any queuing or DSP eng juice their shorts.
- non-tina UX: sleek, powerful keyboard driven interaction with expected use in tiling wms
- data collaboration: every process and protocol is multi-host scalable.
- fight club ready: zero interest in adoption by suits; no corporate friendly license, ever.

fitting with these tenets, we're always open to new framework suggestions and ideas.

building the best looking, most reliable, keyboard friendly trading
platform is the dream; join the cause.


install
*******
``piker`` is currently under heavy pre-alpha development and as such
should be cloned from this repo and hacked on directly.

for a development install::

    git clone git@github.com:pikers/piker.git
    cd piker
    virtualenv env
    source ./env/bin/activate
    pip install -r requirements.txt -e .


provider support
****************
fitting with these tenets, we're always open to new
framework/lib/service interop suggestions and ideas!
for live data feeds the in-progress set of supported brokers is:

- **100% federated**:
  your code, your hardware, your data feeds, your broker fills.
- IB_ via ``ib_insync``
- binance_ and kraken_ for crypto over their public websocket API
- questrade_ (ish) which comes with effectively free L1

- **zero web**:
  low latency as a prime objective, native UIs and modern IPC
  protocols without trying to re-invent the "OS-as-an-app"..
coming soon...

- **maximal privacy**:
  prevent brokers and mms from knowing your planz; smack their
  spreads with dark volume from a VPN tunnel.
- webull_ via the reverse engineered public API
- yahoo via yliveticker_

- **zero clutter**:
  modal, context oriented UIs that eschew minimalism, reduce thought
  noise and encourage un-emotion.
if you want your broker supported and they have an API let us know.

- **first class parallelism**:
  built from the ground up on a next-gen structured concurrency
  supervision sys.

- **traders first**:
  broker/exchange/venue/asset-class/money-sys agnostic

- **systems grounded**:
  real-time financial signal processing (fsp) that will make any
  queuing or DSP eng juice their shorts.

- **non-tina UX**:
  sleek, powerful keyboard driven interaction with expected use in
  tiling wms (or maybe even a DDE).

- **data collab at scale**:
  every actor-process and protocol is multi-host aware.

- **fight club ready**:
  zero interest in adoption by suits; no corporate friendly license,
  ever.

building the hottest looking, fastest, most reliable, keyboard
friendly FOSS trading platform is the dream; join the cause.

.. _IB: https://interactivebrokers.github.io/tws-api/index.html
.. _questrade: https://www.questrade.com/api/documentation
.. _kraken: https://www.kraken.com/features/api#public-market-data
.. _binance: https://github.com/pikers/piker/pull/182
.. _webull: https://github.com/tedchou12/webull
.. _yliveticker: https://github.com/yahoofinancelive/yliveticker
.. _coinbase: https://docs.pro.coinbase.com/#websocket-feed


a sane install with `uv`
************************
bc why install with `python` when you can faster with `rust` ::
check out our charts
********************
bet you weren't expecting this from the foss bby::

    uv lock
    piker -l info -b kraken -b binance chart btcusdt.binance --pdb


hacky install on nixos
**********************
``NixOS`` is our core devs' distro of choice for which we offer
a stringently defined development shell environment that can be loaded with::

    nix-shell default.nix

this runs the main chart in debug mode.


start a chart
*************
run a realtime OHLCV chart stand-alone::

    piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken

this runs a chart UI (with 1m sampled OHLCV) and shows 2 spot markets from 2 diff cexes
overlayed on the same graph. Use of `piker` without first starting
a daemon (`pikerd` - see below) means there is an implicit spawning of the
multi-actor-runtime (implemented as a `tractor` app).
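
the supervision structure that runtime builds on can be sketched with
plain trio_; a minimal, illustrative example (the task names and prints
here are made up, this is not piker's actual API)::

    # structured-concurrency sketch: child tasks live and die with
    # the nursery that started them and errors propagate upward.
    import trio

    async def data_feed(name: str) -> None:
        # stand-in for a ``datad``-style streaming task
        for _ in range(3):
            await trio.sleep(1)
            print(f'{name}: tick')

    async def main() -> None:
        async with trio.open_nursery() as nursery:
            nursery.start_soon(data_feed, 'binance')
            nursery.start_soon(data_feed, 'kraken')
            # leaving the block waits for (or cancels) all children

    trio.run(main)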

For additional subsystem feats available through our chart UI see the
various sub-readmes:

- order control using a mouse-n-keyboard UX B)
- cross venue market-pair (what most call "symbol") search, select, overlay Bo
- financial-signal-processing (`piker.fsp`) write-n-reload to sub-chart BO
- src-asset derivatives scan for anal, like the infamous "max pain" XO


spawn a daemon standalone
*************************
we call the root actor-process the ``pikerd``. it can be (and is
recommended normally to be) started separately from the ``piker
chart`` program::
run in distributed mode
***********************
start the service manager and data feed daemon in the background and
connect to it::

    pikerd -l info --pdb

the daemon does nothing until a ``piker``-client (like ``piker
chart``) connects and requests some particular sub-system. for
a connecting chart ``pikerd`` will spawn and manage at least,

- a data-feed daemon: ``datad`` which does all the work of comms with
  the backend provider (in this case the ``binance`` cex).
- a paper-trading engine instance, ``paperboi.binance``, (if no live
  account has been configured) which allows for auto/manual order
  control against the live quote stream.

connect your chart::

*using* an actor-service (aka micro-daemon) manager which dynamically
supervises various sub-subsystems-as-services throughout the ``piker``
runtime-stack.

    piker -l info -b kraken -b binance chart xmrusdt.binance --pdb

now you can (implicitly) connect your chart::

    piker chart btcusdt.spot.binance

enjoy persistent real-time data feeds tied to daemon lifetime.

since ``pikerd`` was started separately you can now enjoy a persistent
real-time data stream tied to the daemon-tree's lifetime. i.e. the next
time you spawn a chart it will obviously not only load much faster
(since the underlying ``datad.binance`` is left running with its
in-memory IPC data structures) but also the data-feed and any order
mgmt states should be persistent until you finally cancel ``pikerd``.
key-bindings and mouse interaction are currently only documented in the
code base. help us write some docs dawg.


if anyone asks you what this project is about
*********************************************
you don't talk about it; just use it.
you don't talk about it.


how do i get involved?

@ -165,15 +137,4 @@ enter the matrix.

how come there ain't that many docs
***********************************
i mean we want/need them but building the core right has been higher
prio than marketing (and likely will stay that way Bp).

soo, suck it up bc,

- no one is trying to sell you on anything
- learning the code base is prolly way more valuable
- the UI/UXs are intended to be "intuitive" for any hacker..

we obviously need tonz help so if you want to start somewhere and
can't necessarily write "advanced" concurrent python/rust code, then
helping document literally anything might be the place for you!
suck it up, learn the code; no one is trying to sell you on anything.

@ -1,85 +1,25 @@

################
# ---- CEXY ----
################
[binance]
accounts.paper = 'paper'

accounts.usdtm = 'futes'
futes.use_testnet = false
futes.api_key = ''
futes.api_secret = ''

accounts.spot = 'spot'
spot.use_testnet = false
spot.api_key = ''
spot.api_secret = ''


[deribit]
key_id = ''
key_secret = ''


[kraken]
key_descr = ''
api_key = ''
secret = ''


[kucoin]
key_id = ''
key_secret = ''
key_passphrase = ''


################
# -- BROKERZ ---
################
[questrade]
refresh_token = ''
access_token = ''
api_server = 'https://api06.iq.questrade.com/'
refresh_token = ""
access_token = ""
api_server = "https://api06.iq.questrade.com/"
expires_in = 1800
token_type = 'Bearer'
token_type = "Bearer"
expires_at = 1616095326.355846

[kraken]
key_descr = "api_0"
public_key = ""
private_key = ""

[ib]
hosts = [
    '127.0.0.1',
]
# XXX: the order in which ports will be scanned
# (by the `brokerd` daemon-actor) is determined
# by the line order here.
# TODO: when we eventually spawn gateways in our
# container, we can just dynamically allocate these
# using IBC.
ports = [
    4002,  # gw
    7497,  # tws
]

# XXX: for a paper account the flex web query service
# is not supported so you have to manually download
# an XML report and put it in a location that can be
# accessed by the ``brokerd.ib`` backend code for parsing.
flex_token = ''
flex_trades_query_id = ''  # live account

# when clients are being scanned this determines
# which clients are preferred to be used for data
# feeds based on the order of account names, if
# detected as active on an API client.
prefer_data_account = [
    'paper',
    'margin',
    'ira',
]
host = "127.0.0.1"

[ib.accounts]
# the order in which accounts will be selectable
# in the order mode UI (if found via clients during
# API-app scanning) when a new symbol is loaded.
paper = 'XX0000000'
margin = 'X0000000'
ira = 'X0000000'
margin = ""
registered = ""
paper = ""

[ib.ports]
gw = 4002
tws = 7497
order = [ "gw", "tws",]
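
(either side's version of the file above is plain TOML; with python
3.11+ it can be read via the stdlib, e.g. assuming one side's file is
saved locally as ``brokers.toml``, a path chosen here purely for
illustration)::

    # minimal sketch: load a piker-style brokers.toml with stdlib tomllib
    import tomllib

    with open('brokers.toml', 'rb') as f:  # tomllib requires binary mode
        conf = tomllib.load(f)

    # dotted keys nest, so `accounts.paper = 'paper'` lands at:
    print(conf['binance']['accounts']['paper'])
    print(conf['ib']['hosts'])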

@ -1,12 +0,0 @@

[network]
tsdb.backend = 'marketstore'
tsdb.host = 'localhost'
tsdb.grpc_port = 5995

[ui]
# set custom font + size which will scale entire UI
# font_size = 16
# font_name = 'Monospaced'

# colorscheme = 'default'  # UNUSED
# graphics.update_throttle = 60  # Hz  # TODO

default.nix

@ -1,134 +0,0 @@

with (import <nixpkgs> {});
let
  glibStorePath = lib.getLib glib;
  zlibStorePath = lib.getLib zlib;
  zstdStorePath = lib.getLib zstd;
  dbusStorePath = lib.getLib dbus;
  libGLStorePath = lib.getLib libGL;
  freetypeStorePath = lib.getLib freetype;
  qt6baseStorePath = lib.getLib qt6.qtbase;
  fontconfigStorePath = lib.getLib fontconfig;
  libxkbcommonStorePath = lib.getLib libxkbcommon;
  xcbutilcursorStorePath = lib.getLib xcb-util-cursor;

  qtpyStorePath = lib.getLib python312Packages.qtpy;
  pyqt6StorePath = lib.getLib python312Packages.pyqt6;
  pyqt6SipStorePath = lib.getLib python312Packages.pyqt6-sip;
  rapidfuzzStorePath = lib.getLib python312Packages.rapidfuzz;
  qdarkstyleStorePath = lib.getLib python312Packages.qdarkstyle;

  xorgLibX11StorePath = lib.getLib xorg.libX11;
  xorgLibxcbStorePath = lib.getLib xorg.libxcb;
  xorgxcbutilwmStorePath = lib.getLib xorg.xcbutilwm;
  xorgxcbutilimageStorePath = lib.getLib xorg.xcbutilimage;
  xorgxcbutilerrorsStorePath = lib.getLib xorg.xcbutilerrors;
  xorgxcbutilkeysymsStorePath = lib.getLib xorg.xcbutilkeysyms;
  xorgxcbutilrenderutilStorePath = lib.getLib xorg.xcbutilrenderutil;
in
stdenv.mkDerivation {
  name = "piker-qt6-uv";
  buildInputs = [
    # System requirements.
    glib
    zlib
    dbus
    zstd
    libGL
    freetype
    qt6.qtbase
    libgcc.lib
    fontconfig
    libxkbcommon

    # Xorg requirements
    xcb-util-cursor
    xorg.libxcb
    xorg.libX11
    xorg.xcbutilwm
    xorg.xcbutilimage
    xorg.xcbutilerrors
    xorg.xcbutilkeysyms
    xorg.xcbutilrenderutil

    # Python requirements.
    python312Full
    python312Packages.uv
    python312Packages.qdarkstyle
    python312Packages.rapidfuzz
    python312Packages.pyqt6
    python312Packages.qtpy
  ];
  src = null;
  shellHook = ''
    set -e

    # Set the Qt plugin path
    # export QT_DEBUG_PLUGINS=1

    QTBASE_PATH="${qt6baseStorePath}/lib"
    QT_PLUGIN_PATH="$QTBASE_PATH/qt-6/plugins"
    QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms"

    LIB_GCC_PATH="${libgcc.lib}/lib"
    GLIB_PATH="${glibStorePath}/lib"
    ZSTD_PATH="${zstdStorePath}/lib"
    ZLIB_PATH="${zlibStorePath}/lib"
    DBUS_PATH="${dbusStorePath}/lib"
    LIBGL_PATH="${libGLStorePath}/lib"
    FREETYPE_PATH="${freetypeStorePath}/lib"
    FONTCONFIG_PATH="${fontconfigStorePath}/lib"
    LIB_XKB_COMMON_PATH="${libxkbcommonStorePath}/lib"

    XCB_UTIL_CURSOR_PATH="${xcbutilcursorStorePath}/lib"
    XORG_LIB_X11_PATH="${xorgLibX11StorePath}/lib"
    XORG_LIB_XCB_PATH="${xorgLibxcbStorePath}/lib"
    XORG_XCB_UTIL_IMAGE_PATH="${xorgxcbutilimageStorePath}/lib"
    XORG_XCB_UTIL_WM_PATH="${xorgxcbutilwmStorePath}/lib"
    XORG_XCB_UTIL_RENDER_UTIL_PATH="${xorgxcbutilrenderutilStorePath}/lib"
    XORG_XCB_UTIL_KEYSYMS_PATH="${xorgxcbutilkeysymsStorePath}/lib"
    XORG_XCB_UTIL_ERRORS_PATH="${xorgxcbutilerrorsStorePath}/lib"

    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QTBASE_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_PLUGIN_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_QPA_PLATFORM_PLUGIN_PATH"

    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_GCC_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$DBUS_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GLIB_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZLIB_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZSTD_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIBGL_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FONTCONFIG_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FREETYPE_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_XKB_COMMON_PATH"

    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XCB_UTIL_CURSOR_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_X11_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_XCB_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_IMAGE_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_WM_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_RENDER_UTIL_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_KEYSYMS_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_ERRORS_PATH"

    export LD_LIBRARY_PATH

    RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages"
    QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages"
    QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages"
    PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages"
    PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages"

    PATCH="$PATCH:$RPDFUZZ_PATH"
    PATCH="$PATCH:$QDRKSTYLE_PATH"
    PATCH="$PATCH:$QTPY_PATH"
    PATCH="$PATCH:$PYQT6_PATH"
    PATCH="$PATCH:$PYQT6_SIP_PATH"

    export PATCH

    # Install deps
    uv lock
  '';
}

develop.nix

@ -1,47 +0,0 @@

with (import <nixpkgs> {});

stdenv.mkDerivation {
  name = "poetry-env";
  buildInputs = [
    # System requirements.
    readline

    # TODO: hacky non-poetry install stuff we need to get rid of!!
    poetry
    # virtualenv
    # setuptools
    # pip

    # Python requirements (enough to get a virtualenv going).
    python311Full

    # obviously, and see below for hacked linking
    python311Packages.pyqt5
    python311Packages.pyqt5_sip
    # python311Packages.qtpy

    # numerics deps
    python311Packages.levenshtein
    python311Packages.fastparquet
    python311Packages.polars
  ];
  # environment.sessionVariables = {
  #   LD_LIBRARY_PATH = "${pkgs.stdenv.cc.cc.lib}/lib";
  # };
  src = null;
  shellHook = ''
    # Allow the use of wheels.
    SOURCE_DATE_EPOCH=$(date +%s)

    # Augment the dynamic linker path
    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${R}/lib/R/lib:${readline}/lib
    export QT_QPA_PLATFORM_PLUGIN_PATH="${qt5.qtbase.bin}/lib/qt-${qt5.qtbase.version}/plugins";

    if [ ! -d ".venv" ]; then
      poetry install --with uis
    fi

    poetry shell
  '';
}

@ -1,11 +0,0 @@

FROM elasticsearch:7.17.4

ENV ES_JAVA_OPTS "-Xms2g -Xmx2g"
ENV ELASTIC_USERNAME "elastic"
ENV ELASTIC_PASSWORD "password"

COPY elasticsearch.yml /usr/share/elasticsearch/config/

RUN printf "password" | ./bin/elasticsearch-keystore add -f -x "bootstrap.password"

EXPOSE 19200

@ -1,5 +0,0 @@

network.host: 0.0.0.0

http.port: 19200

discovery.type: single-node
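
once the container is up, the custom port wired through the two files
above can be smoke-tested from stdlib python (assuming x-pack security
is left off, as this yml implies)::

    # ping the elasticsearch root endpoint on the custom port
    import json
    import urllib.request

    with urllib.request.urlopen('http://localhost:19200', timeout=5) as rsp:
        info = json.load(rsp)

    print(info['version']['number'])  # expect '7.17.4' per the Dockerfile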

@ -1,30 +0,0 @@

running ``ib`` gateway in ``docker``
------------------------------------
We have a config based on the (now defunct)
image from "waytrade":

https://github.com/waytrade/ib-gateway-docker

To start up this image with our custom settings
simply run the command::

    docker compose up

And you should have the following socket-available services:

- ``x11vnc1@127.0.0.1:3003``
- ``ib-gw@127.0.0.1:4002``

You can attach to the container via a VNC client
without password auth.

SECURITY STUFF!?!?!
-------------------
Though "``ib``" claims they host filter connections outside
localhost (aka ``127.0.0.1``) it's probably better if you filter
the socket at the OS level using a stateless firewall rule::

    ip rule add not unicast iif lo to 0.0.0.0/0 dport 4002

We will soon have this baked into our own custom image but for
now you'll have to do it urself dawgy.
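
a quick way to verify the gateway's API socket actually came up (plain
stdlib python, with the port number taken from the compose file's paper
gateway; this is not piker code)::

    # probe the ib-gw API socket exposed by the container above
    import socket

    try:
        with socket.create_connection(('127.0.0.1', 4002), timeout=2):
            print('ib-gw API socket is up')
    except OSError as exc:
        print(f'no gateway listening on 4002: {exc}')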

@ -1,114 +0,0 @@

# rework from the original @
# https://github.com/waytrade/ib-gateway-docker/blob/master/docker-compose.yml
version: "3.5"


services:

  ib_gw_paper:

    # apparently java is a mega cukc:
    # https://stackoverflow.com/a/56895801
    # https://bugs.openjdk.org/browse/JDK-8150460
    ulimits:
      # nproc: 65535
      nproc: 6000
      nofile:
        soft: 2000
        hard: 3000

    # other image tags available:
    # https://github.com/waytrade/ib-gateway-docker#supported-tags
    # image: waytrade/ib-gateway:1012.2i
    image: ghcr.io/gnzsnz/ib-gateway:latest

    restart: 'no'  # restart on boot whenev there's a crash or user clicks
    network_mode: 'host'

    volumes:
      - type: bind
        source: ./jts.ini
        target: /root/Jts/jts.ini
        # don't let IBC clobber this file for
        # the main reason of not having a stupid
        # timezone set..
        read_only: true

      # force our own IBC config
      - type: bind
        source: ./ibc.ini
        target: /root/ibc/config.ini

      # force our noop script - socat isn't needed in host mode.
      - type: bind
        source: ./fork_ports_delayed.sh
        target: /root/scripts/fork_ports_delayed.sh

      # force our noop script - socat isn't needed in host mode.
      - type: bind
        source: ./run_x11_vnc.sh
        target: /root/scripts/run_x11_vnc.sh
        read_only: true

    # NOTE: to fill these out, define an `.env` file in the same dir as
    # this compose file which looks something like:
    # TWS_USERID='myuser'
    # TWS_PASSWORD='guest'
    environment:
      TWS_USERID: ${TWS_USERID}
      TWS_PASSWORD: ${TWS_PASSWORD}
      TRADING_MODE: 'paper'
      VNC_SERVER_PASSWORD: 'doggy'
      VNC_SERVER_PORT: '3003'

    # ports:
    #   - target: 4002
    #     host_ip: 127.0.0.1
    #     published: 4002
    #     protocol: tcp

    # original mappings for use in non-host-mode
    # which we won't really need going forward since
    # ideally we just pick the port to have ib-gw listen
    # on **when** we spawn the container - i.e. everything
    # will be driven by a ``brokers.toml`` def.
    # - "127.0.0.1:4001:4001"
    # - "127.0.0.1:4002:4002"
    # - "127.0.0.1:5900:5900"

  # ib_gw_live:
  #   image: waytrade/ib-gateway:1012.2i
  #   restart: no
  #   network_mode: 'host'

  #   volumes:
  #     - type: bind
  #       source: ./jts_live.ini
  #       target: /root/jts/jts.ini
  #       # don't let ibc clobber this file for
  #       # the main reason of not having a stupid
  #       # timezone set..
  #       read_only: true

  #     # force our own ibc config
  #     - type: bind
  #       source: ./ibc.ini
  #       target: /root/ibc/config.ini

  #     # force our noop script - socat isn't needed in host mode.
  #     - type: bind
  #       source: ./fork_ports_delayed.sh
  #       target: /root/scripts/fork_ports_delayed.sh

  #     # force our noop script - socat isn't needed in host mode.
  #     - type: bind
  #       source: ./run_x11_vnc.sh
  #       target: /root/scripts/run_x11_vnc.sh
  #       read_only: true

  #   # NOTE: to fill these out, define an `.env` file in the same dir as
  #   # this compose file which looks something like:
  #   environment:
  #     TRADING_MODE: 'live'
  #     VNC_SERVER_PASSWORD: 'doggy'
  #     VNC_SERVER_PORT: '3004'

@ -1,6 +0,0 @@

#!/bin/sh

# we now just set this to a noop script
# since we can just run the container in
# `network_mode: 'host'` and get literally
# the exact same behaviour XD

@ -1,927 +0,0 @@
|
|||
# Note that in the comments in this file, TWS refers to both the Trader
|
||||
# Workstation and the IB Gateway, unless explicitly stated otherwise.
|
||||
#
|
||||
# When referred to below, the default value for a setting is the value
|
||||
# assumed if either the setting is included but no value is specified, or
|
||||
# the setting is not included at all.
|
||||
#
|
||||
# IBC may also be used to start the FIX CTCI Gateway. All settings
|
||||
# relating to this have names prefixed with FIX.
|
||||
#
|
||||
# The IB API Gateway and the FIX CTCI Gateway share the same code. Which
|
||||
# gateway actually runs is governed by an option on the initial gateway
|
||||
# login screen. The FIX setting described under IBC Startup
|
||||
# Settings below controls this.
|
||||
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# 1. IBC Startup Settings
|
||||
# =============================================================================
|
||||
|
||||
|
||||
# IBC may be used to start the IB Gateway for the FIX CTCI. This
|
||||
# setting must be set to 'yes' if you want to run the FIX CTCI gateway. The
|
||||
# default is 'no'.
|
||||
|
||||
FIX=no
|
||||
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# 2. Authentication Settings
|
||||
# =============================================================================
|
||||
|
||||
# TWS and the IB API gateway require a single username and password.
|
||||
# You may specify the username and password using the following settings:
|
||||
#
|
||||
# IbLoginId
|
||||
# IbPassword
|
||||
#
|
||||
# Alternatively, you can specify the username and password in the command
|
||||
# files used to start TWS or the Gateway, but this is not recommended for
|
||||
# security reasons.
|
||||
#
|
||||
# If you don't specify them, you will be prompted for them in the usual
|
||||
# login dialog when TWS starts (but whatever you have specified will be
|
||||
# included in the dialog automatically: for example you may specify the
|
||||
# username but not the password, and then you will be prompted for the
|
||||
# password via the login dialog). Note that if you specify either
|
||||
# the username or the password (or both) in the command file, then
|
||||
# IbLoginId and IbPassword settings defined in this file are ignored.
|
||||
#
|
||||
#
|
||||
# The FIX CTCI gateway requires one username and password for FIX order
|
||||
# routing, and optionally a separate username and password for market
|
||||
# data connections. You may specify the usernames and passwords using
|
||||
# the following settings:
|
||||
#
|
||||
# FIXLoginId
|
||||
# FIXPassword
|
||||
# IbLoginId (optional - for market data connections)
|
||||
# IbPassword (optional - for market data connections)
|
||||
#
|
||||
# Alternatively you can specify the FIX username and password in the
|
||||
# command file used to start the FIX CTCI Gateway, but this is not
|
||||
# recommended for security reasons.
|
||||
#
|
||||
# If you don't specify them, you will be prompted for them in the usual
|
||||
# login dialog when FIX CTCI gateway starts (but whatever you have
|
||||
# specified will be included in the dialog automatically: for example
|
||||
# you may specify the usernames but not the passwords, and then you will
|
||||
# be prompted for the passwords via the login dialog). Note that if you
|
||||
# specify either the FIX username or the FIX password (or both) on the
|
||||
# command line, then FIXLoginId and FIXPassword settings defined in this
|
||||
# file are ignored; he same applies to the market data username and
|
||||
# password.
|
||||
|
||||
# IB API Authentication Settings
|
||||
# ------------------------------
|
||||
|
||||
# Your TWS username:
|
||||
|
||||
IbLoginId=
|
||||
|
||||
|
||||
# Your TWS password:
|
||||
|
||||
IbPassword=
|
||||
|
||||
|
||||
# FIX CTCI Authentication Settings
|
||||
# --------------------------------
|
||||
|
||||
# Your FIX CTCI username:
|
||||
|
||||
FIXLoginId=
|
||||
|
||||
|
||||
# Your FIX CTCI password:
|
||||
|
||||
FIXPassword=
|
||||
|
||||
|
||||
# Second Factor Authentication Settings
|
||||
# -------------------------------------
|
||||
|
||||
# If you have enabled more than one second factor authentication
|
||||
# device, TWS presents a list from which you must select the device
|
||||
# you want to use for this login. You can use this setting to
|
||||
# instruct IBC to select a particular item in the list on your
|
||||
# behalf. Note that you must spell this value exactly as it appears
|
||||
# in the list. If no value is set, you must manually select the
|
||||
# relevant list entry.
|
||||
|
||||
SecondFactorDevice=
|
||||
|
||||
|
||||
# If you use the IBKR Mobile app for second factor authentication,
|
||||
# and you fail to complete the process before the time limit imposed
|
||||
# by IBKR, this setting tells IBC whether to automatically restart
|
||||
# the login sequence, giving you another opportunity to complete
|
||||
# second factor authentication.
|
||||
#
|
||||
# Permitted values are 'yes' and 'no'.
|
||||
#
|
||||
# If this setting is not present or has no value, then the value
|
||||
# of the deprecated ExitAfterSecondFactorAuthenticationTimeout is
|
||||
# used instead. If this also has no value, then this setting defaults
|
||||
# to 'no'.
|
||||
#
|
||||
# NB: you must be using IBC v3.14.0 or later to use this setting:
|
||||
# earlier versions ignore it.
|
||||
|
||||
ReloginAfterSecondFactorAuthenticationTimeout=
|
||||
|
||||
|
||||
# This setting is only relevant if
|
||||
# ReloginAfterSecondFactorAuthenticationTimeout is set to 'yes',
|
||||
# or if ExitAfterSecondFactorAuthenticationTimeout is set to 'yes'.
|
||||
#
|
||||
# It controls how long (in seconds) IBC waits for login to complete
|
||||
# after the user acknowledges the second factor authentication
|
||||
# alert at the IBKR Mobile app. If login has not completed after
|
||||
# this time, IBC terminates.
|
||||
# The default value is 60.
|
||||
|
||||
SecondFactorAuthenticationExitInterval=
|
||||
|
||||
|
||||
# This setting specifies the timeout for second factor authentication
|
||||
# imposed by IB. The value is in seconds. You should not change this
|
||||
# setting unless you have reason to believe that IB has changed the
|
||||
# timeout. The default value is 180.
|
||||
|
||||
SecondFactorAuthenticationTimeout=180
|
||||
|
||||
|
||||
# DEPRECATED SETTING
|
||||
# ------------------
|
||||
#
|
||||
# ExitAfterSecondFactorAuthenticationTimeout - THIS SETTING WILL BE
|
||||
# REMOVED IN A FUTURE RELEASE. For IBC version 3.14.0 and later, see
|
||||
# the notes for ReloginAfterSecondFactorAuthenticationTimeout above.
|
||||
#
|
||||
# For IBC versions earlier than 3.14.0: If you use the IBKR Mobile
|
||||
# app for second factor authentication, and you fail to complete the
|
||||
# process before the time limit imposed by IBKR, you can use this
|
||||
# setting to tell IBC to exit: arrangements can then be made to
|
||||
# automatically restart IBC in order to initiate the login sequence
|
||||
# afresh. Otherwise, manual intervention at TWS's
|
||||
# Second Factor Authentication dialog is needed to complete the
|
||||
# login.
|
||||
#
|
||||
# Permitted values are 'yes' and 'no'. The default is 'no'.
|
||||
#
|
||||
# Note that the scripts provided with the IBC zips for Windows and
|
||||
# Linux provide options to automatically restart in these
|
||||
# circumstances, but only if this setting is also set to 'yes'.
|
||||
|
||||
ExitAfterSecondFactorAuthenticationTimeout=no
|
||||
|
||||
|
||||
# Trading Mode
|
||||
# ------------
|
||||
#
|
||||
# This indicates whether the live account or the paper trading
|
||||
# account corresponding to the supplied credentials is to be used.
|
||||
# The allowed values are 'live' (the default) and 'paper'.
|
||||
#
|
||||
# If this is set to 'live', then the credentials for the live
|
||||
# account must be supplied. If it is set to 'paper', then either
|
||||
# the live or the paper-trading credentials may be supplied.
|
||||
|
||||
TradingMode=paper
|
||||
|
||||
|
||||
# Paper-trading Account Warning
|
||||
# -----------------------------
|
||||
#
|
||||
# Logging in to a paper-trading account results in TWS displaying
|
||||
# a dialog asking the user to confirm that they are aware that this
|
||||
# is not a brokerage account. Until this dialog has been accepted,
|
||||
# TWS will not allow API connections to succeed. Setting this
|
||||
# to 'yes' (the default) will cause IBC to automatically
|
||||
# confirm acceptance. Setting it to 'no' will leave the dialog
|
||||
# on display, and the user will have to deal with it manually.
|
||||
|
||||
AcceptNonBrokerageAccountWarning=yes
|
||||
|
||||
|
||||
# Login Dialog Display Timeout
|
||||
#-----------------------------
|
||||
#
|
||||
# In some circumstances, starting TWS may result in failure to display
|
||||
# the login dialog. Restarting TWS may help to resolve this situation,
|
||||
# and IBC does this automatically.
|
||||
#
|
||||
# This setting controls how long (in seconds) IBC waits for the login
|
||||
# dialog to appear before restarting TWS.
|
||||
#
|
||||
# Note that in normal circumstances with a reasonably specified
|
||||
# computer the time to displaying the login dialog is typically less
|
||||
# than 20 seconds, and frequently much less. However many factors can
|
||||
# influence this, and it is unwise to set this value too low.
|
||||
#
|
||||
# The default value is 60.
|
||||
|
||||
LoginDialogDisplayTimeout=60
|
||||
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# 3. TWS Startup Settings
|
||||
# =============================================================================
|
||||
|
||||
# Path to settings store
|
||||
# ----------------------
|
||||
#
|
||||
# Path to the directory where TWS should store its settings. This is
|
||||
# normally the folder in which TWS is installed. However you may set
|
||||
# it to some other location if you wish (for example if you want to
|
||||
# run multiple instances of TWS with different settings).
|
||||
#
|
||||
# It is recommended for clarity that you use an absolute path. The
|
||||
# effect of using a relative path is undefined.
|
||||
#
|
||||
# Linux and macOS users should use the appropriate path syntax.
|
||||
#
|
||||
# Note that, for Windows users, you MUST use double separator
|
||||
# characters to separate the elements of the folder path: for
|
||||
# example, IbDir=C:\\IBLiveSettings is valid, but
|
||||
# IbDir=C:\IBLiveSettings is NOT valid and will give unexpected
|
||||
# results. Linux and macOS users need not use double separators,
|
||||
# but they are acceptable.
|
||||
#
|
||||
# The default is the current working directory when IBC is
|
||||
# started, unless the TWS_SETTINGS_PATH setting in the relevant
|
||||
# start script is set.
|
||||
#
|
||||
# If both this setting and TWS_SETTINGS_PATH are set, then this
|
||||
# setting takes priority. Note that if they have different values,
|
||||
# auto-restart will not work.
|
||||
#
|
||||
# NB: this setting is now DEPRECATED. You should use the
|
||||
# TWS_SETTINGS_PATH setting in the relevant start script.
|
||||
|
||||
IbDir=/root/Jts
|
||||
|
||||
|
||||
# Store settings on server
|
||||
# ------------------------
|
||||
#
|
||||
# If you wish to store a copy of your TWS settings on IB's
|
||||
# servers as well as locally on your computer, set this to
|
||||
# 'yes': this enables you to run TWS on different computers
|
||||
# with the same configuration, market data lines, etc. If set
|
||||
# to 'no', running TWS on different computers will not share the
|
||||
# same settings. If no value is specified, TWS will obtain its
|
||||
# settings from the same place as the last time this user logged
|
||||
# in (whether manually or using IBC).
|
||||
|
||||
StoreSettingsOnServer=
|
||||
|
||||
|
||||
# Minimize TWS on startup
|
||||
# -----------------------
|
||||
#
|
||||
# Set to 'yes' to minimize TWS when it starts:
|
||||
|
||||
MinimizeMainWindow=no
|
||||
|
||||
|
||||
# Existing Session Detected Action
|
||||
# --------------------------------
|
||||
#
|
||||
# When a user logs on to an IBKR account for trading purposes by any means, the
|
||||
# IBKR account server checks to see whether the account is already logged in
|
||||
# elsewhere. If so, a dialog is displayed to both the users that enables them
|
||||
# to determine what happens next. The 'ExistingSessionDetectedAction' setting
|
||||
# instructs TWS how to proceed when it displays this dialog:
|
||||
#
|
||||
# * If the new TWS session is set to 'secondary', the existing session continues
|
||||
# and the new session terminates. Thus a secondary TWS session can never
|
||||
# override any other session.
|
||||
#
|
||||
# * If the existing TWS session is set to 'primary', the existing session
|
||||
# continues and the new session terminates (even if the new session is also
|
||||
# set to primary). Thus a primary TWS session can never be overridden by
|
||||
# any new session).
|
||||
#
|
||||
# * If both the existing and the new TWS sessions are set to 'primaryoverride',
|
||||
# the existing session terminates and the new session proceeds.
|
||||
#
|
||||
# * If the existing TWS session is set to 'manual', the user must handle the
|
||||
# dialog.
|
||||
#
|
||||
# The difference between 'primary' and 'primaryoverride' is that a
|
||||
# 'primaryoverride' session can be overriden over by a new 'primary' session,
|
||||
# but a 'primary' session cannot be overriden by any other session.
|
||||
#
|
||||
# When set to 'primary', if another TWS session is started and manually told to
|
||||
# end the 'primary' session, the 'primary' session is automatically reconnected.
|
||||
#
|
||||
# The default is 'manual'.
|
||||
|
||||
ExistingSessionDetectedAction=primary
|
||||
|
||||
|
||||
# Override TWS API Port Number
|
||||
# ----------------------------
|
||||
#
|
||||
# If OverrideTwsApiPort is set to an integer, IBC changes the
|
||||
# 'Socket port' in TWS's API configuration to that number shortly
|
||||
# after startup (but note that for the FIX Gateway, this setting is
|
||||
# actually stored in jts.ini rather than the Gateway's settings
|
||||
# file). Leaving the setting blank will make no change to
|
||||
# the current setting. This setting is only intended for use in
|
||||
# certain specialized situations where the port number needs to
|
||||
# be set dynamically at run-time, and for the FIX Gateway: most
|
||||
# non-FIX users will never need it, so don't use it unless you know
|
||||
# you need it.
|
||||
|
||||
OverrideTwsApiPort=4000
|
||||
|
||||
|
||||
# Override TWS Master Client ID
|
||||
# -----------------------------
|
||||
#
|
||||
# If OverrideTwsMasterClientID is set to an integer, IBC changes the
|
||||
# 'Master Client ID' value in TWS's API configuration to that
|
||||
# value shortly after startup. Leaving the setting blank will make
|
||||
# no change to the current setting. This setting is only intended
|
||||
# for use in certain specialized situations where the value needs to
|
||||
# be set dynamically at run-time: most users will never need it,
|
||||
# so don't use it unless you know you need it.
|
||||
|
||||
OverrideTwsMasterClientID=
|
||||
|
||||
|
||||
# Read-only Login
|
||||
# ---------------
|
||||
#
|
||||
# If ReadOnlyLogin is set to 'yes', and the user is enrolled in IB's
|
||||
# account security programme, the user will not be asked to perform
|
||||
# the second factor authentication action, and login to TWS will
|
||||
# occur automatically in read-only mode: in this mode, placing or
|
||||
# managing orders is not allowed.
|
||||
#
|
||||
# If set to 'no', and the user is enrolled in IB's account security
|
||||
# programme, the second factor authentication process is handled
|
||||
# according to the Second Factor Authentication Settings described
|
||||
# elsewhere in this file.
|
||||
#
|
||||
# If the user is not enrolled in IB's account security programme,
|
||||
# this setting is ignored. The default is 'no'.
|
||||
|
||||
ReadOnlyLogin=no
|
||||
|
||||
|
||||
# Read-only API
|
||||
# -------------
|
||||
#
|
||||
# If ReadOnlyApi is set to 'yes', API programs cannot submit, modify
|
||||
# or cancel orders. If set to 'no', API programs can do these things.
|
||||
# If not set, the existing TWS/Gateway configuration is unchanged.
|
||||
# NB: this setting is really only supplied for the benefit of new TWS
|
||||
# or Gateway instances that are being automatically installed and
|
||||
# started without user intervention (eg Docker containers). Where
|
||||
# a user is involved, they should use the Global Configuration to
|
||||
# set the relevant checkbox (this only needs to be done once) and
|
||||
# not provide a value for this setting.
|
||||
|
||||
ReadOnlyApi=
|
||||
|
||||
|
||||
# API Precautions
|
||||
# ---------------
|
||||
#
|
||||
# These settings relate to the corresponding 'Precautions' checkboxes in the
|
||||
# API section of the Global Configuration dialog.
|
||||
#
|
||||
# For all of these, the accepted values are:
|
||||
# - 'yes' sets the checkbox
|
||||
# - 'no' clears the checkbox
|
||||
# - if not set, the existing TWS/Gateway configuration is unchanged
|
||||
#
|
||||
# NB: thess settings are really only supplied for the benefit of new TWS
|
||||
# or Gateway instances that are being automatically installed and
|
||||
# started without user intervention, or where user settings are not preserved
|
||||
# between sessions (eg some Docker containers). Where a user is involved, they
|
||||
# should use the Global Configuration to set the relevant checkboxes and not
|
||||
# provide values for these settings.
|
||||
|
||||
BypassOrderPrecautions=
|
||||
|
||||
BypassBondWarning=
|
||||
|
||||
BypassNegativeYieldToWorstConfirmation=
|
||||
|
||||
BypassCalledBondWarning=
|
||||
|
||||
BypassSameActionPairTradeWarning=
|
||||
|
||||
BypassPriceBasedVolatilityRiskWarning=
|
||||
|
||||
BypassUSStocksMarketDataInSharesWarning=
|
||||
|
||||
BypassRedirectOrderWarning=
|
||||
|
||||
BypassNoOverfillProtectionPrecaution=
|
||||
|
||||
|
||||
# Market data size for US stocks - lots or shares
|
||||
# -----------------------------------------------
|
||||
#
|
||||
# Since IB introduced the option of market data for US stocks showing
|
||||
# bid, ask and last sizes in shares rather than lots, TWS and Gateway
|
||||
# display a dialog immediately after login notifying the user about
|
||||
# this and requiring user input before allowing market data to be
|
||||
# accessed. The user can request that the dialog not be shown again.
|
||||
#
|
||||
# It is recommended that the user should handle this dialog manually
|
||||
# rather than using these settings, which are provided for situations
|
||||
# where the user interface is not easily accessible, or where user
|
||||
# settings are not preserved between sessions (eg some Docker images).
|
||||
#
|
||||
# - If this setting is set to 'accept', the dialog will be handled
|
||||
# automatically and the option to not show it again will be
|
||||
# selected.
|
||||
#
|
||||
# Note that in this case, the only way to allow the dialog to be
|
||||
# displayed again is to manually enable the 'Bid, Ask and Last
|
||||
# Size Display Update' message in the 'Messages' section of the TWS
|
||||
# configuration dialog. So you should only use 'Accept' if you are
|
||||
# sure you really don't want the dialog to be displayed again, or
|
||||
# you have easy access to the user interface.
|
||||
#
|
||||
# - If set to 'defer', the dialog will be handled automatically (so
|
||||
# that market data will start), but the option to not show it again
|
||||
# will not be selected, and it will be shown again after the next
|
||||
# login.
|
||||
#
|
||||
# - If set to 'ignore', the user has to deal with the dialog manually.
|
||||
#
|
||||
# The default value is 'ignore'.
|
||||
#
|
||||
# Note if set to 'accept' or 'defer', TWS also automatically sets
|
||||
# the API settings checkbox labelled 'Send market data in lots for
|
||||
# US stocks for dual-mode API clients'. IBC cannot prevent this.
|
||||
# However you can change this immmediately by setting
|
||||
# SendMarketDataInLotsForUSstocks (see below) to 'no' .
|
||||
|
||||
AcceptBidAskLastSizeDisplayUpdateNotification=accept
|
||||
|
||||
|
||||
# This setting determines whether the API settings checkbox labelled
|
||||
# 'Send market data in lots for US stocks for dual-mode API clients'
|
||||
# is set or cleared. If set to 'yes', the checkbox is set. If set to
|
||||
# 'no' the checkbox is cleared. If defaulted, the checkbox is
|
||||
# unchanged.
|
||||
|
||||
SendMarketDataInLotsForUSstocks=
|
||||
|
||||
|
||||
# Trusted API Client IPs
|
||||
# ----------------------
|
||||
#
|
||||
# NB: THIS SETTING IS ONLY RELEVANT FOR THE GATEWAY, AND ONLY WHEN FIX=yes.
|
||||
# In all other cases it is ignored.
|
||||
#
|
||||
# This is a list of IP addresses separated by commas. API clients with IP
|
||||
# addresses in this list are able to connect to the API without Gateway
|
||||
# generating the 'Incoming connection' popup.
|
||||
#
|
||||
# Note that 127.0.0.1 is always permitted to connect, so do not include it
|
||||
# in this setting.
|
||||
|
||||
TrustedTwsApiClientIPs=
|
||||
|
||||
|
||||
# Reset Order ID Sequence
|
||||
# -----------------------
|
||||
#
|
||||
# The setting resets the order id sequence for orders submitted via the API, so
|
||||
# that the next invocation of the `NextValidId` API callback will return the
|
||||
# value 1. The reset occurs when TWS starts.
|
||||
#
|
||||
# Note that order ids are reset for all API clients, except those that have
|
||||
# outstanding (ie incomplete) orders: their order id sequence carries on as
|
||||
# before.
|
||||
#
|
||||
# Valid values are 'yes', 'true', 'false' and 'no'. The default is 'no'.
|
||||
|
||||
ResetOrderIdsAtStart=
|
||||
|
||||
|
||||
# This setting specifies IBC's action when TWS displays the dialog asking for
|
||||
# confirmation of a request to reset the API order id sequence.
|
||||
#
|
||||
# Note that the Gateway never displays this dialog, so this setting is ignored
|
||||
# for a Gateway session.
|
||||
#
|
||||
# Valid values consist of two strings separated by a solidus '/'. The first
|
||||
# value specifies the action to take when the order id reset request resulted
|
||||
# from setting ResetOrderIdsAtStart=yes. The second specifies the action to
|
||||
# take when the order id reset request is a result of the user clicking the
|
||||
# 'Reset API order ID sequence' button in the API configuration. Each value
|
||||
# must be one of the following:
|
||||
#
|
||||
# 'confirm'
|
||||
# order ids will be reset
|
||||
#
|
||||
# 'reject'
|
||||
# order ids will not be reset
|
||||
#
|
||||
# 'ignore'
|
||||
# IBC will ignore the dialog. The user must take action.
|
||||
#
|
||||
# The default setting is ignore/ignore
|
||||
|
||||
# Examples:
|
||||
#
|
||||
# 'confirm/reject' - confirm order id reset only if ResetOrderIdsAtStart=yes
|
||||
# and reject any user-initiated requests
|
||||
#
|
||||
# 'ignore/confirm' - user must decide what to do if ResetOrderIdsAtStart=yes
|
||||
# and confirm user-initiated requests
|
||||
#
|
||||
# 'reject/ignore' - reject order id reset if ResetOrderIdsAtStart=yes but
|
||||
# allow user to handle user-initiated requests
|
||||
|
||||
ConfirmOrderIdReset=
|
||||
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# 4. TWS Auto-Logoff and Auto-Restart
|
||||
# =============================================================================
|
||||
#
|
||||
# TWS and Gateway insist on being restarted every day. Two alternative
|
||||
# automatic options are offered:
|
||||
#
|
||||
# - Auto-Logoff: at a specified time, TWS shuts down tidily, without
|
||||
# restarting.
|
||||
#
|
||||
# - Auto-Restart: at a specified time, TWS shuts down and then restarts
|
||||
# without the user having to re-autheticate.
|
||||
#
|
||||
# The normal way to configure the time at which this happens is via the Lock
|
||||
# and Exit section of the Configuration dialog. Once this time has been
|
||||
# configured in this way, the setting persists until the user changes it again.
|
||||
#
|
||||
# However, there are situations where there is no user available to do this
|
||||
# configuration, or where there is no persistent storage (for example some
|
||||
# Docker images). In such cases, the auto-restart or auto-logoff time can be
|
||||
# set whenever IBC starts with the settings below.
|
||||
#
|
||||
# The value, if specified, must be a time in HH:MM AM/PM format, for example
|
||||
# 08:00 AM or 10:00 PM. Note that there must be a single space between the
|
||||
# two parts of this value; also that midnight is "12:00 AM" and midday is
|
||||
# "12:00 PM".
|
||||
#
|
||||
# If no value is specified for either setting, the currently configured
|
||||
# settings will apply. If a value is supplied for one setting, the other
|
||||
# setting is cleared. If values are supplied for both settings, only the
|
||||
# auto-restart time is set, and the auto-logoff time is cleared.
|
||||
#
|
||||
# Note that for a normal TWS/Gateway installation with persistent storage
|
||||
# (for example on a desktop computer) the value will be persisted as if the
|
||||
# user had set it via the configuration dialog.
|
||||
#
|
||||
# If you choose to auto-restart, you should take note of the considerations
|
||||
# described at the link below. Note that where this information mentions
|
||||
# 'manual authentication', restarting IBC will do the job (IBKR does not
|
||||
# recognise the existence of IBC in its docuemntation).
|
||||
#
|
||||
# https://www.interactivebrokers.com/en/software/tws/twsguide.htm#usersguidebook/configuretws/auto_restart_info.htm
|
||||
#
|
||||
# If you use the "RESTART" command via the IBC command server, and IBC is
|
||||
# running any version of the Gateway (or a version of TWS earlier than 1018),
|
||||
# note that this will set the Auto-Restart time in Gateway/TWS's configuration
|
||||
# dialog to the time at which the restart actually happens (which may be up to
|
||||
# a minute after the RESTART command is issued). To prevent future auto-
|
||||
# restarts at this time, you must make sure you have set AutoLogoffTime or
|
||||
# AutoRestartTime to your desired value before running IBC. NB: this does not
|
||||
# apply to TWS from version 1018 onwards.
|
||||
|
||||
AutoLogoffTime=
|
||||
|
||||
AutoRestartTime=
|
||||
|
||||
|
||||
# =============================================================================
# 5. TWS Tidy Closedown Time
# =============================================================================
#
# Specifies a time at which TWS will close down tidily, with no restart.
#
# There is little reason to use this setting. It is similar to AutoLogoffTime,
# but can include a day-of-the-week, whereas AutoLogoffTime and AutoRestartTime
# apply every day. So for example you could use ClosedownAt in conjunction with
# AutoRestartTime to shut down TWS on Friday evenings after the markets
# close, without it running on Saturday as well.
#
# To tell IBC to tidily close TWS at a specified time every
# day, set this value to <hh:mm>, for example:
# ClosedownAt=22:00
#
# To tell IBC to tidily close TWS at a specified day and time
# each week, set this value to <dayOfWeek hh:mm>, for example:
# ClosedownAt=Friday 22:00
#
# Note that the day of the week must be specified using your
# default locale. Also note that Java will only accept
# characters encoded to ISO 8859-1 (Latin-1). This means that
# if the day name in your default locale uses any non-Latin-1
# characters you need to encode them using Unicode escapes
# (see http://java.sun.com/docs/books/jls/third_edition/html/lexical.html#3.3
# for details). For example, to tidily close TWS at 12:00 on
# Saturday where the default locale is Simplified Chinese,
# use the following:
# #ClosedownAt=\u661F\u671F\u516D 12:00

ClosedownAt=


# =============================================================================
# 6. Other TWS Settings
# =============================================================================

# Accept Incoming Connection
# --------------------------
#
# If set to 'accept', IBC automatically accepts incoming
# API connection dialogs. If set to 'reject', IBC
# automatically rejects incoming API connection dialogs. If
# set to 'manual', the user must decide whether to accept or reject
# incoming API connection dialogs. The default is 'manual'.
# NB: it is recommended to set this to 'reject', and to explicitly
# configure which IP addresses can connect to the API in TWS's API
# configuration page, as this is much more secure (in this case, no
# incoming API connection dialogs will occur for those IP addresses).

AcceptIncomingConnectionAction=reject


# Allow Blind Trading
# -------------------
#
# If you attempt to place an order for a contract for which
# you have no market data subscription, TWS displays a dialog
# to warn you against such blind trading.
#
# yes means the dialog is dismissed as though the user had
#     clicked the 'Ok' button: this means that you accept
#     the risk and want the order to be submitted.
#
# no  means the dialog remains on display and must be
#     handled by the user.

AllowBlindTrading=no


# Save Settings on a Schedule
# ---------------------------
#
# You can tell TWS to automatically save its settings on a schedule
# of your choosing. You can specify one or more specific times,
# like this:
#
# SaveTwsSettingsAt=HH:MM [ HH:MM]...
#
# for example:
# SaveTwsSettingsAt=08:00 12:30 17:30
#
# Or you can specify an interval at which settings are to be saved,
# optionally starting at a specific time and continuing until another
# time, like this:
#
# SaveTwsSettingsAt=Every n [{mins | hours}] [hh:mm] [hh:mm]
#
# where the first hh:mm is the start time and the second is the end
# time. If you don't specify the end time, settings are saved regularly
# from the start time till midnight. If you don't specify the start time,
# settings are saved regularly all day, beginning at 00:00. Note that
# settings will always be saved at the end time, even if that is not
# exactly one interval later than the previous time. If neither 'mins'
# nor 'hours' is specified, 'mins' is assumed. Examples:
#
# To save every 30 minutes all day starting at 00:00
# SaveTwsSettingsAt=Every 30
# SaveTwsSettingsAt=Every 30 mins
#
# To save every hour starting at 08:00 and ending at midnight
# SaveTwsSettingsAt=Every 1 hours 08:00
# SaveTwsSettingsAt=Every 1 hours 08:00 00:00
#
# To save every 90 minutes starting at 08:00 up to and including 17:43
# SaveTwsSettingsAt=Every 90 08:00 17:43

SaveTwsSettingsAt=


# Confirm Crypto Currency Orders Automatically
# --------------------------------------------
#
# When you place an order for a cryptocurrency contract, a dialog is displayed
# asking you to confirm that you want to place the order, and notifying you
# that you are placing an order to trade cryptocurrency with Paxos, a New York
# limited trust company, and not at Interactive Brokers.
#
# transmit means that the order will be placed automatically, and the
#          dialog will then be closed
#
# cancel   means that the order will not be placed, and the dialog will
#          then be closed
#
# manual   means that IBC will take no action and the user must deal
#          with the dialog

ConfirmCryptoCurrencyOrders=transmit



# =============================================================================
# 7. Settings Specific to Indian Versions of TWS
# =============================================================================

# Indian versions of TWS may display a password expiry
# notification dialog and an NSE Compliance dialog. These can be
# dismissed by setting the following to yes. By default the
# password expiry notice is not dismissed, but the NSE Compliance
# notice is dismissed.

# Warning: setting DismissPasswordExpiryWarning=yes will mean
# you will not be notified when your password is about to expire.
# You must then take other measures to ensure that your password
# is changed within the expiry period, otherwise IBC will
# not be able to login successfully.

DismissPasswordExpiryWarning=no
DismissNSEComplianceNotice=yes



# =============================================================================
# 8. IBC Command Server Settings
# =============================================================================

# Do NOT CHANGE THE FOLLOWING SETTINGS unless you
# intend to issue commands to IBC (for example
# using telnet). Note that these settings have nothing to
# do with running programs that use the TWS API.

# Command Server Port Number
# --------------------------
#
# The port number that IBC listens on for commands
# such as "STOP". DO NOT set this to the port number
# used for TWS API connections.
#
# The convention is to use 7462 for this port,
# but it must be set to a different value from any other
# IBC instance that might run at the same time.
#
# The default value is 0, which tells IBC not to start
# the command server.

#CommandServerPort=7462
CommandServerPort=0


# Permitted Command Sources
# -------------------------
#
# A comma separated list of IP addresses, or host names,
# which are allowed addresses for sending commands to
# IBC. Commands can always be sent from the
# same host as IBC is running on.

ControlFrom=


# Address for Receiving Commands
# ------------------------------
#
# Specifies the IP address on which the Command Server
# is to listen. For a multi-homed host, this can be used
# to specify that connection requests are only to be
# accepted on the specified address. The default is to
# accept connection requests on all local addresses.

BindAddress=


# Command Prompt
# --------------
#
# The specified string is output by the server when
# the connection is first opened and after the completion
# of each command. This can be useful if sending commands
# using an interactive program such as telnet. The default
# is that no prompt is output.
# For example:
#
# CommandPrompt=>

CommandPrompt=


# Suppress Command Server Info Messages
# -------------------------------------
#
# Some commands can return intermediate information about
# their progress. This setting controls whether such
# information is sent. The default is that such information
# is not sent.

SuppressInfoMessages=yes
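

# As a sketch of how the command server is typically used: with the
# server enabled and reachable (e.g. CommandServerPort=7462 and the
# sending host listed in ControlFrom), a command such as STOP can be
# sent with any plain TCP client, for example netcat (illustrative
# only, exact framing may vary):
#
#   printf 'STOP\n' | nc 127.0.0.1 7462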



# =============================================================================
# 9. Diagnostic Settings
# =============================================================================
#
# IBC can log information about the structure of windows
# displayed by TWS. This information is useful when adding
# new features to IBC or when behaviour is not as expected.
#
# The logged information shows the hierarchical organisation
# of all the components of the window, and includes the
# current values of text boxes and labels.
#
# Note that this structure logging has a small performance
# impact, and depending on the settings can cause the logfile
# size to be significantly increased. It is therefore
# recommended that the LogStructureWhen setting be set to
# 'never' (the default) unless there is a specific reason
# that this information is needed.


# Scope of Structure Logging
# --------------------------
#
# The LogStructureScope setting indicates which windows are
# eligible for structure logging:
#
#   - (default value) if set to 'known', only windows that
#     IBC recognizes are eligible - these are windows that
#     IBC has some interest in monitoring, usually to take
#     some action on the user's behalf;
#
#   - if set to 'unknown', only windows that IBC does not
#     recognize are eligible. Most windows displayed by
#     TWS fall into this category;
#
#   - if set to 'untitled', only windows that IBC does not
#     recognize and that have no title are eligible. These
#     are usually message boxes or similar small windows;
#
#   - if set to 'all', then every window displayed by TWS
#     is eligible.
#

LogStructureScope=known


# When to Log Window Structure
# ----------------------------
#
# The LogStructureWhen setting specifies the circumstances
# when eligible TWS windows have their structure logged:
#
#   - if set to 'open' or 'yes' or 'true', IBC logs the
#     structure of an eligible window the first time it
#     is encountered;
#
#   - if set to 'openclose', the structure is logged every
#     time an eligible window is opened or closed;
#
#   - if set to 'activate', the structure is logged every
#     time an eligible window is made active;
#
#   - (default value) if set to 'never' or 'no' or 'false',
#     structure information is never logged.
#
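# For example, to log the structure of unrecognized windows when they
# first open (an illustrative combination, e.g. when reporting a new
# TWS dialog that IBC does not yet handle):
#
# LogStructureScope=unknown
# LogStructureWhen=open
#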

LogStructureWhen=never


# DEPRECATED SETTING
# ------------------
#
# LogComponents - THIS SETTING WILL BE REMOVED IN A FUTURE
# RELEASE
#
# If LogComponents is set to any value, this is equivalent
# to setting LogStructureWhen to that same value and
# LogStructureScope to 'all': the actual values of those
# settings are ignored. The default is that the values
# of LogStructureScope and LogStructureWhen are honoured.

#LogComponents=

@ -1,33 +0,0 @@
[IBGateway]
ApiOnly=true
LocalServerPort=4002
# NOTE: must be set if using IBC's "reject" mode
TrustedIPs=127.0.0.1
; RemoteHostOrderRouting=ndc1.ibllc.com
; WriteDebug=true
; RemotePortOrderRouting=4001
; useRemoteSettings=false
; tradingMode=p
; Steps=8
; colorPalletName=dark

# window geo, this may be useful for sending `xdotool` commands?
; MainWindow.Width=1986
; screenHeight=3960


[Logon]
Locale=en
# most markets are oriented around this zone
# so might as well hard code it.
TimeZone=America/New_York
UseSSL=true
displayedproxymsg=1
os_titlebar=true
s3store=true
useRemoteSettings=false

[Communication]
ctciAutoEncrypt=true
Region=usr
; Peer=cdc1.ibllc.com:4001

@ -1,33 +0,0 @@
[IBGateway]
ApiOnly=true
LocalServerPort=4001
# NOTE: must be set if using IBC's "reject" mode
TrustedIPs=127.0.0.1
; RemoteHostOrderRouting=ndc1.ibllc.com
; WriteDebug=true
; RemotePortOrderRouting=4001
; useRemoteSettings=false
; tradingMode=p
; Steps=8
; colorPalletName=dark

# window geo, this may be useful for sending `xdotool` commands?
; MainWindow.Width=1986
; screenHeight=3960


[Logon]
Locale=en
# most markets are oriented around this zone
# so might as well hard code it.
TimeZone=America/New_York
UseSSL=true
displayedproxymsg=1
os_titlebar=true
s3store=true
useRemoteSettings=false

[Communication]
ctciAutoEncrypt=true
Region=usr
; Peer=cdc1.ibllc.com:4001

@ -1,35 +0,0 @@
#!/bin/sh
# start vnc server and listen for connections
# on port specced in `$VNC_SERVER_PORT`

x11vnc \
    -listen 127.0.0.1 \
    -allow 127.0.0.1 \
    -rfbport "${VNC_SERVER_PORT}" \
    -display :1 \
    -forever \
    -shared \
    -bg \
    -nowf \
    -noxdamage \
    -noxfixes \
    -no6 \
    -noipv6


# -nowcr \
# TODO: can't use this because of ``asyncvnc`` issue:
# https://github.com/barneygale/asyncvnc/issues/1
# -passwd 'ibcansmbz'

# XXX: optional graphics caching flags that seem to rekt the overlay
# of the 2 gw windows? When running a single gateway
# this seems to maybe optimize some memory usage?
# -ncache_cr \
# -ncache \

# NOTE: this will prevent logs from going to the console.
# -logappend /var/log/x11vnc.log \

# where to start allocating ports
# -autoport "${VNC_SERVER_PORT}" \
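
# example invocation (the script path and port are illustrative; the
# env var is expected to be exported by the caller):
#   VNC_SERVER_PORT=3003 sh vnc_server.sh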
@ -1,91 +0,0 @@
### NOTE this is likely out of date given it was written some
(years) time ago by a user who has since not really partaken in
contributing.

install for tinas
*****************
for windows peeps you can start by installing all the prerequisite software:

- install git with all default settings - https://git-scm.com/download/win
- install anaconda all default settings - https://www.anaconda.com/products/individual
- install microsoft build tools (check the box for Desktop development for C++, you might be able to uncheck some optional downloads) - https://visualstudio.microsoft.com/visual-cpp-build-tools/
- install visual studio code default settings - https://code.visualstudio.com/download


then, `crack a conda shell`_ and run the following commands::

    mkdir code  # create code directory
    cd code  # change directory to code
    git clone https://github.com/pikers/piker.git  # downloads piker installation package from github
    cd piker  # change directory to piker

    conda create -n pikonda  # creates conda environment named pikonda
    conda activate pikonda  # activates pikonda

    conda install -c conda-forge python-levenshtein  # in case it is not already installed
    conda install pip  # may already be installed
    pip  # will show if pip is installed

    pip install -e . -r requirements.txt  # install piker in editable mode

test piker to see if it is working::

    piker -b binance chart btcusdt.binance  # formatting for loading a chart
    piker -b kraken -b binance chart xbtusdt.kraken
    piker -b kraken -b binance -b ib chart qqq.nasdaq.ib
    piker -b ib chart tsla.nasdaq.ib

potential error::

    FileNotFoundError: [Errno 2] No such file or directory: 'C:\\Users\\user\\AppData\\Roaming\\piker\\brokers.toml'

solution:

- navigate to the file directory above (may be different on your machine, location should be listed in the error message)
- copy and paste the file from 'C:\\Users\\user\\code\\data\\brokers.toml' or create a blank file using notepad at the location above (see the example commands below)
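
for example, from a conda shell you can create the blank file like so
(paths are illustrative and may differ on your machine)::

    mkdir %AppData%\piker
    copy nul %AppData%\piker\brokers.toml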

Visual Studio Code setup:

- now that piker is installed we can set up vscode as the default terminal for running piker and editing the code
- open Visual Studio Code
- file --> Add Folder to Workspace --> C:\Users\user\code\piker (adds piker directory where all piker files are located)
- file --> Save Workspace As --> save it wherever you want and call it whatever you want, this is going to be your default workspace for running and editing piker code
- ctrl + shift + p --> start typing Python: Select Interpreter --> when the option comes up select it --> Select at the workspace level --> select the one that shows ('pikonda')
- change the default terminal to cmd.exe instead of powershell (default)
- now when you create a new terminal VSCode should automatically activate your conda env so that piker can be run as the first command after a new terminal is created

also, try out fancyzones as part of powertoys for a decent tiling window manager to manage all the cool new software you are going to be running.

.. _conda installed: https://
.. _C++ build toolz: https://
.. _crack a conda shell: https://
.. _vscode: https://

.. link to the tina guide
.. _setup a coolio tiled wm console: https://

provider support
****************
for live data feeds the in-progress set of supported brokers is:

- IB_ via ``ib_insync``, also see our `container docs`_
- binance_ and kraken_ for crypto over their public websocket API
- questrade_ (ish) which comes with effectively free L1

coming soon...

- webull_ via the reverse engineered public API
- yahoo via yliveticker_

if you want your broker supported and they have an API let us know.

.. _IB: https://interactivebrokers.github.io/tws-api/index.html
.. _container docs: https://github.com/pikers/piker/tree/master/dockering/ib
.. _questrade: https://www.questrade.com/api/documentation
.. _kraken: https://www.kraken.com/features/api#public-market-data
.. _binance: https://github.com/pikers/piker/pull/182
.. _webull: https://github.com/tedchou12/webull
.. _yliveticker: https://github.com/yahoofinancelive/yliveticker
.. _coinbase: https://docs.pro.coinbase.com/#websocket-feed

@ -1,263 +0,0 @@
# from pprint import pformat
from functools import partial
from decimal import Decimal
from typing import Callable

import tractor
import trio
from uuid import uuid4

from piker.service import maybe_open_pikerd
from piker.accounting import dec_digits
from piker.clearing import (
    open_ems,
    OrderClient,
)
# TODO: we should probably expose these top level in this subsys?
from piker.clearing._messages import (
    Order,
    Status,
    BrokerdPosition,
)
from piker.data import (
    iterticks,
    Flume,
    open_feed,
    Feed,
    # ShmArray,
)


# TODO: handle other statuses:
# - fills, errors, and position tracking
async def wait_for_order_status(
    trades_stream: tractor.MsgStream,
    oid: str,
    expect_status: str,

) -> tuple[
    list[Status],
    list[BrokerdPosition],
]:
    '''
    Wait for a specific order status for a given dialog, return msg flow
    up to that msg and any position update msgs in a tuple.

    '''
    # Wait for position message before moving on to verify flow(s)
    # for the multi-order position entry/exit.
    status_msgs: list[Status] = []
    pp_msgs: list[BrokerdPosition] = []

    async for msg in trades_stream:
        match msg:
            case {'name': 'position'}:
                ppmsg = BrokerdPosition(**msg)
                pp_msgs.append(ppmsg)

            case {
                'name': 'status',
            }:
                msg = Status(**msg)
                status_msgs.append(msg)

                # if we get the status we expect then return all
                # collected msgs from the brokerd dialog up to the
                # expected msg B)
                if (
                    msg.resp == expect_status
                    and msg.oid == oid
                ):
                    return status_msgs, pp_msgs


async def bot_main():
    '''
    Boot the piker runtime, open an ems connection, submit
    and process order statuses in real-time.

    '''
    ll: str = 'info'

    # open an order ctl client, live data feed, trio nursery for
    # spawning an order trailer task
    client: OrderClient
    trades_stream: tractor.MsgStream
    feed: Feed
    accounts: list[str]

    fqme: str = 'btcusdt.usdtm.perp.binance'

    async with (

        # TODO: do this implicitly inside `open_ems()` ep below?
        # init and sync actor-service runtime
        maybe_open_pikerd(
            loglevel=ll,
            debug_mode=True,

        ),
        open_ems(
            fqme,
            mode='paper',  # {'live', 'paper'}
            # mode='live',  # for real-brokerd submissions
            loglevel=ll,

        ) as (
            client,  # OrderClient
            trades_stream,  # tractor.MsgStream
            _,  # positions
            accounts,
            _,  # dialogs
        ),

        open_feed(
            fqmes=[fqme],
            loglevel=ll,

            # TODO: if you want to throttle via downsampling
            # how many tick updates your feed receives on
            # quote streams B)
            # tick_throttle=10,
        ) as feed,

        trio.open_nursery() as tn,
    ):
        assert accounts
        print(f'Loaded binance accounts: {accounts}')

        flume: Flume = feed.flumes[fqme]
        min_tick = Decimal(flume.mkt.price_tick)
        min_tick_digits: int = dec_digits(min_tick)
        price_round: Callable = partial(
            round,
            ndigits=min_tick_digits,
        )

        quote_stream: trio.abc.ReceiveChannel = feed.streams['binance']


        # always keep live limit 0.03% below last
        # clearing price
        clear_margin: float = 0.9997
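        # e.g. with a last clearing price of 100.0 this yields
        # a trailing limit of 99.97 (i.e. 0.03% below).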

        async def trailer(
            order: Order,
        ):
            # ref shm OHLCV array history, if you want
            # s_shm: ShmArray = flume.rt_shm
            # m_shm: ShmArray = flume.hist_shm

            # NOTE: if you wanted to frame ticks by type like
            # the quote throttler does.. and this is probably
            # faster in terms of getting the latest tick type
            # embedded value of interest?
            # from piker.data._sampling import frame_ticks

            async for quotes in quote_stream:
                for fqme, quote in quotes.items():
                    # print(
                    #     f'{quote["symbol"]} -> {quote["ticks"]}\n'
                    #     f'last 1s OHLC:\n{s_shm.array[-1]}\n'
                    #     f'last 1m OHLC:\n{m_shm.array[-1]}\n'
                    # )

                    for tick in iterticks(
                        quote,
                        reverse=True,
                        # types=('trade', 'dark_trade'),  # defaults
                    ):

                        await client.update(
                            uuid=order.oid,
                            price=price_round(
                                clear_margin
                                *
                                tick['price']
                            ),
                        )
                        msgs, pps = await wait_for_order_status(
                            trades_stream,
                            order.oid,
                            'open'
                        )
                        # if multiple clears per quote just
                        # skip to the next quote?
                        break


        # get first live quote to be sure we submit the initial
        # live buy limit low enough that it doesn't clear due to
        # a stale initial price from the data feed layer!
        first_ask_price: float | None = None
        async for quotes in quote_stream:
            for fqme, quote in quotes.items():
                # print(quote['symbol'])
                for tick in iterticks(quote, types=('ask',)):
                    first_ask_price: float = tick['price']
                    break

            if first_ask_price:
                break

        # setup order dialog via first msg
        price: float = price_round(
            clear_margin
            *
            first_ask_price,
        )

        # compute a 1k USD sized pos
        size: float = round(1e3/price, ndigits=3)

        order = Order(

            # docs on how this all works, bc even i'm not entirely
            # clear XD. also we probably want to figure out how to
            # offer both the paper engine running and the brokerd
            # order ctl tasks with the ems choosing which stream to
            # route msgs on given the account value!
            account='paper',  # use built-in paper clearing engine and .accounting
            # account='binance.usdtm',  # for live binance futes

            oid=str(uuid4()),
            exec_mode='live',  # {'dark', 'live', 'alert'}

            action='buy',  # TODO: remove this from our schema?

            size=size,
            symbol=fqme,
            price=price,
            brokers=['binance'],
        )
        await client.send(order)

        msgs, pps = await wait_for_order_status(
            trades_stream,
            order.oid,
            'open',
        )

        assert not pps
        assert msgs[-1].oid == order.oid

        # start "trailer task" which tracks rt quote stream
        tn.start_soon(trailer, order)

        try:
            # wait for ctl-c from user..
            await trio.sleep_forever()
        except KeyboardInterrupt:
            # cancel the open order
            await client.cancel(order.oid)

            msgs, pps = await wait_for_order_status(
                trades_stream,
                order.oid,
                'canceled'
            )
            raise


if __name__ == '__main__':
    trio.run(bot_main)
138 flake.lock
@ -1,138 +0,0 @@
{
  "nodes": {
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1689068808,
        "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "flake-utils_2": {
      "inputs": {
        "systems": "systems_2"
      },
      "locked": {
        "lastModified": 1689068808,
        "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nix-github-actions": {
      "inputs": {
        "nixpkgs": [
          "poetry2nix",
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1688870561,
        "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=",
        "owner": "nix-community",
        "repo": "nix-github-actions",
        "rev": "165b1650b753316aa7f1787f3005a8d2da0f5301",
        "type": "github"
      },
      "original": {
        "owner": "nix-community",
        "repo": "nix-github-actions",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1692174805,
        "narHash": "sha256-xmNPFDi/AUMIxwgOH/IVom55Dks34u1g7sFKKebxUm0=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "caac0eb6bdcad0b32cb2522e03e4002c8975c62e",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "poetry2nix": {
      "inputs": {
        "flake-utils": "flake-utils_2",
        "nix-github-actions": "nix-github-actions",
        "nixpkgs": [
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1692048894,
        "narHash": "sha256-cDw03rso2V4CDc3Mll0cHN+ztzysAvdI8pJ7ybbz714=",
        "ref": "refs/heads/pyqt6",
        "rev": "b059ad4c3051f45d6c912e17747aae37a9ec1544",
        "revCount": 2276,
        "type": "git",
        "url": "file:///home/lord_fomo/repos/poetry2nix"
      },
      "original": {
        "type": "git",
        "url": "file:///home/lord_fomo/repos/poetry2nix"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs",
        "poetry2nix": "poetry2nix"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    },
    "systems_2": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
180 flake.nix
@ -1,180 +0,0 @@
# NOTE: to convert to a poetry2nix env like this here are the
# steps:
# - install poetry in your system nix config
# - convert the repo to use poetry using `poetry init`:
#   https://python-poetry.org/docs/basic-usage/#initialising-a-pre-existing-project
# - then manually ensuring all deps are converted over:
#   - add this file to the repo and commit it
#   -

# GROKin tips:
# - CLI eps are (ostensibly) added via an `entry_points.txt`:
#   - https://packaging.python.org/en/latest/specifications/entry-points/#file-format
#   - https://github.com/nix-community/poetry2nix/blob/master/editable.nix#L49
{
  description = "piker: trading gear for hackers (pkged with poetry2nix)";

  inputs.flake-utils.url = "github:numtide/flake-utils";
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";

  # see https://github.com/nix-community/poetry2nix/tree/master#api
  inputs.poetry2nix = {
    # url = "github:nix-community/poetry2nix";
    # url = "github:K900/poetry2nix/qt5-explicit-deps";
    url = "/home/lord_fomo/repos/poetry2nix";

    inputs.nixpkgs.follows = "nixpkgs";
  };

  outputs = {
    self,
    nixpkgs,
    flake-utils,
    poetry2nix,
  }:
    # TODO: build cross-OS and use the `${system}` var thingy..
    flake-utils.lib.eachDefaultSystem (system:
      let
        # use PWD as sources
        projectDir = ./.;
        pyproject = ./pyproject.toml;
        poetrylock = ./poetry.lock;

        # TODO: port to 3.11 and support both versions?
        python = "python3.10";

        # for more functions and examples.
        # inherit
        #   (poetry2nix.legacyPackages.${system})
        #   mkPoetryApplication;
        # pkgs = nixpkgs.legacyPackages.${system};

        pkgs = nixpkgs.legacyPackages.x86_64-linux;
        lib = pkgs.lib;
        p2npkgs = poetry2nix.legacyPackages.x86_64-linux;

        # define all pkg overrides per dep, see edgecases.md:
        # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md
        # TODO: add these into the json file:
        # https://github.com/nix-community/poetry2nix/blob/master/overrides/build-systems.json
        pypkgs-build-requirements = {
          asyncvnc = [ "setuptools" ];
          eventkit = [ "setuptools" ];
          ib-insync = [ "setuptools" "flake8" ];
          msgspec = [ "setuptools" ];
          pdbp = [ "setuptools" ];
          pyqt6-sip = [ "setuptools" ];
          tabcompleter = [ "setuptools" ];
          tractor = [ "setuptools" ];
          tricycle = [ "setuptools" ];
          trio-typing = [ "setuptools" ];
          trio-util = [ "setuptools" ];
          xonsh = [ "setuptools" ];
        };

        # auto-generate override entries
        p2n-overrides = p2npkgs.defaultPoetryOverrides.extend (self: super:
          builtins.mapAttrs (package: build-requirements:
            (builtins.getAttr package super).overridePythonAttrs (old: {
              buildInputs = (
                old.buildInputs or [ ]
              ) ++ (
                builtins.map (
                  pkg: if builtins.isString pkg then builtins.getAttr pkg super else pkg
                ) build-requirements
              );
            })
          ) pypkgs-build-requirements
        );

        # override some ahead-of-time compiled extensions
        # to be built with their wheels.
        ahot_overrides = p2n-overrides.extend(
          final: prev: {

            # llvmlite = prev.llvmlite.override {
            #   preferWheel = false;
            # };

            # TODO: get this workin with p2n and nixpkgs..
            # pyqt6 = prev.pyqt6.override {
            #   preferWheel = true;
            # };

            # NOTE: this DOESN'T work atm but after a fix
            # to poetry2nix, it will and actually this line
            # won't be needed - thanks @k900:
            # https://github.com/nix-community/poetry2nix/pull/1257
            pyqt5 = prev.pyqt5.override {
              # withWebkit = false;
              preferWheel = true;
            };

            # see PR from @k900:
            # https://github.com/nix-community/poetry2nix/pull/1257
            # pyqt5-qt5 = prev.pyqt5-qt5.override {
            #   withWebkit = false;
            #   preferWheel = true;
            # };

            # TODO: patch in an override for polars to build
            # from src! See the details likely needed from
            # the cryptography entry:
            # https://github.com/nix-community/poetry2nix/blob/master/overrides/default.nix#L426-L435
            polars = prev.polars.override {
              preferWheel = true;
            };
          }
        );

        # WHY!? -> output-attrs that `nix develop` scans for:
        # https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-develop.html#flake-output-attributes
      in
        rec {
          packages = {
            # piker = poetry2nix.legacyPackages.x86_64-linux.mkPoetryEditablePackage {
            #   editablePackageSources = { piker = ./piker; };

            piker = p2npkgs.mkPoetryApplication {
              projectDir = projectDir;

              # SEE ABOVE for auto-genned input set, override
              # buncha deps with extras.. like `setuptools` mostly.
              # TODO: maybe propose a patch to p2n to show that you
              # can even do this in the edgecases docs?
              overrides = ahot_overrides;

              # XXX: won't work on llvmlite..
              # preferWheels = true;
            };
          };

          # devShells.default = pkgs.mkShell {
          #   projectDir = projectDir;
          #   python = "python3.10";
          #   overrides = ahot_overrides;
          #   inputsFrom = [ self.packages.x86_64-linux.piker ];
          #   packages = packages;
          #   # packages = [ poetry2nix.packages.${system}.poetry ];
          # };

          # TODO: grok the difference here..
          # - avoid re-cloning git repos on every develop entry..
          # - ideally allow hacking on the src code of some deps
          #   (tractor, pyqtgraph, tomlkit, etc.) WITHOUT having to
          #   re-install them every time a change is made.
          # - boot a usable xonsh inside the poetry virtualenv when
          #   defined via a custom entry point?
          devShells.default = p2npkgs.mkPoetryEnv {
            # env = p2npkgs.mkPoetryEnv {
            projectDir = projectDir;
            python = pkgs.python310;
            overrides = ahot_overrides;
            editablePackageSources = packages;
            # piker = "./";
            # tractor = "../tractor/";
            # }; # wut?
          };
        }
    ); # end of .outputs scope
}
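
# usage sketch (assuming the nix flakes feature is enabled in your
# system config; attr names follow the outputs defined above):
#
#   nix build .#piker    # build the poetry2nix-packaged app
#   nix develop          # enter the dev shell defined above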
@ -1,28 +0,0 @@
Notes to self
=============
chicken scratch we shan't forget, consider this staging
for actual feature issues on wtv git wrapper-provider we're
using (no we shan't stick with GH long term likely).


cool chart features
-------------------
- allow right-click to spawn shell with current in view
  data passed to the new process via ``msgpack-numpy``.
- expand OHLC datum to lower time frame.
- auto-highlight current time range on tick feed


features from IB charting
-------------------------
- vlm diffing from ticks and compare when bar arrives from historical
- should help isolate dark vlm / trades


chart ux ideas
--------------
- hotkey to zoom to order intersection (horizontal line) with previous
  price levels (+ some margin obvs).
- L1 "lines" (queue size repr) should normalize to some fixed x width
  such that when levels with more vlm appear other smaller levels are
  scaled down giving an immediate indication of the liquidity diff.

@ -1,5 +1,5 @@
# piker: trading gear for hackers.
# Copyright 2020-eternity Tyler Goodlet (in stewardship for pikers)
# Copyright 2020-eternity Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

@ -14,14 +14,14 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
"""
piker: trading gear for hackers.

'''
from .service import open_piker_runtime
from .data.feed import open_feed
"""
import msgpack  # noqa

__all__ = [
    'open_piker_runtime',
    'open_feed',
]
# TODO: remove this now right?
import msgpack_numpy

# patch msgpack for numpy arrays
msgpack_numpy.patch()

@ -0,0 +1,66 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Async utils no one seems to have built into a core lib (yet).
"""
from typing import AsyncContextManager
from collections import OrderedDict
from contextlib import asynccontextmanager


def async_lifo_cache(maxsize=128):
    """Async ``cache`` with a LIFO policy.

    Implemented my own since no one else seems to have
    a standard. I'll wait for the smarter people to come
    up with one, but until then...
    """
    cache = OrderedDict()

    def decorator(fn):

        async def wrapper(*args):
            key = args
            try:
                return cache[key]
            except KeyError:
                if len(cache) >= maxsize:
                    # discard the most recently added entry (LIFO)
                    cache.popitem()

                # do it
                cache[key] = await fn(*args)
                return cache[key]

        return wrapper

    return decorator
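

# usage sketch (illustrative; note the decorator must be *called*):
#
# @async_lifo_cache()
# async def load_symbol_info(sym: str):
#     ...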


@asynccontextmanager
async def _just_none():
    # noop -> skip entering context
    yield None


@asynccontextmanager
async def maybe_with_if(
    predicate: bool,
    context: AsyncContextManager,
) -> AsyncContextManager:
    async with context if predicate else _just_none() as output:
        yield output
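

# e.g. conditionally enter a feed context (an illustrative sketch,
# the flag and context are hypothetical):
#
# async with maybe_with_if(use_feed, open_feed(['btcusdt'])) as feed:
#     ...  # `feed` is None when `use_feed` is False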
@ -1,99 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Cacheing apis and toolz.

'''

from collections import OrderedDict
from typing import (
    Awaitable,
    Callable,
    ParamSpec,
    TypeVar,
)

from .log import get_logger

log = get_logger(__name__)

T = TypeVar("T")
P = ParamSpec("P")


# TODO: move this to `tractor.trionics`..
# - egs. to replicate for tests: https://github.com/aio-libs/async-lru#usage
# - their suite as well:
#   https://github.com/aio-libs/async-lru/tree/master/tests
# - asked trio_util about it too:
#   https://github.com/groove-x/trio-util/issues/21
def async_lifo_cache(
    maxsize=128,

    # NOTE: typing style was learned from:
    # https://stackoverflow.com/a/71132186
) -> Callable[
    [Callable[P, Awaitable[T]]],
    Callable[P, Awaitable[T]],
]:
    '''
    Async ``cache`` with a LIFO policy.

    Implemented my own since no one else seems to have
    a standard. I'll wait for the smarter people to come
    up with one, but until then...

    NOTE: when decorating, due to this simple/naive implementation, you
    MUST call the decorator like,

    .. code:: python

        @async_lifo_cache()
        async def cache_target():

    '''
    cache = OrderedDict()

    def decorator(
        fn: Callable[P, Awaitable[T]],
    ) -> Callable[P, Awaitable[T]]:

        async def decorated(
            *args: P.args,
            **kwargs: P.kwargs,
        ) -> T:
            key = args
            try:
                return cache[key]
            except KeyError:
                if len(cache) >= maxsize:
                    # discard the most recently added entry (LIFO)
                    cache.popitem()

                # call underlying
                cache[key] = await fn(
                    *args,
                    **kwargs,
                )
                return cache[key]

        return decorated

    return decorator

@ -0,0 +1,449 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Structured, daemon tree service management.

"""
from typing import Optional, Union, Callable, Any
from contextlib import asynccontextmanager
from collections import defaultdict

from pydantic import BaseModel
import trio
from trio_typing import TaskStatus
import tractor

from .log import get_logger, get_console_log
from .brokers import get_brokermod


log = get_logger(__name__)

_root_dname = 'pikerd'
_tractor_kwargs: dict[str, Any] = {
    # use a different registry addr then tractor's default
    'arbiter_addr': ('127.0.0.1', 6116),
}
_root_modules = [
    __name__,
    'piker.clearing._ems',
    'piker.clearing._client',
]


class Services(BaseModel):

    actor_n: tractor._trionics.ActorNursery
    service_n: trio.Nursery
    debug_mode: bool  # tractor sub-actor debug mode flag
    service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}

    class Config:
        arbitrary_types_allowed = True

    async def start_service_task(
        self,
        name: str,
        portal: tractor.Portal,
        target: Callable,
        **kwargs,

    ) -> tuple[trio.CancelScope, Any]:
        '''
        Open a context in a service sub-actor, add to a stack
        that gets unwound at ``pikerd`` teardown.

        This allows for allocating long-running sub-services in our main
        daemon and explicitly controlling their lifetimes.

        '''
        async def open_context_in_task(
            task_status: TaskStatus[
                trio.CancelScope] = trio.TASK_STATUS_IGNORED,

        ) -> Any:

            with trio.CancelScope() as cs:

                async with portal.open_context(
                    target,
                    **kwargs,

                ) as (ctx, first):

                    # unblock once the remote context has started
                    task_status.started((cs, first))

                    # wait on any context's return value
                    ctx_res = await ctx.result()
                    log.info(
                        f'`pikerd` service {name} started with value {ctx_res}'
                    )

                    # wait on any error from the sub-actor
                    # NOTE: this will block indefinitely until cancelled
                    # either by error from the target context function or
                    # by being cancelled here by the surrounding cancel
                    # scope
                    return (await portal.result(), ctx_res)

        cs, first = await self.service_n.start(open_context_in_task)

        # store the cancel scope and portal for later cancellation or
        # restart if needed.
        self.service_tasks[name] = (cs, portal)

        return cs, first

    async def cancel_service(
        self,
        name: str,

    ) -> Any:

        log.info(f'Cancelling `pikerd` service {name}')
        cs, portal = self.service_tasks[name]
        cs.cancel()
        return await portal.cancel_actor()


_services: Optional[Services] = None


@asynccontextmanager
async def open_pikerd(
    start_method: str = 'trio',
    loglevel: Optional[str] = None,

    # XXX: you should pretty much never want debug mode
    # for data daemons when running in production.
    debug_mode: bool = False,

) -> Optional[tractor._portal.Portal]:
    '''
    Start a root piker daemon whose lifetime extends indefinitely
    until cancelled.

    A root actor nursery is created which can be used to create and keep
    alive underling services (see below).

    '''
    global _services
    assert _services is None

    # XXX: this may open a root actor as well
    async with (
        tractor.open_root_actor(

            # passed through to ``open_root_actor``
            arbiter_addr=_tractor_kwargs['arbiter_addr'],
            name=_root_dname,
            loglevel=loglevel,
            debug_mode=debug_mode,
            start_method=start_method,

            # TODO: eventually we should be able to avoid
            # having the root have more than permissions to
            # spawn other specialized daemons I think?
            enable_modules=_root_modules,
        ) as _,

        tractor.open_nursery() as actor_nursery,
    ):
        async with trio.open_nursery() as service_nursery:

            # # setup service mngr singleton instance
            # async with AsyncExitStack() as stack:

            # assign globally for future daemon/task creation
            _services = Services(
                actor_n=actor_nursery,
                service_n=service_nursery,
                debug_mode=debug_mode,
            )

            yield _services


@asynccontextmanager
async def maybe_open_runtime(
    loglevel: Optional[str] = None,
    **kwargs,

) -> None:
    """
    Start the ``tractor`` runtime (a root actor) if none exists.

    """
    settings = _tractor_kwargs
    settings.update(kwargs)

    if not tractor.current_actor(err_on_no_runtime=False):
        async with tractor.open_root_actor(
            loglevel=loglevel,
            **settings,
        ):
            yield
    else:
        yield


@asynccontextmanager
async def maybe_open_pikerd(
    loglevel: Optional[str] = None,
    **kwargs,

) -> Union[tractor._portal.Portal, Services]:
    """If no ``pikerd`` daemon-root-actor can be found start it and
    yield up (we should probably figure out returning a portal to self
    though).

    """
    if loglevel:
        get_console_log(loglevel)

    # subtle, we must have the runtime up here or portal lookup will fail
    async with maybe_open_runtime(loglevel, **kwargs):

        async with tractor.find_actor(_root_dname) as portal:
            # assert portal is not None
            if portal is not None:
                yield portal
                return

        # presume pikerd role since no daemon could be found at
        # configured address
        async with open_pikerd(

            loglevel=loglevel,
            debug_mode=kwargs.get('debug_mode', False),

        ) as _:
            # in the case where we're starting up the
            # tractor-piker runtime stack in **this** process
            # we return no portal to self.
            yield None


# brokerd enabled modules
_data_mods = [
    'piker.brokers.core',
    'piker.brokers.data',
    'piker.data',
    'piker.data.feed',
    'piker.data._sampling'
]


class Brokerd:
    locks = defaultdict(trio.Lock)


@asynccontextmanager
async def maybe_spawn_daemon(

    service_name: str,
    service_task_target: Callable,
    spawn_args: dict[str, Any],
    loglevel: Optional[str] = None,
    **kwargs,

) -> tractor.Portal:
    """
    If no ``service_name`` daemon-actor can be found,
    spawn one in a local subactor and return a portal to it.

    If this function is called from a non-pikerd actor, the
    spawned service will persist as long as pikerd does or
    it is requested to be cancelled.

    This can be seen as a service starting api for remote-actor
    clients.

    """
    if loglevel:
        get_console_log(loglevel)

    # serialize access to this section to avoid
    # 2 or more tasks racing to create a daemon
    lock = Brokerd.locks[service_name]
    await lock.acquire()

    # attach to existing brokerd if possible
    async with tractor.find_actor(service_name) as portal:
        if portal is not None:
            lock.release()
            yield portal
            return

    # ask root ``pikerd`` daemon to spawn the daemon we need if
    # pikerd is not live we now become the root of the
    # process tree
    async with maybe_open_pikerd(

        loglevel=loglevel,
        **kwargs,

    ) as pikerd_portal:

        if pikerd_portal is None:
            # we are the root and thus are `pikerd`
            # so spawn the target service directly by calling
            # the provided target routine.
            # XXX: this assumes that the target is well formed and will
            # do the right things to setup both a sub-actor **and** call
            # the ``_Services`` api from above to start the top level
            # service task for that actor.
            await service_task_target(**spawn_args)

        else:
            # tell the remote `pikerd` to start the target,
            # the target can't return a non-serializable value
            # since it is expected that service starting is
            # non-blocking and the target task will persist running
            # on `pikerd` after the client requesting its start
            # disconnects.
            await pikerd_portal.run(
                service_task_target,
                **spawn_args,
            )

        async with tractor.wait_for_actor(service_name) as portal:
            lock.release()
            yield portal


async def spawn_brokerd(

    brokername: str,
    loglevel: Optional[str] = None,
    **tractor_kwargs,

) -> bool:

    log.info(f'Spawning {brokername} broker daemon')

    brokermod = get_brokermod(brokername)
    dname = f'brokerd.{brokername}'

    extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
    tractor_kwargs.update(extra_tractor_kwargs)

    global _services
    assert _services

    # ask `pikerd` to spawn a new sub-actor and manage it under its
    # actor nursery
    portal = await _services.actor_n.start_actor(
        dname,
        enable_modules=_data_mods + [brokermod.__name__],
        loglevel=loglevel,
        debug_mode=_services.debug_mode,
        **tractor_kwargs
    )

    # non-blocking setup of brokerd service nursery
    from .data import _setup_persistent_brokerd

    await _services.start_service_task(
        dname,
        portal,
        _setup_persistent_brokerd,
        brokername=brokername,
    )
    return True


@asynccontextmanager
async def maybe_spawn_brokerd(

    brokername: str,
    loglevel: Optional[str] = None,
    **kwargs,

) -> tractor.Portal:
    '''Helper to spawn a brokerd service.

    '''
    async with maybe_spawn_daemon(

        f'brokerd.{brokername}',
        service_task_target=spawn_brokerd,
        spawn_args={'brokername': brokername, 'loglevel': loglevel},
        loglevel=loglevel,
        **kwargs,

    ) as portal:
        yield portal


async def spawn_emsd(

    loglevel: Optional[str] = None,
    **extra_tractor_kwargs

) -> bool:
    """
    Start the clearing engine under ``pikerd``.

    """
    log.info('Spawning emsd')

    global _services
    assert _services

    portal = await _services.actor_n.start_actor(
        'emsd',
        enable_modules=[
            'piker.clearing._ems',
            'piker.clearing._client',
        ],
        loglevel=loglevel,
        debug_mode=_services.debug_mode,  # set by pikerd flag
        **extra_tractor_kwargs
    )

    # non-blocking setup of clearing service
    from .clearing._ems import _setup_persistent_emsd

    await _services.start_service_task(
        'emsd',
        portal,
        _setup_persistent_emsd,
    )
    return True


@asynccontextmanager
async def maybe_open_emsd(

    brokername: str,
    loglevel: Optional[str] = None,
    **kwargs,

) -> tractor._portal.Portal:  # noqa

    async with maybe_spawn_daemon(

        'emsd',
        service_task_target=spawn_emsd,
        spawn_args={'loglevel': loglevel},
        loglevel=loglevel,
        **kwargs,

    ) as portal:
        yield portal
|
||||
|
|
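Editor's note: as a usage sketch, client code typically drives the pair of context managers above like so; the 'binance' backend name is illustrative only and error handling is elided, everything else is the API shown in the hunk above.

async def use_brokerd() -> None:
    # attaches to an already-running `brokerd.binance` actor, or asks
    # the root `pikerd` daemon to spawn one, then yields an RPC portal.
    async with maybe_spawn_brokerd(
        'binance',          # hypothetical example backend name
        loglevel='info',
    ) as portal:
        # issue requests through `portal`; the daemon itself stays
        # alive under `pikerd` after this block exits.
        ...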
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -14,21 +14,30 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Data layer module commons.
'''
"""
Profiling wrappers for internal libs.

"""
from functools import partial
import time
from functools import wraps

from ..log import (
    get_logger,
    get_console_log,
)
subsys: str = 'piker.data'
_pg_profile: bool = False

log = get_logger(subsys)

get_console_log = partial(
    get_console_log,
    name=subsys,
)


def pg_profile_enabled() -> bool:
    global _pg_profile
    return _pg_profile


def timeit(fn):
    @wraps(fn)
    def wrapper(*args, **kwargs):
        t = time.time()
        res = fn(*args, **kwargs)
        print(
            '%s.%s: %.4f sec'
            % (fn.__module__, fn.__qualname__, time.time() - t)
        )
        return res

    return wrapper
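Editor's note: the `timeit` wrapper above is a plain decorator; a minimal sketch of applying it (the `snooze` function is hypothetical):

@timeit
def snooze(n: float) -> None:
    # hypothetical workload to be profiled
    time.sleep(n)

snooze(0.25)
# prints something like: __main__.snooze: 0.2503 sec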
@@ -1,16 +0,0 @@
.accounting
-----------
A subsystem for transaction processing, storage and historical
measurement.


.pnl
----
BEP, the break even price: the price at which liquidating
a remaining position results in a zero PnL since the position was
"opened" in the destination asset.

PPU, the price-per-unit: the "average cost" (in cumulative mean terms)
of the "entry" transactions which "make a position larger"; taking
a profit relative to this price means that you will "make more
profit than was made prior" since the position was opened.
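Editor's note: since PPU is just a cumulative weighted mean over entry clears, a tiny worked sketch may help (all numbers invented for illustration):

# two hypothetical entry clears: 2 units @ 100, then 1 unit @ 130.
entries = [(2, 100.0), (1, 130.0)]

cum_size = sum(size for size, _ in entries)                # 3
cost_basis = sum(size * price for size, price in entries)  # 330.0
ppu = cost_basis / cum_size                                # 110.0

# any exit above a ppu of 110.0 realizes a profit on the open position.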
@@ -1,107 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
"Accounting for degens": count dem numberz that track how much you got
for tendiez.

'''
from ..log import get_logger

from .calc import (
    iter_by_dt,
)
from ._ledger import (
    Transaction,
    TransactionLedger,
    open_trade_ledger,
)
from ._pos import (
    Account,
    load_account,
    load_account_from_ledger,
    open_pps,
    open_account,
    Position,
)
from ._mktinfo import (
    Asset,
    dec_digits,
    digits_to_dec,
    MktPair,
    Symbol,
    unpack_fqme,
    _derivs as DerivTypes,
)
from ._allocate import (
    mk_allocator,
    Allocator,
)


log = get_logger(__name__)

__all__ = [
    'Account',
    'Allocator',
    'Asset',
    'MktPair',
    'Position',
    'Symbol',
    'Transaction',
    'TransactionLedger',
    'dec_digits',
    'digits_to_dec',
    'iter_by_dt',
    'load_account',
    'load_account_from_ledger',
    'mk_allocator',
    'open_account',
    'open_pps',
    'open_trade_ledger',
    'unpack_fqme',
    'DerivTypes',
]


def get_likely_pair(
    src: str,
    dst: str,
    bs_mktid: str,

) -> str | None:
    '''
    Attempt to get the likely trading pair matching a given destination
    asset `dst: str`.

    '''
    try:
        src_name_start: int = bs_mktid.rindex(src)
    except (
        ValueError,   # substr not found
    ):
        # TODO: handle nested positions..(i.e.
        # positions where the src fiat was used to
        # buy some other dst which was further used
        # to buy another dst..)
        # log.warning(
        #     f'No src fiat {src} found in {bs_mktid}?'
        # )
        return None

    likely_dst: str = bs_mktid[:src_name_start]
    if likely_dst == dst:
        return bs_mktid
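Editor's note: a quick sketch of the lookup logic above (pair strings invented for illustration):

# `bs_mktid` ends with the src (fiat) name, so stripping it from the
# right should leave the dst asset name if this is the right pair.
assert get_likely_pair('usdt', 'btc', 'btcusdt') == 'btcusdt'

# no `usdt` substring in the backend market id -> no match.
assert get_likely_pair('usdt', 'btc', 'ethbtc') is None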
@@ -1,289 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Position allocation logic and protocols.

'''
from enum import Enum
from typing import Optional

from bidict import bidict

from ._pos import Position
from . import MktPair
from piker.types import Struct


_size_units = bidict({
    'currency': '$ size',
    'units': '# units',
    # TODO: but we'll need a `<brokermod>.get_accounts()` or something
    # 'percent_of_port': '% of port',
})
SizeUnit = Enum(
    'SizeUnit',
    _size_units,
)


class Allocator(Struct):

    mkt: MktPair

    # TODO: if we ever want to support non-uniform entry-slot-proportion
    # "sizes"
    # disti_weight: str = 'uniform'

    units_limit: float
    currency_limit: float
    slots: int
    account: Optional[str] = 'paper'

    _size_units: bidict[str, Optional[str]] = _size_units

    # TODO: for enums this clearly doesn't fucking work, you can't set
    # a default at startup by passing in a `dict` but yet you can set
    # that value through assignment..for wtv cucked reason.. honestly, pure
    # unintuitive garbage.
    _size_unit: str = 'currency'

    @property
    def size_unit(self) -> str:
        return self._size_unit

    @size_unit.setter
    def size_unit(self, v: str) -> Optional[str]:
        if v not in _size_units:
            v = _size_units.inverse[v]

        assert v in _size_units
        self._size_unit = v
        return v

    def step_sizes(
        self,
    ) -> tuple[float, float]:
        '''
        Return the units size for each unit type as a tuple.

        '''
        slots = self.slots
        return (
            self.units_limit / slots,
            self.currency_limit / slots,
        )

    def limit(self) -> float:
        if self.size_unit == 'currency':
            return self.currency_limit
        else:
            return self.units_limit

    def limit_info(self) -> tuple[str, float]:
        return self.size_unit, self.limit()

    def next_order_info(
        self,

        # we only need a startup size for exit calcs, we can then
        # determine how large slots should be if the initial pp size was
        # larger than the current live one, and the live one is smaller
        # than the initial config settings.
        startup_pp: Position,
        live_pp: Position,
        price: float,
        action: str,

    ) -> dict:
        '''
        Generate order request info for the "next" submittable order
        depending on position / order entry config.

        '''
        mkt: MktPair = self.mkt
        ld: int = mkt.size_tick_digits

        size_unit = self.size_unit
        live_size = live_pp.cumsize
        abs_live_size = abs(live_size)
        abs_startup_size = abs(startup_pp.cumsize)

        u_per_slot, currency_per_slot = self.step_sizes()

        if size_unit == 'units':
            slot_size: float = u_per_slot
            l_sub_pp: float = self.units_limit - abs_live_size

        elif size_unit == 'currency':
            live_cost_basis: float = abs_live_size * live_pp.ppu
            slot_size: float = currency_per_slot / price
            l_sub_pp: float = (self.currency_limit - live_cost_basis) / price

        else:
            raise ValueError(
                f"Not valid size unit '{size_unit}'"
            )

        # an entry (adding-to or starting a pp)
        if (
            live_size == 0
            or (
                action == 'buy'
                and live_size > 0
            )
            or (
                action == 'sell'
                and live_size < 0
            )
        ):
            order_size = min(
                slot_size,
                max(l_sub_pp, 0),
            )

        # an exit (removing-from or going to net-zero pp)
        else:
            # when exiting a pp we always try to slot the position
            # in the instrument's units, since doing so in a derived
            # size measure (eg. currency value, percent of port) would
            # result in a mis-mapping of slots sizes in unit terms
            # (i.e. it would take *more* slots to exit at a profit and
            # *less* slots to exit at a loss).
            pp_size = max(abs_startup_size, abs_live_size)
            slotted_pp = pp_size / self.slots

            if size_unit == 'currency':
                # compute the "projected" limit's worth of units at the
                # current pp (weighted) price:
                slot_size = currency_per_slot / live_pp.ppu

            else:
                slot_size = u_per_slot

            # TODO: ensure that the limit can never be set **lower**
            # than the current pp size? It should be configured
            # correctly at startup right?

            # if our position is greater than our limit setting
            # we'll want to use slot sizes which are larger than what
            # the limit would normally determine.
            order_size = max(slotted_pp, slot_size)

            if (
                abs_live_size < slot_size

                # NOTE: front/back "loading" heuristic:
                # if the remaining pp is in between 0-1.5x a slot's
                # worth, dump the whole position in this last exit
                # therefore conducting so called "back loading" but
                # **without** going past a net-zero pp. if the pp is
                # > 1.5x a slot size, then front load: exit a slot's
                # worth and expect net-zero to be acquired on the final
                # exit.
                or slot_size < pp_size < round((1.5*slot_size), ndigits=ld)
                or (

                    # underlying requires discrete (int) units (eg. stocks)
                    # and thus our slot size (based on our limit) would
                    # exit a fractional unit's worth so, presuming we aren't
                    # supporting a fractional-units-style broker, we need to
                    # exit the final unit.
                    ld == 0
                    and abs_live_size == 1
                )
            ):
                order_size = abs_live_size

        slots_used = 1.0  # the default uniform policy
        if order_size < slot_size:
            # compute a fractional slots size to display
            slots_used = self.slots_used(
                Position(
                    mkt=mkt,
                    bs_mktid=mkt.bs_mktid,
                )
            )

        # TODO: render an actual ``Executable`` type here?
        return {
            'size': abs(round(order_size, ndigits=ld)),
            'size_digits': ld,

            # TODO: incorporate multipliers for relevant derivatives
            'fiat_size': round(order_size * price, ndigits=2),
            'slots_used': slots_used,

            # update line LHS label with account name
            'account': self.account,
        }

    def slots_used(
        self,
        pp: Position,

    ) -> float:
        '''
        Calc and return the number of slots used by this ``Position``.

        '''
        abs_pp_size = abs(pp.cumsize)

        if self.size_unit == 'currency':
            # live_currency_size = size or (abs_pp_size * pp.ppu)
            live_currency_size = abs_pp_size * pp.ppu
            prop = live_currency_size / self.currency_limit

        else:
            # return (size or abs_pp_size) / alloc.units_limit
            prop = abs_pp_size / self.units_limit

        # TODO: REALLY need a way to show partial slots..
        # for now we round at the midway point between slots
        return round(prop * self.slots)


def mk_allocator(

    mkt: MktPair,
    startup_pp: Position,

    # default allocation settings
    defaults: dict[str, float] = {
        'account': None,  # select paper by default
        # 'size_unit': 'currency',
        'units_limit': 400,
        'currency_limit': 5e3,
        'slots': 4,
    },
    **kwargs,

) -> Allocator:

    if kwargs:
        defaults.update(kwargs)

    # load and retrieve user settings for default allocations
    # ``config.toml``
    user_def = {
        'currency_limit': 6e3,
        'slots': 6,
    }
    defaults.update(user_def)

    return Allocator(
        mkt=mkt,
        **defaults,
    )
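Editor's note: to make the slot math above concrete, a small arithmetic sketch (all limit numbers invented; `MktPair`/`Position` construction elided since both need real market metadata):

# with a $5000 currency limit split over 4 slots, each entry order
# is sized to $1250 worth of the destination asset at current price.
currency_limit = 5e3
slots = 4
price = 250.0

currency_per_slot = currency_limit / slots   # 1250.0
slot_size = currency_per_slot / price        # 5.0 units per entry

# four such entries exhaust the limit: 4 * 5.0 * 250.0 == 5000.0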
@@ -1,421 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Trade and transaction ledger processing.

'''
from __future__ import annotations
from collections import UserDict
from contextlib import contextmanager as cm
from functools import partial
from pathlib import Path
from pprint import pformat
from types import ModuleType
from typing import (
    Any,
    Callable,
    Generator,
    Literal,
    TYPE_CHECKING,
)

from pendulum import (
    DateTime,
)
import tomli_w  # for fast ledger writing

from piker.types import Struct
from piker import config
from ..log import get_logger
from .calc import (
    iter_by_dt,
)

if TYPE_CHECKING:
    from ..data._symcache import (
        SymbologyCache,
    )

log = get_logger(__name__)


TxnType = Literal[
    'clear',
    'transfer',

    # TODO: see https://github.com/pikers/piker/issues/510
    # 'split',
    # 'rename',
    # 'resize',
    # 'removal',
]


class Transaction(Struct, frozen=True):

    # NOTE: this is a unified acronym also used in our `MktPair`
    # and can stand for any of a
    # "fully qualified <blank> endpoint":
    # - "market" in the case of financial trades
    #   (btcusdt.spot.binance).
    # - "merkel (tree)" aka a blockchain system "wallet transfers"
    #   (btc.blockchain)
    # - "money" for traditional (digital databases)
    #   *bank accounts* (usd.swift, eur.sepa)
    fqme: str

    tid: str | int  # unique transaction id
    size: float
    price: float
    cost: float  # commissions or other additional costs
    dt: DateTime

    # the "event type" in terms of "market events" see above and
    # https://github.com/pikers/piker/issues/510
    etype: TxnType = 'clear'

    # TODO: we can drop this right since we
    # can instead expect the backend to provide this
    # via the `MktPair`?
    expiry: DateTime | None = None

    # (optional) key-id defined by the broker-service backend which
    # ensures the instrument-symbol market key for this record is unique
    # in the "their backend/system" sense; i.e. this uid for the market
    # as defined (internally) in some namespace defined by the broker
    # service.
    bs_mktid: str | int | None = None

    def to_dict(
        self,
        **kwargs,
    ) -> dict:
        dct: dict[str, Any] = super().to_dict(**kwargs)

        # ensure we use a pendulum formatted
        # ISO style str here!
        dct['dt'] = str(self.dt)

        return dct


class TransactionLedger(UserDict):
    '''
    Very simple ``dict`` wrapper + ``pathlib.Path`` handle to
    a TOML formatted transaction file for enabling file writes
    dynamically whilst still looking exactly like a ``dict`` from the
    outside.

    '''
    # NOTE: see `open_trade_ledger()` for defaults, this should
    # never be constructed manually!
    def __init__(
        self,
        ledger_dict: dict,
        file_path: Path,
        account: str,
        mod: ModuleType,  # broker mod
        tx_sort: Callable,
        symcache: SymbologyCache,

    ) -> None:
        self.account: str = account
        self.file_path: Path = file_path
        self.mod: ModuleType = mod
        self.tx_sort: Callable = tx_sort

        self._symcache: SymbologyCache = symcache

        # any added txns we keep in that form for meta-data
        # gathering purposes
        self._txns: dict[str, Transaction] = {}

        super().__init__(ledger_dict)

    def __repr__(self) -> str:
        return (
            f'TransactionLedger: {len(self)}\n'
            f'{pformat(list(self.data))}'
        )

    @property
    def symcache(self) -> SymbologyCache:
        '''
        Read-only ref to backend's ``SymbologyCache``.

        '''
        return self._symcache

    def update_from_t(
        self,
        t: Transaction,
    ) -> None:
        '''
        Given an input `Transaction`, cast to `dict` and update
        from its transaction id.

        '''
        self.data[t.tid] = t.to_dict()
        self._txns[t.tid] = t

    def iter_txns(
        self,
        symcache: SymbologyCache | None = None,

    ) -> Generator[
        Transaction,
        None,
        None,
    ]:
        '''
        Deliver trades records in ``(key: str, t: Transaction)``
        form via generator.

        '''
        symcache = symcache or self._symcache

        if self.account == 'paper':
            from piker.clearing import _paper_engine
            norm_trade: Callable = partial(
                _paper_engine.norm_trade,
                brokermod=self.mod,
            )

        else:
            norm_trade: Callable = self.mod.norm_trade

        # datetime-sort and pack into txs
        for tid, txdict in self.tx_sort(self.data.items()):
            txn: Transaction = norm_trade(
                tid,
                txdict,
                pairs=symcache.pairs,
                symcache=symcache,
            )
            yield txn

    def to_txns(
        self,
        symcache: SymbologyCache | None = None,

    ) -> dict[str, Transaction]:
        '''
        Return entire output from ``.iter_txns()`` in a ``dict``.

        '''
        txns: dict[str, Transaction] = {}
        for t in self.iter_txns(symcache=symcache):

            if not t:
                log.warning(f'{self.mod.name}:{self.account} TXN is -> {t}')
                continue

            txns[t.tid] = t

        return txns

    def write_config(self) -> None:
        '''
        Render the self.data ledger dict to its TOML file form.

        ALWAYS order datetime sorted!

        '''
        is_paper: bool = self.account == 'paper'

        symcache: SymbologyCache = self._symcache
        towrite: dict[str, Any] = {}
        for tid, txdict in self.tx_sort(self.data.copy()):
            # write blank-str expiry for non-expiring assets
            if (
                'expiry' in txdict
                and txdict['expiry'] is None
            ):
                txdict['expiry'] = ''

            # (maybe) re-write old acro-key
            if (
                is_paper
                # if symcache is empty/not supported (yet), don't
                # bother xD
                and symcache.mktmaps
            ):
                fqme: str = txdict.pop('fqsn', None) or txdict['fqme']
                bs_mktid: str | None = txdict.get('bs_mktid')

                if (
                    fqme not in symcache.mktmaps
                    or (
                        # also try to see if this is maybe a paper
                        # engine ledger in which case the bs_mktid
                        # should be the fqme as well!
                        bs_mktid
                        and fqme != bs_mktid
                    )
                ):
                    # always take any (paper) bs_mktid if defined and
                    # in the backend's cache key set.
                    if bs_mktid in symcache.mktmaps:
                        fqme: str = bs_mktid
                    else:
                        best_fqme: str = list(symcache.search(fqme))[0]
                        log.warning(
                            f'Could not find FQME: {fqme} in qualified set?\n'
                            f'Qualifying and expanding {fqme} -> {best_fqme}'
                        )
                        fqme = best_fqme

                if (
                    bs_mktid
                    and bs_mktid != fqme
                ):
                    # in paper account case always make sure both the
                    # fqme and bs_mktid are fully qualified..
                    txdict['bs_mktid'] = fqme

                # in paper ledgers always write the latest
                # symbology key field: an FQME.
                txdict['fqme'] = fqme

            towrite[tid] = txdict

        with self.file_path.open(mode='wb') as fp:
            tomli_w.dump(towrite, fp)


def load_ledger(
    brokername: str,
    acctid: str,

    # for testing or manual load from file
    dirpath: Path | None = None,

) -> tuple[dict, Path]:
    '''
    Load a ledger (TOML) file from user's config directory:
    $CONFIG_DIR/accounting/ledgers/trades_<brokername>_<acctid>.toml

    Return its `dict`-content and file path.

    '''
    import time
    try:
        import tomllib
    except ModuleNotFoundError:
        import tomli as tomllib

    ldir: Path = (
        dirpath
        or
        config._config_dir / 'accounting' / 'ledgers'
    )
    if not ldir.is_dir():
        ldir.mkdir()

    fname = f'trades_{brokername}_{acctid}.toml'
    fpath: Path = ldir / fname

    if not fpath.is_file():
        log.info(
            f'Creating new local trades ledger: {fpath}'
        )
        fpath.touch()

    with fpath.open(mode='rb') as cf:
        start = time.time()
        ledger_dict = tomllib.load(cf)
        log.debug(f'Ledger load took {time.time() - start}s')

    return ledger_dict, fpath


@cm
def open_trade_ledger(
    broker: str,
    account: str,

    allow_from_sync_code: bool = False,
    symcache: SymbologyCache | None = None,

    # default is to sort by detected datetime-ish field
    tx_sort: Callable = iter_by_dt,
    rewrite: bool = False,

    # for testing or manual load from file
    _fp: Path | None = None,

) -> Generator[TransactionLedger, None, None]:
    '''
    Idempotently create and read in a trade log file from the
    ``<configuration_dir>/ledgers/`` directory.

    Files are named per broker account of the form
    ``<brokername>_<accountname>.toml``. The ``accountname`` here is the
    name as defined in the user's ``brokers.toml`` config.

    '''
    from ..brokers import get_brokermod
    mod: ModuleType = get_brokermod(broker)

    ledger_dict, fpath = load_ledger(
        broker,
        account,
        dirpath=_fp,
    )
    cpy = ledger_dict.copy()

    # XXX NOTE: if not provided presume we are being called from
    # sync code and need to maybe run `trio` to generate..
    if symcache is None:

        # XXX: be mega pedantic and ensure the caller knows what
        # they're doing!
        if not allow_from_sync_code:
            raise RuntimeError(
                'You MUST set `allow_from_sync_code=True` when '
                'calling `open_trade_ledger()` from sync code! '
                'If you are calling from async code you MUST '
                'instead pass a `symcache: SymbologyCache`!'
            )

        from ..data._symcache import (
            get_symcache,
        )
        symcache: SymbologyCache = get_symcache(broker)

    assert symcache

    ledger = TransactionLedger(
        ledger_dict=cpy,
        file_path=fpath,
        account=account,
        mod=mod,
        symcache=symcache,
        tx_sort=getattr(mod, 'tx_sort', tx_sort),
    )
    try:
        yield ledger
    finally:
        if (
            ledger.data != ledger_dict
            or rewrite
        ):
            # TODO: show diff output?
            # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
            log.info(f'Updating ledger for {fpath}:\n')
            ledger.write_config()
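Editor's note: a minimal sketch of driving the ledger context manager above from sync code; the 'binance'/'paper' names are illustrative only, and any changes made inside the block are flushed back to TOML on exit per the `finally` clause shown above.

with open_trade_ledger(
    'binance',            # assumed example broker backend
    'paper',
    allow_from_sync_code=True,
) as ledger:
    # `ledger` quacks like a plain dict of tid -> txn-dict entries;
    # mutating it marks the file for rewrite on context exit.
    print(f'{len(ledger)} recorded transactions')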
@@ -1,766 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Market (pair) meta-info layer: sane addressing semantics and meta-data
for cross-provider marketplaces.

We introduce the concept of,

- a FQMA: fully qualified market address,
- a sane schema for FQMAs including derivatives,
- a msg-serializable description of markets for
  easy sharing with other pikers B)

'''
from __future__ import annotations
from decimal import (
    Decimal,
    ROUND_HALF_EVEN,
)
from typing import (
    Any,
    Literal,
)

from piker.types import Struct


# TODO: make these literals..
_underlyings: list[str] = [
    'stock',
    'bond',
    'crypto',
    'fiat',
    'commodity',
]

_crypto_derivs: list[str] = [
    'perpetual_future',
    'crypto_future',
]

_derivs: list[str] = [
    'swap',
    'future',
    'continuous_future',
    'option',
    'futures_option',

    # if we can't figure it out, presume the worst XD
    'unknown',
]

# NOTE: a tag for other subsystems to try
# and do default settings for certain things:
# - allocator does unit vs. dolla size limiting.
AssetTypeName: Literal[
    _underlyings
    +
    _derivs
    +
    _crypto_derivs
]

# eg. stock, future, option, bond etc.


def dec_digits(
    value: float | str | Decimal,

) -> int:
    '''
    Return the number of precision digits read from a decimal or float
    value.

    '''
    if value == 0:
        return 0

    return int(
        -Decimal(str(value)).as_tuple().exponent
    )


float_digits = dec_digits


def digits_to_dec(
    ndigits: int,
) -> Decimal:
    '''
    Return the minimum float value for an input integer value.

    eg. 3 -> 0.001

    '''
    if ndigits == 0:
        return Decimal('0')

    return Decimal('0.' + '0'*(ndigits-1) + '1')


class Asset(Struct, frozen=True):
    '''
    Container type describing any transactable asset and its
    contract-like and/or underlying technology meta-info.

    '''
    name: str
    atype: str  # AssetTypeName

    # minimum transaction size / precision.
    # eg. for buttcoin this is a "satoshi".
    tx_tick: Decimal

    # NOTE: additional info optionally packed in by the backend, but
    # should not be explicitly required in our generic API.
    info: dict | None = None

    # `None` is not toml-compat so drop info
    # if no extra data added..
    def to_dict(
        self,
        **kwargs,
    ) -> dict:
        dct = super().to_dict(**kwargs)
        if (info := dct.pop('info', None)):
            dct['info'] = info

        assert dct['tx_tick']
        return dct

    @classmethod
    def from_msg(
        cls,
        msg: dict[str, Any],
    ) -> Asset:
        return cls(
            tx_tick=Decimal(str(msg.pop('tx_tick'))),
            info=msg.pop('info', None),
            **msg,
        )

    def __str__(self) -> str:
        return self.name

    def quantize(
        self,
        size: float,

    ) -> Decimal:
        '''
        Truncate input ``size: float`` using ``Decimal``
        quantized form of the digit precision defined
        by ``self.lot_tick_size``.

        '''
        digits = float_digits(self.tx_tick)
        return Decimal(size).quantize(
            Decimal(f'1.{"0".ljust(digits, "0")}'),
            rounding=ROUND_HALF_EVEN
        )

    @classmethod
    def guess_from_mkt_ep_key(
        cls,
        mkt_ep_key: str,
        atype: str | None = None,

    ) -> Asset:
        '''
        A hacky guess method for presuming a (target) asset's properties
        based on either the actual market endpoint key, or config settings
        from the user.

        '''
        atype = atype or 'unknown'

        # attempt to strip off any source asset
        # via presumed syntax of:
        # - <dst>/<src>
        # - <dst>.<src>
        # - etc.
        for char in ['/', '.']:
            dst, _, src = mkt_ep_key.partition(char)
            if src:
                if not atype:
                    atype = 'fiat'
                break

        return Asset(
            name=dst,
            atype=atype,
            tx_tick=Decimal('0.01'),
        )


def maybe_cons_tokens(
    tokens: list[Any],
    delim_char: str = '.',
) -> str:
    '''
    Construct `str` output from a maybe-concatenation of input
    sequence of elements in ``tokens``.

    '''
    return delim_char.join(filter(bool, tokens)).lower()


class MktPair(Struct, frozen=True):
    '''
    Market description for a pair of assets which are tradeable:
    a market which enables transactions of the form,
        buy: source asset -> destination asset
        sell: destination asset -> source asset

    The main intention of this type is for a **simple** cross-asset
    venue/broker normalized description type from which all
    market-auctions can be mapped from FQME identifiers.

    TODO: our eventual target fqme format/schema is:
    <dst>/<src>.<expiry>.<con_info_1>.<con_info_2>. -> .<venue>.<broker>
              ^ -- optional tokens ------------------------------- ^

    Notes:
    ------

    Some venues provide a different semantic (which we frankly find
    confusing and non-general) such as "base" and "quote" asset.
    For example this is how `binance` defines the terms:

    https://binance-docs.github.io/apidocs/websocket_api/en/#public-api-definitions
    https://binance-docs.github.io/apidocs/futures/en/#public-endpoints-info

    - *base* asset refers to the asset that is the *quantity* of a symbol.
    - *quote* asset refers to the asset that is the *price* of a symbol.

    In other words the "quote" asset is the asset that the market
    is pricing "buys" *in*, and the *base* asset is the one that the market
    allows you to "buy" an *amount of*. Put more simply the *quote*
    asset is our "source" asset and the *base* asset is our "destination"
    asset.

    This definition can be further understood reading our
    `.brokers.binance.api.Pair` type wherein the
    `Pair.[quote/base]AssetPrecision` field determines the (transfer)
    transaction precision available per asset; i.e. the satoshis
    unit in bitcoin for representing the minimum size of a
    transaction that can take place on the blockchain.

    '''
    dst: str | Asset
    # "destination asset" (name) used to buy *to*
    # (or used to sell *from*)

    price_tick: Decimal  # minimum price increment
    size_tick: Decimal  # minimum size (aka vlm) increment
    # the tick size is the number describing the smallest step in value
    # available in this market between the source and destination
    # assets.
    # https://en.wikipedia.org/wiki/Tick_size
    # https://en.wikipedia.org/wiki/Commodity_tick
    # https://en.wikipedia.org/wiki/Percentage_in_point

    # unique "broker id" since every market endpoint provider
    # has their own nomenclature and schema for market maps.
    bs_mktid: str
    broker: str  # the middle man giving access

    # NOTE: to start this field is optional but should eventually be
    # required; the reason is for backward compat since more positioning
    # calculations were not originally stored with a src asset..

    src: str | Asset = ''
    # "source asset" (name) used to buy *from*
    # (or used to sell *to*).

    venue: str = ''  # market venue provider name
    expiry: str = ''  # for derivs, expiry datetime parseable str

    # destination asset's financial type/classification name
    # NOTE: this is required for the order size allocator system,
    # since we use different default settings based on the type
    # of the destination asset, eg. futes use a units limits vs.
    # equities a $limit.
    # dst_type: AssetTypeName | None = None

    # source asset's financial type/classification name
    # TODO: is a src type required for trading?
    # there's no reason to need any more than the one-way alloc-limiter
    # config right?
    # src_type: AssetTypeName

    # for derivs, info describing contract, egs.
    # strike price, call or put, swap type, exercise model, etc.
    contract_info: list[str] | None = None

    # TODO: rename to sectype since all of these can
    # be considered "securities"?
    _atype: str = ''

    # allow explicit disable of the src part of the market
    # pair name -> useful for legacy markets like qqq.nasdaq.ib
    _fqme_without_src: bool = False

    # NOTE: when cast to `str` return fqme
    def __str__(self) -> str:
        return self.fqme

    def to_dict(
        self,
        **kwargs,
    ) -> dict:
        d = super().to_dict(**kwargs)
        d['src'] = self.src.to_dict(**kwargs)

        if not isinstance(self.dst, str):
            d['dst'] = self.dst.to_dict(**kwargs)
        else:
            d['dst'] = str(self.dst)

        d['price_tick'] = str(self.price_tick)
        d['size_tick'] = str(self.size_tick)

        if self.contract_info is None:
            d.pop('contract_info')

        # d.pop('_fqme_without_src')

        return d

    @classmethod
    def from_msg(
        cls,
        msg: dict[str, Any],

    ) -> MktPair:
        '''
        Constructor for a received msg-dict normally received over IPC.

        '''
        if not isinstance(
            dst_asset_msg := msg.pop('dst'),
            str,
        ):
            dst: Asset = Asset.from_msg(dst_asset_msg)  # .copy()
        else:
            dst: str = dst_asset_msg

        src_asset_msg: dict = msg.pop('src')
        src: Asset = Asset.from_msg(src_asset_msg)  # .copy()

        # XXX NOTE: ``msgspec`` can encode `Decimal` but it doesn't
        # decode it by default since we aren't spec-ing these
        # msgs as structs proper to get them to decode implicitly
        # (yet) as per,
        # - https://github.com/pikers/piker/pull/354
        # - https://github.com/goodboy/tractor/pull/311
        # SO we have to ensure we do a struct type
        # cast (which `.copy()` does) to ensure we get the right
        # type!
        return cls(
            dst=dst,
            src=src,
            price_tick=Decimal(msg.pop('price_tick')),
            size_tick=Decimal(msg.pop('size_tick')),
            **msg,
        ).copy()

    @property
    def resolved(self) -> bool:
        return isinstance(self.dst, Asset)

    @classmethod
    def from_fqme(
        cls,
        fqme: str,

        price_tick: float | str,
        size_tick: float | str,
        bs_mktid: str,

        broker: str | None = None,
        **kwargs,

    ) -> MktPair:

        _fqme: str = fqme
        if (
            broker
            and broker not in fqme
        ):
            _fqme = f'{fqme}.{broker}'

        broker, mkt_ep_key, venue, expiry = unpack_fqme(_fqme)

        kven: str = kwargs.pop('venue', venue)
        if venue:
            assert venue == kven
        else:
            venue = kven

        exp: str = kwargs.pop('expiry', expiry)
        if expiry:
            assert exp == expiry
        else:
            expiry = exp

        dst: Asset = Asset.guess_from_mkt_ep_key(
            mkt_ep_key,
            atype=kwargs.get('_atype'),
        )

        # XXX: loading from a fqme string will
        # leave this pair as "unresolved" meaning
        # we don't yet have `.dst` set as an `Asset`
        # which we expect to be filled in by some
        # backend client with access to that data-info.
        return cls(
            dst=dst,
            # XXX: not resolved to ``Asset`` :(
            # src=src,

            broker=broker,
            venue=venue,
            # XXX NOTE: we presume this token
            # is the expiry for now!
            expiry=expiry,

            price_tick=price_tick,
            size_tick=size_tick,
            bs_mktid=bs_mktid,

            **kwargs,

        ).copy()

    @property
    def key(self) -> str:
        '''
        The "endpoint key" for this market.

        '''
        return self.pair

    def pair(
        self,
        delim_char: str | None = None,
    ) -> str:
        '''
        The "endpoint asset pair key" for this market.
        Eg. mnq/usd or btc/usdt or xmr/btc

        In most other tina platforms this is referred to as the
        "symbol".

        '''
        return maybe_cons_tokens(
            [str(self.dst),
             str(self.src)],
            # TODO: make the default '/'
            delim_char=delim_char or '',
        )

    @property
    def suffix(self) -> str:
        '''
        The "contract suffix" for this market.

        Eg. mnq/usd.20230616.cme.ib
                    ^ ----- ^
        or tsla/usd.20230324.200c.cboe.ib
                    ^ ---------- ^

        In most other tina platforms they only show you these details in
        some kinda "meta data" format, we have FQMEs so we do this up
        front and explicit.

        '''
        field_strs = [self.expiry]
        con_info = self.contract_info
        if con_info is not None:
            field_strs.extend(con_info)

        return maybe_cons_tokens(field_strs)

    def get_fqme(
        self,

        # NOTE: allow dropping the source asset from the
        # market endpoint's pair key. Eg. to change
        # mnq/usd.<> -> mnq.<> which is useful when
        # searching (legacy) stock exchanges.
        without_src: bool = False,
        delim_char: str | None = None,

    ) -> str:
        '''
        Return the fully qualified market endpoint-address for the
        pair of transacting assets.

        fqme = "fully qualified market endpoint"

        And yes, you pronounce it colloquially as read..

        Basically the idea here is that all client code (consumers of piker's
        APIs which query the data/broker-provider agnostic layer(s)) should be
        able to tell which backend / venue / derivative each data feed/flow is
        from by an explicit string-key of the current form:

            <market-instrument-name>
                .<venue>
                .<expiry>
                .<derivative-suffix-info>
                .<brokerbackendname>

        eg. for an explicit 'daq mini futes contract: mnq.cme.20230317.ib

        TODO: I have thoughts that we should actually change this to be
        more like an "attr lookup" (like how the web should have done
        urls, but marketing peeps ruined it etc. etc.)

            <broker>.<venue>.<instrumentname>.<suffixwithmetadata>

        TODO:
        See community discussion on naming and nomenclature, order
        of addressing hierarchy, general schema, internal representation:

        https://github.com/pikers/piker/issues/467

        '''
        key: str = (
            self.pair(delim_char=delim_char)
            if not (without_src or self._fqme_without_src)
            else str(self.dst)
        )

        return maybe_cons_tokens([
            key,  # final "pair name" (eg. qqq[/usd], btcusdt)
            self.venue,
            self.suffix,  # includes expiry and other con info
            self.broker,
        ])

    # NOTE: the main idea behind an fqme is to map a "market address"
    # to some endpoint from a transaction provider (eg. a broker) such
    # that we build a table of `fqme: str -> bs_mktid: Any` where any "piker
    # market address" maps 1-to-1 to some broker trading endpoint.
    # @cached_property
    fqme = property(get_fqme)

    def get_bs_fqme(
        self,
        **kwargs,
    ) -> str:
        '''
        FQME sin broker part XD

        '''
        sin_broker, *_ = self.get_fqme(**kwargs).rpartition('.')
        return sin_broker

    bs_fqme = property(get_bs_fqme)

    @property
    def fqsn(self) -> str:
        return self.fqme

    def quantize(
        self,
        size: float,

        quantity_type: Literal['price', 'size'] = 'size',

    ) -> Decimal:
        '''
        Truncate input ``size: float`` using ``Decimal``
        and ``.size_tick``'s # of digits.

        '''
        match quantity_type:
            case 'price':
                digits = float_digits(self.price_tick)
            case 'size':
                digits = float_digits(self.size_tick)

        return Decimal(size).quantize(
            Decimal(f'1.{"0".ljust(digits, "0")}'),
            rounding=ROUND_HALF_EVEN
        )

    # TODO: BACKWARD COMPAT, TO REMOVE?
    @property
    def type_key(self) -> str:

        # if set explicitly then use it!
        if self._atype:
            return self._atype

        if isinstance(self.dst, Asset):
            return str(self.dst.atype)

        return 'UNKNOWN'

    @property
    def price_tick_digits(self) -> int:
        return float_digits(self.price_tick)

    @property
    def size_tick_digits(self) -> int:
        return float_digits(self.size_tick)


def unpack_fqme(
    fqme: str,

    broker: str | None = None

) -> tuple[str, ...]:
    '''
    Unpack a fully-qualified-symbol-name to ``tuple``.

    '''
    venue = ''
    suffix = ''

    # TODO: probably reverse the order of all this XD
    tokens = fqme.split('.')

    match tokens:
        case [mkt_ep, broker]:
            # probably crypto
            return (
                broker,
                mkt_ep,
                '',
                '',
            )

        # TODO: swap venue and suffix/deriv-info here?
        case [mkt_ep, venue, suffix, broker]:
            pass

        # handle `bs_mktid` + `broker` input case
        case [
            mkt_ep, venue, suffix
        ] if (
            broker
            and suffix != broker
        ):
            pass

        case [mkt_ep, venue, broker]:
            suffix = ''

        case _:
            raise ValueError(f'Invalid fqme: {fqme}')

    return (
        broker,
        mkt_ep,
        venue,
        # '.'.join([mkt_ep, venue]),
        suffix,
    )


class Symbol(Struct):
    '''
    I guess this is some kinda container thing for dealing with
    all the different meta-data formats from brokers?

    '''
    key: str

    broker: str = ''
    venue: str = ''

    # precision descriptors for price and vlm
    tick_size: Decimal = Decimal('0.01')
    lot_tick_size: Decimal = Decimal('0.0')

    suffix: str = ''
    broker_info: dict[str, dict[str, Any]] = {}

    @classmethod
    def from_fqme(
        cls,
        fqsn: str,
        info: dict[str, Any],

    ) -> Symbol:
        broker, mktep, venue, suffix = unpack_fqme(fqsn)
        tick_size = info.get('price_tick_size', 0.01)
        lot_size = info.get('lot_tick_size', 0.0)

        return Symbol(
            broker=broker,
            key=mktep,
            tick_size=tick_size,
            lot_tick_size=lot_size,
            venue=venue,
            suffix=suffix,
            broker_info={broker: info},
        )

    @property
    def type_key(self) -> str:
        return list(self.broker_info.values())[0]['asset_type']

    @property
    def tick_size_digits(self) -> int:
        return float_digits(self.tick_size)

    @property
    def lot_size_digits(self) -> int:
        return float_digits(self.lot_tick_size)

    @property
    def price_tick(self) -> Decimal:
        return Decimal(str(self.tick_size))

    @property
    def size_tick(self) -> Decimal:
        return Decimal(str(self.lot_tick_size))

    @property
    def broker(self) -> str:
        return list(self.broker_info.keys())[0]

    @property
    def fqme(self) -> str:
        return maybe_cons_tokens([
            self.key,  # final "pair name" (eg. qqq[/usd], btcusdt)
            self.venue,
            self.suffix,  # includes expiry and other con info
            self.broker,
        ])

    def quantize(
        self,
        size: float,
    ) -> Decimal:
        digits = float_digits(self.lot_tick_size)
        return Decimal(size).quantize(
            Decimal(f'1.{"0".ljust(digits, "0")}'),
            rounding=ROUND_HALF_EVEN
        )

    # NOTE: when cast to `str` return fqme
    def __str__(self) -> str:
        return self.fqme
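Editor's note: to ground the addressing scheme above, a small sketch of unpacking FQME strings through the `unpack_fqme()` helper shown in the hunk above (market names invented for illustration):

# a 3-token crypto-style address: <pair>.<venue>.<broker>
broker, mkt_ep, venue, suffix = unpack_fqme('btcusdt.spot.binance')
assert (broker, mkt_ep, venue, suffix) == ('binance', 'btcusdt', 'spot', '')

# a 4-token derivative address also carries the contract suffix.
broker, mkt_ep, venue, suffix = unpack_fqme('mnq.cme.20230317.ib')
assert (broker, mkt_ep, venue, suffix) == ('ib', 'mnq', 'cme', '20230317')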
@ -1,983 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Personal/Private position parsing, calculating, summarizing in a way
|
||||
that doesn't try to cuk most humans who prefer to not lose their moneys..
|
||||
|
||||
(looking at you `ib` and dirt-bird friends)
|
||||
|
||||
'''
|
||||
from __future__ import annotations
|
||||
from contextlib import contextmanager as cm
|
||||
from decimal import Decimal
|
||||
from pprint import pformat
|
||||
from pathlib import Path
|
||||
from types import ModuleType
|
||||
from typing import (
|
||||
Any,
|
||||
Iterator,
|
||||
Generator
|
||||
)
|
||||
|
||||
import pendulum
|
||||
from pendulum import (
|
||||
datetime,
|
||||
now,
|
||||
)
|
||||
import polars as pl
|
||||
import tomlkit
|
||||
|
||||
from ._ledger import (
|
||||
Transaction,
|
||||
TransactionLedger,
|
||||
)
|
||||
from ._mktinfo import (
|
||||
MktPair,
|
||||
Asset,
|
||||
unpack_fqme,
|
||||
)
|
||||
from .calc import (
|
||||
ppu,
|
||||
# iter_by_dt,
|
||||
)
|
||||
from .. import config
|
||||
from ..clearing._messages import (
|
||||
BrokerdPosition,
|
||||
)
|
||||
from piker.types import Struct
|
||||
from piker.data._symcache import SymbologyCache
|
||||
from ..log import get_logger
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
class Position(Struct):
|
||||
'''
|
||||
An asset "position" model with attached clearing transaction history.
|
||||
|
||||
A financial "position" in `piker` terms is a summary of accounting
|
||||
metrics computed from a transaction ledger; generally it describes
|
||||
some accumulative "size" and "average price" from the summarized
|
||||
underlying transaction set.
|
||||
|
||||
In piker we focus on the `.ppu` (price per unit) and the `.bep`
|
||||
(break even price) including all transaction entries and exits since
|
||||
the last "net-zero" size of the destination asset's holding.
|
||||
|
||||
This interface serves as an object API for computing and
|
||||
tracking positions as well as supports serialization for
|
||||
storage in the local file system (in TOML) and to interchange
|
||||
as a msg over IPC.
|
||||
|
||||
'''
|
||||
mkt: MktPair
|
||||
|
||||
# can be +ve or -ve for long/short
|
||||
# size: float
|
||||
|
||||
# "price-per-unit price" above or below which pnl moves above and
|
||||
# below zero for the entirety of the current "trade state". The ppu
|
||||
# is only modified on "increases of" the absolute size of a position
|
||||
# in one of a long/short "direction" (i.e. abs(.size_i) > 0 after
|
||||
# the next transaction given .size was > 0 before that tx, and vice
|
||||
# versa for -ve sized positions).
|
||||
# ppu: float
|
||||
|
||||
# TODO: break-even-price support!
|
||||
# bep: float
|
||||
|
||||
# unique "backend system market id"
|
||||
bs_mktid: str
|
||||
|
||||
split_ratio: int | None = None
|
||||
|
||||
# TODO: use a `pl.DataFrame` intead?
|
||||
_events: dict[str, Transaction | dict] = {}
|
||||
|
||||
@property
|
||||
def expiry(self) -> datetime | None:
|
||||
'''
|
||||
Security expiry if it has a limited lifetime.
|
||||
|
||||
For non-derivative markets this is normally `None`.
|
||||
|
||||
'''
|
||||
exp: str | None = self.mkt.expiry
|
||||
if exp is None:
|
||||
return None
|
||||
|
||||
match exp.lower():
|
||||
# empty str, 'perp' (contract) or simply a null
|
||||
# signifies instrument with NO expiry.
|
||||
case 'perp' | '' | None:
|
||||
return None
|
||||
|
||||
case str():
|
||||
return pendulum.parse(exp)
|
||||
|
||||
case _:
|
||||
raise ValueError(
|
||||
f'Unhandled `MktPair.expiry`: `{exp}`'
|
||||
)
|
||||
|
||||
# TODO: idea: "real LIFO" dynamic positioning.
|
||||
# - when a trade takes place where the pnl for
|
||||
# the (set of) trade(s) is below the breakeven price
|
||||
# it may be that the trader took a +ve pnl on a short(er)
|
||||
# term trade in the same account.
|
||||
# - in this case we could recalc the be price to
|
||||
# be reverted back to it's prior value before the nearest term
|
||||
# trade was opened.?
|
||||
# def bep() -> float:
|
||||
# ...
|
||||
def clears_df(self) -> pl.DataFrame:
|
||||
...
|
||||
|
||||
def clearsitems(self) -> list[(str, dict)]:
|
||||
return ppu(
|
||||
self.iter_by_type('clear'),
|
||||
as_ledger=True
|
||||
)
|
||||
|
||||
def iter_by_type(
|
||||
self,
|
||||
etype: str,
|
||||
|
||||
) -> Iterator[dict | Transaction]:
|
||||
'''
|
||||
Iterate the internally managed ``._events: dict`` table in
|
||||
datetime-stamped order.
|
||||
|
||||
'''
|
||||
# sort on the expected datetime field
|
||||
# for event in iter_by_dt(
|
||||
for event in sorted(
|
||||
self._events.values(),
|
||||
key=lambda entry: entry.dt
|
||||
):
|
||||
# if event.etype == etype:
|
||||
match event:
|
||||
case (
|
||||
{'etype': _etype} |
|
||||
Transaction(etype=str(_etype))
|
||||
):
|
||||
assert _etype == etype
|
||||
yield event
|
||||
|
||||
|
||||
def minimized_clears(self) -> dict[str, dict]:
|
||||
'''
|
||||
Minimize the position's clears entries by removing
|
||||
all transactions before the last net zero size except for when
|
||||
a clear event causes a position "side" change (i.e. long to short
|
||||
after a single fill) wherein we store the transaction prior to the
|
||||
net-zero pass.
|
||||
|
||||
This avoids unnecessary history irrelevant to the current
|
||||
non-net-zero size state when serializing for offline storage.
|
||||
|
||||
'''
|
||||
# scan for the last "net zero" position by iterating
|
||||
# transactions until the next net-zero cumsize, rinse,
|
||||
# repeat.
|
||||
cumsize: float = 0
|
||||
clears_since_zero: list[dict] = []
|
||||
|
||||
for tid, cleardict in self.clearsitems():
|
||||
cumsize = float(
|
||||
# self.mkt.quantize(cumsize + cleardict['tx'].size
|
||||
self.mkt.quantize(cleardict['cumsize'])
|
||||
)
|
||||
clears_since_zero.append(cleardict)
|
||||
|
||||
# NOTE: always pop sign change since we just use it to
|
||||
# determine which entry to clear "up to".
|
||||
sign_change: bool = cleardict.pop('sign_change')
|
||||
if cumsize == 0:
|
||||
clears_since_zero = clears_since_zero[:-2]
|
||||
# clears_since_zero.clear()
|
||||
|
||||
elif sign_change:
|
||||
clears_since_zero = clears_since_zero[:-1]
|
||||
|
||||
return clears_since_zero

    def to_pretoml(self) -> tuple[str, dict]:
        '''
        Prep this position's data contents for export as an entry
        in a TOML "account file" (such as
        `account.binance.paper.toml`) including re-structuring of
        the ``._events`` entries as an array of inline-subtables
        for better ``pps.toml`` compactness.

        '''
        mkt: MktPair = self.mkt
        assert isinstance(mkt, MktPair)

        # TODO: we need to figure out how to have one top level
        # listing venue here even when the backend isn't providing
        # it via the trades ledger..
        # drop symbol obj in serialized form
        fqme: str = mkt.fqme
        broker, mktep, venue, suffix = unpack_fqme(fqme)

        # an asset resolved mkt where we have ``Asset`` info about
        # each tradeable asset in the market.
        asset_type: str = 'n/a'
        if mkt.resolved:
            dst: Asset = mkt.dst
            asset_type = dst.atype

        asdict: dict[str, Any] = {
            'bs_mktid': self.bs_mktid,
            # 'expiry': self.expiry or '',
            'asset_type': asset_type,
            'price_tick': mkt.price_tick,
            'size_tick': mkt.size_tick,
        }
        if exp := self.expiry:
            asdict['expiry'] = exp

        clears_since_zero: list[dict] = self.minimized_clears()

        # setup a "multi-line array of inline tables" which we call
        # the "clears table", contained by each position entry in
        # an "account file".
        clears_table: tomlkit.Array = tomlkit.array()
        clears_table.multiline(
            multiline=True,
            indent='',
        )

        for entry in clears_since_zero:
            inline_table = tomlkit.inline_table()

            # insert optional clear fields in column order
            for k in ['ppu', 'cumsize']:
                if val := entry.get(k):
                    inline_table[k] = val

            # insert required fields
            for k in ['price', 'size', 'cost']:
                inline_table[k] = entry[k]

            # NOTE: we don't actually need to serialize datetime to
            # a parsable `str` since `tomlkit` supports a native
            # `DateTime`, but seems like we're not doing it entirely
            # in clearing tables yet?
            inline_table['dt'] = entry['dt']  # .isoformat('T')

            tid: str = entry['tid']
            inline_table['tid'] = tid
            clears_table.append(inline_table)

        # assert not events
        asdict['clears'] = clears_table

        return fqme, asdict
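
    # NOTE: a serialized clears row ends up as a TOML inline table
    # roughly like (hypothetical values):
    # {ppu = 100.0, cumsize = 10.0, price = 100.0, size = 10.0,
    #  cost = 0.5, dt = 2023-01-01T00:00:00+00:00, tid = "abc123"}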

    def update_from_msg(
        self,
        msg: BrokerdPosition,

    ) -> None:
        '''
        Hard-set the current position from a remotely-received
        (normally via IPC) msg by applying the msg as the one (and
        only) txn in the `._events` table thus forcing the current
        asset allocation blindly.

        '''
        mkt: MktPair = self.mkt
        now_dt: pendulum.DateTime = now()
        now_str: str = str(now_dt)

        # XXX: wipe all prior txn history since if we wanted it we
        # wouldn't be using this method to compute our state!
        self._events.clear()

        # NOTE WARNING XXX: we summarize the pos with a single
        # summary transaction (for now) until we either pass THIS
        # type as msg directly from emsd or come up with a better
        # way?
        t = Transaction(
            fqme=mkt.fqme,
            bs_mktid=mkt.bs_mktid,
            size=msg['size'],
            price=msg['avg_price'],
            cost=0,

            # NOTE: special provisions required!
            # - tid needs to be unique or this txn will be ignored!!
            tid=now_str,

            # TODO: also figure out how to avoid this!
            dt=now_dt,
        )
        self.add_clear(t)

    @property
    def dsize(self) -> float:
        '''
        The "dollar" size of the pp, normally in source asset
        (fiat) units.

        '''
        return self.ppu * self.cumsize

    def expired(self) -> bool:
        '''
        Predicate which checks if the contract/instrument is past
        its expiry.

        '''
        return bool(self.expiry) and self.expiry < now()

    def add_clear(
        self,
        t: Transaction,
    ) -> bool:
        '''
        Update clearing table by calculating the rolling ppu and
        (accumulative) size in both the clears entry and local
        attrs state.

        Inserts are always done in datetime sorted order.

        '''
        # added: bool = False
        tid: str = t.tid
        if tid in self._events:
            log.warning(f'{t} is already added?!')
            # return added

        # TODO: apparently this IS possible with a dict but not
        # common and probably not that beneficial unless we're also
        # going to do cum-calcs on each insert?
        # https://stackoverflow.com/questions/38079171/python-insert-new-element-into-sorted-list-of-dictionaries
        # from bisect import insort
        # insort(
        #     self._clears,
        #     clear,
        #     key=lambda entry: entry['dt']
        # )
        self._events[tid] = t
        return True

    # TODO: compute these incrementally instead
    # of re-looping through each time resulting in O(n**2)
    # behaviour..? Can we have some kinda clears len to cached
    # output subsys?
    def calc_ppu(self) -> float:
        return ppu(self.iter_by_type('clear'))

        # # return self.clearsdict()
        # # )
        # return list(self.clearsdict())[-1][1]['ppu']

    @property
    def ppu(self) -> float:
        return round(
            self.calc_ppu(),
            ndigits=self.mkt.price_tick_digits,
        )

    def calc_size(self) -> float:
        '''
        Calculate the unit size of this position in the destination
        asset using the clears/trade event table; zero if expired.

        '''
        # time-expired pps (normally derivatives) are "closed"
        # and have a zero size.
        if self.expired():
            return 0.

        clears: list[tuple[str, dict]] = self.clearsitems()
        if clears:
            return clears[-1][1]['cumsize']
        else:
            return 0.

        # if self.split_ratio is not None:
        #     size = round(size * self.split_ratio)

        # return float(
        #     self.mkt.quantize(size),
        # )

    # TODO: ideally we don't implicitly recompute the
    # full sequence from `.clearsdict()` every read..
    # the writer-updates-local-attr-state was actually kinda nice
    # before, but sometimes led to hard to detect bugs when
    # state was de-synced.
    @property
    def cumsize(self) -> float:

        if (
            self.expiry
            and self.expiry < now()
        ):
            return 0

        return round(
            self.calc_size(),
            ndigits=self.mkt.size_tick_digits,
        )

    # TODO: once we have an `.events` table with diff
    # mkt event types..?
    # def suggest_split(self) -> float:
    #     ...


class Account(Struct):
    '''
    The real-time (double-entry accounting) state of
    a given **asset ownership tracking system**, normally offered
    or measured from some brokerage, CEX or (implied virtual)
    summary crypto$ "wallets" aggregated and tracked over some set
    of DEX-es.

    Both market-mapped and ledger-system-native (aka inter-account
    "transfers") transactions are accounted and they pertain to
    (implied) PnL relative to any other accountable asset.

    More specifically in piker terms, an account tracks all of:

    - the *balances* of all assets currently available for use either
      in (future) market or (inter-account/wallet) transfer
      transactions.
    - a transaction *ledger* from a given brokerd backend which
      is a recording of all (known) such transactions from the past.
    - a set of financial *positions* as measured from the current
      ledger state.

    See the semantic origins from double-entry bookkeeping:
    https://en.wikipedia.org/wiki/Double-entry_bookkeeping

    '''
    mod: ModuleType
    acctid: str
    pps: dict[str, Position]

    conf_path: Path
    conf: dict | None = {}

    # TODO: track a table of asset balances as `.balances:
    # dict[Asset, float]`?

    @property
    def brokername(self) -> str:
        return self.mod.name

    def update_from_ledger(
        self,
        ledger: TransactionLedger | dict[str, Transaction],
        cost_scalar: float = 2,
        symcache: SymbologyCache | None = None,

        _mktmap_table: dict[str, MktPair] | None = None,

    ) -> dict[str, Position]:
        '''
        Update the internal `.pps[str, Position]` table from input
        transactions recomputing the price-per-unit (ppu) and
        accumulative size for each entry.

        '''
        if (
            not isinstance(ledger, TransactionLedger)
        ):
            if symcache is None:
                raise RuntimeError(
                    'No ledger provided!\n'
                    'We cannot determine the `MktPair`s without a symcache..\n'
                    'Please provide `symcache: SymbologyCache` when '
                    'processing NEW positions!'
                )
            itertxns = sorted(
                ledger.values(),
                key=lambda t: t.dt,
            )
        else:
            itertxns = ledger.iter_txns()
            symcache = ledger.symcache

        pps = self.pps
        updated: dict[str, Position] = {}

        # lifo update all pps from records, ensuring
        # we compute the PPU and size sorted in time!
        for txn in itertxns:
            fqme: str = txn.fqme
            bs_mktid: str = txn.bs_mktid

            # template the mkt-info presuming a legacy market ticks
            # if no info exists in the transactions..
            try:
                mkt: MktPair = symcache.mktmaps[fqme]
            except KeyError:
                if _mktmap_table is None:
                    raise

                # XXX: caller is allowed to provide a fallback
                # mktmap table for the case where a new position is
                # being added and the preloaded symcache didn't
                # have this entry prior (eg. with frickin IB..)
                mkt = _mktmap_table[fqme]

            if not (pos := pps.get(bs_mktid)):

                assert isinstance(
                    mkt,
                    MktPair,
                )

                # if no existing pos, allocate fresh one.
                pos = pps[bs_mktid] = Position(
                    mkt=mkt,
                    bs_mktid=bs_mktid,
                )
            else:
                # NOTE: if for some reason a "less resolved" mkt pair
                # info has been set (based on the `.fqme` being
                # a shorter string), instead use the one from the
                # transaction since it likely has (more) full
                # information from the provider.
                if len(pos.mkt.fqme) < len(fqme):
                    pos.mkt = mkt

            # update clearing acnt!
            # NOTE: likely you'll see repeats of the same
            # ``Transaction`` passed in here if/when you are
            # restarting a ``brokerd.ib`` where the API will
            # re-report trades from the current session, so we need
            # to make sure we don't "double count" these in pp
            # calculations; `Position.add_clear()` stores txs in
            # a `._events: dict[tid, tx]` which should always
            # ensure this is true!
            pos.add_clear(txn)
            updated[txn.bs_mktid] = pos

        # NOTE: deliver only the position entries that were
        # actually updated (modified the state) from the input
        # transaction set.
        return updated
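
    # Usage sketch (hypothetical names), assuming an already-opened
    # ledger instance:
    #
    #   updated: dict[str, Position] = acnt.update_from_ledger(ledger)
    #   for bs_mktid, pos in updated.items():
    #       print(pos.cumsize, pos.ppu)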

    def dump_active(
        self,
    ) -> tuple[
        dict[str, Position],
        dict[str, Position]
    ]:
        '''
        Iterate all tabulated positions, render active positions to
        a ``dict`` format amenable to serialization (via TOML) and drop
        from state (``.pps``) as well as return in a ``dict`` all
        ``Position``s which have recently closed.

        '''
        # NOTE: newly closed positions are also important to report/return
        # since a consumer, like an order mode UI ;), might want to react
        # based on the closure (for example removing the breakeven line
        # and clearing the entry from any lists/monitors).
        closed_pp_objs: dict[str, Position] = {}
        open_pp_objs: dict[str, Position] = {}

        pp_objs = self.pps
        for bs_mktid in list(pp_objs):
            pos = pp_objs[bs_mktid]
            # pos.ensure_state()

            # "net-zero" is a "closed" position
            if pos.cumsize == 0:
                # NOTE: we DO NOT pop the pos here since it can still be
                # used to check for duplicate clears that may come in as
                # new transactions from some backend API and need to be
                # ignored; the closed positions won't be written to the
                # ``pps.toml`` since ``pp_active_entries`` above is what's
                # written.
                closed_pp_objs[bs_mktid] = pos

            else:
                open_pp_objs[bs_mktid] = pos

        return open_pp_objs, closed_pp_objs

    def prep_toml(
        self,
        active: dict[str, Position] | None = None,

    ) -> dict[str, Any]:

        if active is None:
            active, _ = self.dump_active()

        # ONLY dict-serialize all active positions; those that are
        # closed we don't store in the ``pps.toml``.
        to_toml_dict: dict[str, Any] = {}

        pos: Position
        for bs_mktid, pos in active.items():
            # pos.ensure_state()

            # serialize to pre-toml form
            # NOTE: we only store the minimal amount of clears that
            # make up this position since the last net-zero state,
            # see `Position.to_pretoml()` for details
            fqme, asdict = pos.to_pretoml()

            # clears: list[dict] = asdict['clears']
            # assert 'Datetime' not in [0]['dt']
            log.info(f'Updating active pp: {fqme}')

            # XXX: ugh, it's cuz we push the section under
            # the broker name.. maybe we need to rethink this?
            brokerless_key = fqme.removeprefix(f'{self.brokername}.')
            to_toml_dict[brokerless_key] = asdict

        return to_toml_dict

    def write_config(self) -> None:
        '''
        Write the current account state to the user's account TOML
        file, normally something like ``pps.toml``.

        '''
        # TODO: show diff output?
        # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
        # active, closed_pp_objs = acnt.dump_active()

        active, closed = self.dump_active()
        pp_entries = self.prep_toml(active=active)
        if pp_entries:
            log.info(
                f'Updating positions in ``{self.conf_path}``:\n'
                f'\n{pformat(pp_entries)}'
            )

            if self.brokername in self.conf:
                log.warning(
                    f'Rewriting {self.conf_path} keys to drop <broker.acct>!'
                )
                # legacy key schema including <brokername.account>, so
                # rewrite all entries to drop those tables since we now
                # put that in the filename!
                accounts = self.conf.pop(self.brokername)
                assert len(accounts) == 1
                entries = accounts.pop(self.acctid)
                self.conf.update(entries)

            self.conf.update(pp_entries)

        # drop any entries that are computed as net-zero
        # we don't care about storing in the pps file.
        if closed:
            bs_mktid: str
            for bs_mktid, pos in closed.items():
                fqme: str = pos.mkt.fqme
                if fqme in self.conf:
                    self.conf.pop(fqme)
                else:
                    # TODO: we really need a diff set of
                    # loglevels/colors per subsys.
                    log.warning(
                        f'Recent position for {fqme} was closed!'
                    )

        # if there are no active position entries according
        # to the toml dump output above, then clear the config
        # file of all entries.
        elif self.conf:
            for entry in list(self.conf):
                del self.conf[entry]

        # XXX WTF: if we use a tomlkit.Integer here we get this
        # super weird --1 thing going on for cumsize!?1!
        # NOTE: the fix was to always float() the size value loaded
        # in open_pps() below!
        config.write(
            config=self.conf,
            path=self.conf_path,
            fail_empty=False,
        )


def load_account(
    brokername: str,
    acctid: str,

    dirpath: Path | None = None,

) -> tuple[dict, Path]:
    '''
    Load an accounting (with positions) file from
    $CONFIG_DIR/accounting/account.<brokername>.<acctid>.toml

    Where normally $CONFIG_DIR = ~/.config/piker/
    and we implicitly create an accounting subdir which should
    normally be linked to a git repo managed by the user B)

    '''
    legacy_fn: str = f'pps.{brokername}.{acctid}.toml'
    fn: str = f'account.{brokername}.{acctid}.toml'

    dirpath: Path = dirpath or (config._config_dir / 'accounting')
    if not dirpath.is_dir():
        dirpath.mkdir()

    conf, path = config.load(
        path=dirpath / fn,
        decode=tomlkit.parse,
        touch_if_dne=True,
    )

    if not conf:
        legacypath = dirpath / legacy_fn
        log.warning(
            f'Your account file is using the legacy `pps.` prefix..\n'
            f'Rewriting contents to new name -> {path}\n'
            'Please delete the old file!\n'
            f'|-> {legacypath}\n'
        )
        if legacypath.is_file():
            legacy_config, _ = config.load(
                path=legacypath,

                # TODO: move to tomlkit:
                # - needs to be fixed to support bidict?
                #   https://github.com/sdispater/tomlkit/issues/289
                # - we need to use our fork's fix to do multiline array
                #   indenting.
                decode=tomlkit.parse,
            )
            conf.update(legacy_config)

            # XXX: override the presumably previously non-existent
            # file with legacy's contents.
            config.write(
                conf,
                path=path,
                fail_empty=False,
            )

    return conf, path
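
# e.g. for a (hypothetical) `binance.paper` account the resolved path
# would be: ~/.config/piker/accounting/account.binance.paper.toml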


# TODO: make this async and offer a `get_account()` that
# can be used from sync code which does the same thing as
# open_trade_ledger()!
@cm
def open_account(
    brokername: str,
    acctid: str,
    write_on_exit: bool = False,

    # for testing or manual load from file
    _fp: Path | None = None,

) -> Generator[Account, None, None]:
    '''
    Read out broker-specific position entries from
    incremental update file: ``pps.toml``.

    '''
    conf: dict
    conf_path: Path
    conf, conf_path = load_account(
        brokername,
        acctid,
        dirpath=_fp,
    )

    if brokername in conf:
        log.warning(
            f'Rewriting {conf_path} keys to drop <broker.acct>!'
        )
        # legacy key schema including <brokername.account>, so
        # rewrite all entries to drop those tables since we now
        # put that in the filename!
        accounts = conf.pop(brokername)
        for acctid in accounts.copy():
            entries = accounts.pop(acctid)
            conf.update(entries)

    # TODO: ideally we can pass in an existing
    # pps state to this right? such that we
    # don't have to do a ledger reload all the
    # time.. a couple ideas I can think of,
    # - mirror this in some client side actor which
    #   does the actual ledger updates (say the paper
    #   engine proc if we decide to always spawn it?),
    # - do diffs against updates from the ledger writer
    #   actor and the in-mem state here?
    from ..brokers import get_brokermod
    mod: ModuleType = get_brokermod(brokername)

    pp_objs: dict[str, Position] = {}
    acnt = Account(
        mod,
        acctid,
        pp_objs,
        conf_path,
        conf=conf,
    )

    # unmarshal/load ``pps.toml`` config entries into object form
    # and update `Account` obj entries.
    for fqme, entry in conf.items():

        # unique broker-backend-system market id
        bs_mktid = str(
            entry.get('bsuid')
            or entry.get('bs_mktid')
        )
        price_tick = Decimal(str(
            entry.get('price_tick_size')
            or entry.get('price_tick')
            or '0.01'
        ))
        size_tick = Decimal(str(
            entry.get('lot_tick_size')
            or entry.get('size_tick')
            or '0.0'
        ))

        # load the pair using the fqme which
        # will make the pair "unresolved" until
        # the backend broker actually loads
        # the market and position info.
        mkt = MktPair.from_fqme(
            fqme,
            price_tick=price_tick,
            size_tick=size_tick,
            bs_mktid=bs_mktid,
        )

        # TODO: RE: general "events" instead of just "clears":
        # - make this an `events` field and support more event types
        #   such as 'split', 'name_change', 'mkt_info', etc..
        # - should we make a ``Struct`` for clear/event entries? convert
        #   the "clear events table" from the toml config (list of dicts)
        #   and load it into object form for use in position processing
        #   of new clear events.

        # convert clears sub-tables (only in this form
        # for toml re-presentation) back into a master table.
        toml_clears_list: list[dict[str, Any]] = entry['clears']
        trans: list[Transaction] = []

        for clears_table in toml_clears_list:
            tid = clears_table['tid']
            dt: tomlkit.items.DateTime | str = clears_table['dt']

            # woa cool, `tomlkit` will actually load datetimes into
            # native form B)
            if isinstance(dt, str):
                dt = pendulum.parse(dt)

            clears_table['dt'] = dt
            trans.append(Transaction(
                fqme=bs_mktid,
                # sym=mkt,
                bs_mktid=bs_mktid,
                tid=tid,
                # XXX: not sure why sometimes these are loaded as
                # `tomlkit.Integer` and are eventually written with
                # an extra `-` in front like `--1`?
                size=float(clears_table['size']),
                price=float(clears_table['price']),
                cost=clears_table['cost'],
                dt=dt,
            ))

        split_ratio = entry.get('split_ratio')

        # if a string-ified expiry field is loaded we try to parse
        # it, THO, they should normally be serialized as native
        # TOML datetimes, since that's supported.
        if (
            (expiry := entry.get('expiry'))
            and isinstance(expiry, str)
        ):
            expiry: pendulum.DateTime = pendulum.parse(expiry)

        pp = pp_objs[bs_mktid] = Position(
            mkt,
            split_ratio=split_ratio,
            bs_mktid=bs_mktid,
        )

        # XXX: super critical, we need to be sure to include
        # all pps.toml clears to avoid reusing clears that were
        # already included in the current incremental update
        # state, since today's records may have already been
        # processed!
        for t in trans:
            pp.add_clear(t)

    try:
        yield acnt
    finally:
        if write_on_exit:
            acnt.write_config()
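
# Usage sketch (hypothetical broker/account names); the account file
# is only (re)written when `write_on_exit=True` is passed:
#
#   with open_account('binance', 'paper', write_on_exit=True) as acnt:
#       acnt.update_from_ledger(ledger)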


# TODO: drop the old name and THIS!
@cm
def open_pps(
    *args,
    **kwargs,
) -> Generator[Account, None, None]:
    log.warning(
        '`open_pps()` is now deprecated!\n'
        'Please use `with open_account() as acnt:`'
    )
    with open_account(*args, **kwargs) as acnt:
        yield acnt


def load_account_from_ledger(

    brokername: str,
    acctname: str,

    # post normalization filter on ledger entries to be processed
    filter_by_ids: dict[str, list[str]] | None = None,

    ledger: TransactionLedger | None = None,
    **kwargs,

) -> Account:
    '''
    Open a ledger file by broker name and account and read in and
    process any trade records into our normalized ``Transaction`` form
    and then update the equivalent ``Pptable`` and deliver the two
    bs_mktid-mapped dict-sets of the transactions and pps.

    '''
    acnt: Account
    with open_account(
        brokername,
        acctname,
        **kwargs,
    ) as acnt:
        if ledger is not None:
            acnt.update_from_ledger(ledger)

    return acnt

@@ -1,698 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Calculation routines for balance and position tracking such that
you know when you're losing money (if possible) XD

'''
from __future__ import annotations
from collections.abc import ValuesView
from contextlib import contextmanager as cm
from math import copysign
from typing import (
    Any,
    Callable,
    Iterator,
    TYPE_CHECKING,
)

import polars as pl
from pendulum import (
    DateTime,
    from_timestamp,
    parse,
)

if TYPE_CHECKING:
    from ._ledger import (
        Transaction,
        TransactionLedger,
    )


def ppu(
    clears: Iterator[Transaction],

    # include transaction cost in breakeven price
    # and presume the worst case of the same cost
    # to exit this transaction (even though in reality
    # it will be dynamic based on exit strategy).
    cost_scalar: float = 2,

    # return the ledger of clears as a (now dt sorted) dict with
    # new position fields inserted alongside each entry.
    as_ledger: bool = False,

) -> float | list[tuple[str, dict]]:
    '''
    Compute the "price-per-unit" price for the given non-zero sized
    rolling position.

    The recurrence relation which computes this (exponential) mean
    per new clear which **increases** the accumulative position size
    is:

    ppu[-1] = (
        ppu[-2] * accum_size[-2]
        +
        price[-1] * size[-1]
    ) / accum_size[-1]

    where `cost_basis` for the current step is simply the price
    * size of the most recent clearing transaction.

    -----
    TODO: get the BEP computed and working similarly!
    -----
    the equivalent "break even price" or bep at each new clear
    event step conversely only changes on a "position exiting
    clear" which **decreases** the cumulative dst asset size:

    bep[-1] = ppu[-1] - (cum_pnl[-1] / cumsize[-1])

    '''
    asize_h: list[float] = []  # historical accumulative size
    ppu_h: list[float] = []  # historical price-per-unit
    # ledger: dict[str, dict] = {}
    ledger: list[tuple[str, dict]] = []

    t: Transaction
    for t in clears:
        clear_size: float = t.size
        clear_price: str | float = t.price
        is_clear: bool = not isinstance(clear_price, str)

        last_accum_size = asize_h[-1] if asize_h else 0
        accum_size: float = last_accum_size + clear_size
        accum_sign = copysign(1, accum_size)
        sign_change: bool = False

        # on transfers we normally write some non-valid
        # price since withdrawal to another account/wallet
        # has nothing to do with inter-asset-market prices.
        # TODO: this should be better handled via a `type: 'tx'`
        # field as per existing issue surrounding all this:
        # https://github.com/pikers/piker/issues/510
        if isinstance(clear_price, str):
            # TODO: we can't necessarily have this commit to
            # the overall pos size since we also need to
            # include other positions' contributions to this
            # balance or we might end up with a -ve balance for
            # the position..
            continue

        # test if the pp somehow went "past" a net zero size state
        # resulting in a change of the "sign" of the size (+ve for
        # long, -ve for short).
        sign_change = (
            copysign(1, last_accum_size) + accum_sign == 0
            and last_accum_size != 0
        )

        # since we passed the net-zero-size state the new size
        # after sum should be the remaining size in the new
        # "direction" (aka, long vs. short) for this clear.
        if sign_change:
            clear_size: float = accum_size
            abs_diff: float = abs(accum_size)
            asize_h.append(0)
            ppu_h.append(0)

        else:
            # old size minus the new size gives us size diff with
            # +ve -> increase in pp size
            # -ve -> decrease in pp size
            abs_diff = abs(accum_size) - abs(last_accum_size)

        # XXX: LIFO breakeven price update. only an increase in size
        # of the position contributes to the breakeven price,
        # a decrease does not (i.e. the position is being made
        # smaller).
        # abs_clear_size = abs(clear_size)
        abs_new_size: float | int = abs(accum_size)

        if (
            abs_diff > 0
            and is_clear
        ):
            cost_basis = (
                # cost basis for this clear
                clear_price * abs(clear_size)
                +
                # transaction cost
                accum_sign * cost_scalar * t.cost
            )

            if asize_h:
                size_last: float = abs(asize_h[-1])
                cb_last: float = ppu_h[-1] * size_last
                ppu: float = (cost_basis + cb_last) / abs_new_size

            else:
                ppu: float = cost_basis / abs_new_size

        else:
            # TODO: for PPU we should probably handle txs out
            # (aka withdrawals) similarly by simply not having
            # them contrib to the running PPU calc and only
            # when the next entry clear comes in (which will
            # then have a higher weighting on the PPU).

            # on "exit" clears from a given direction,
            # only the size changes; the price-per-unit need
            # not be updated since the ppu remains constant
            # and gets weighted by the new size.
            ppu: float = ppu_h[-1] if ppu_h else 0  # set to previous value

        # extend with new rolling metric for this step
        ppu_h.append(ppu)
        asize_h.append(accum_size)

        # ledger[t.tid] = {
        #     'txn': t,
        # ledger[t.tid] = t.to_dict() | {
        ledger.append((
            t.tid,
            t.to_dict() | {
                'ppu': ppu,
                'cumsize': accum_size,
                'sign_change': sign_change,

                # TODO: cum_pnl, bep
            }
        ))

    final_ppu = ppu_h[-1] if ppu_h else 0
    # TODO: once we have etypes in all ledger entries..
    # handle any split info entered (for now) manually by user
    # if self.split_ratio is not None:
    #     final_ppu /= self.split_ratio

    if as_ledger:
        return ledger

    else:
        return final_ppu
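
# Worked (hypothetical) example of the recurrence above, ignoring
# txn costs (i.e. `t.cost == 0` for every clear):
#   buy 10 @ 100 -> ppu = 100, cumsize = 10
#   buy 10 @ 110 -> ppu = (100*10 + 110*10) / 20 = 105
#   sell 5 @ 120 -> ppu stays 105 (exits don't update it), cumsize = 15
# with non-zero costs each enter also folds `cost_scalar * t.cost`
# into its cost basis as modelled above.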


def iter_by_dt(
    records: (
        dict[str, dict[str, Any]]
        | ValuesView[dict]  # eg. `Position._events.values()`
        | list[dict]
        | list[Transaction]  # XXX preferred!
    ),

    # NOTE: parsers are looked up in the insert order
    # so if you know that the record stats show some field
    # is more common than others, stick it at the top B)
    parsers: dict[str, Callable | None] = {
        'dt': parse,  # parity case
        'datetime': parse,  # datetime-str
        'time': from_timestamp,  # float epoch
    },
    key: Callable | None = None,

) -> Iterator[tuple[str, dict]]:
    '''
    Iterate entries of a transaction table sorted by entry recorded
    datetime presumably set at the ``'dt'`` field in each entry.

    '''
    if isinstance(records, dict):
        records: list[tuple[str, dict]] = list(records.items())

    def dyn_parse_to_dt(
        tx: tuple[str, dict[str, Any]] | Transaction,
    ) -> DateTime:

        # handle `.items()` inputs
        if isinstance(tx, tuple):
            tx = tx[1]

        # dict or tx object?
        isdict: bool = isinstance(tx, dict)

        # get best parser for this record..
        for k in parsers:
            if (
                isdict and k in tx
                or getattr(tx, k, None)
            ):
                v = tx[k] if isdict else tx.dt
                assert v is not None, f'No valid value for `{k}`!?'

                # only call parser on the value if not None from
                # the `parsers` table above (when NOT using
                # `.get()`), otherwise pass through the value and
                # sort on it directly
                if (
                    not isinstance(v, DateTime)
                    and (parser := parsers.get(k))
                ):
                    return parser(v)
                else:
                    return v

        else:
            # XXX: should never get here..
            breakpoint()

    entry: tuple[str, dict] | Transaction
    for entry in sorted(
        records,
        key=key or dyn_parse_to_dt,
    ):
        # NOTE the type sig above; either pairs or txns B)
        yield entry
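
# e.g. (hypothetical record sets) both forms yield dt-sorted entries:
#   for tid, entry in iter_by_dt(ledger_dict): ...
#   for txn in iter_by_dt(pos._events.values()): ...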


# TODO: probably just move this into the test suite or
# keep it here for use as such?
# def ensure_state(self) -> None:
#     '''
#     Audit either the `.cumsize` and `.ppu` local instance vars against
#     the clears table calculations and return the calc-ed values if
#     they differ and log warnings to console.
#
#     '''
#     # clears: list[dict] = self._clears
#
#     # self.first_clear_dt = min(clears, key=lambda e: e['dt'])['dt']
#     last_clear: dict = clears[-1]
#     csize: float = self.calc_size()
#     accum: float = last_clear['accum_size']
#
#     if not self.expired():
#         if (
#             csize != accum
#             and csize != round(accum * (self.split_ratio or 1))
#         ):
#             raise ValueError(f'Size mismatch: {csize}')
#     else:
#         assert csize == 0, 'Contract is expired but non-zero size?'
#
#     if self.cumsize != csize:
#         log.warning(
#             'Position state mismatch:\n'
#             f'{self.cumsize} => {csize}'
#         )
#         self.cumsize = csize
#
#     cppu: float = self.calc_ppu()
#     ppu: float = last_clear['ppu']
#     if (
#         cppu != ppu
#         and self.split_ratio is not None
#
#         # handle any split info entered (for now) manually by user
#         and cppu != (ppu / self.split_ratio)
#     ):
#         raise ValueError(f'PPU mismatch: {cppu}')
#
#     if self.ppu != cppu:
#         log.warning(
#             'Position state mismatch:\n'
#             f'{self.ppu} => {cppu}'
#         )
#         self.ppu = cppu


@cm
def open_ledger_dfs(

    brokername: str,
    acctname: str,

    ledger: TransactionLedger | None = None,

    **kwargs,

) -> tuple[
    dict[str, pl.DataFrame],
    TransactionLedger,
]:
    '''
    Open a ledger of trade records (presumably from some broker
    backend), normalize the records into `Transactions` via the
    backend's declared endpoint, cast to a `polars.DataFrame` which
    can update the ledger on exit.

    '''
    from piker.toolz import open_crash_handler
    with open_crash_handler():
        if not ledger:
            import time
            from ._ledger import open_trade_ledger

            now = time.time()

            with open_trade_ledger(
                brokername,
                acctname,
                rewrite=True,
                allow_from_sync_code=True,

                # proxied through from caller
                **kwargs,

            ) as ledger:
                if not ledger:
                    raise ValueError(
                        f'No ledger for {acctname}@{brokername} exists?'
                    )

                print(f'LEDGER LOAD TIME: {time.time() - now}')

                yield ledger_to_dfs(ledger), ledger

        else:
            # a pre-opened ledger was passed in; just compute the
            # frames and deliver (otherwise this cm never yields).
            yield ledger_to_dfs(ledger), ledger
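
# Usage sketch (hypothetical names); delivers one frame per market:
#
#   with open_ledger_dfs('binance', 'paper') as (dfs, ledger):
#       for bs_mktid, df in dfs.items():
#           print(df)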


def ledger_to_dfs(
    ledger: TransactionLedger,

) -> dict[str, pl.DataFrame]:

    txns: dict[str, Transaction] = ledger.to_txns()

    # ldf = pl.DataFrame(
    #     list(txn.to_dict() for txn in txns.values()),
    ldf = pl.from_dicts(
        list(txn.to_dict() for txn in txns.values()),

        # only for ordering the cols
        schema=[
            ('fqme', str),
            ('tid', str),
            ('bs_mktid', str),
            ('expiry', str),
            ('etype', str),
            ('dt', str),
            ('size', pl.Float64),
            ('price', pl.Float64),
            ('cost', pl.Float64),
        ],
    ).sort(  # chronological order
        'dt'
    ).with_columns([
        pl.col('dt').str.to_datetime(),
        # pl.col('expiry').str.to_datetime(),
        # pl.col('expiry').dt.date(),
    ])

    # filter out to the columns matching values filter passed
    # as input.
    # if filter_by_ids:
    #     for col, vals in filter_by_ids.items():
    #         str_vals = set(map(str, vals))
    #         pred: pl.Expr = pl.col(col).eq(str_vals.pop())
    #         for val in str_vals:
    #             pred |= pl.col(col).eq(val)

    #     fdf = df.filter(pred)

    # TODO: originally i had tried just using a plain ol' groupby
    # + agg here but the issue was re-inserting to the src frame.
    # however, learning more about `polars` seems like maybe we can
    # use `.over()`?
    # https://pola-rs.github.io/polars/py-polars/html/reference/expressions/api/polars.Expr.over.html#polars.Expr.over
    # => CURRENTLY we break up into a frame per mkt / fqme
    dfs: dict[str, pl.DataFrame] = ldf.partition_by(
        'bs_mktid',
        as_dict=True,
    )

    # TODO: not sure if this is even possible but..
    # - it'd be more ideal to use `ppt = df.groupby('fqme').agg([`
    # - ppu and bep calcs!
    for key in dfs:

        # convert to lazy form (since apparently we might need it
        # eventually ...)
        df: pl.DataFrame = dfs[key]

        ldf: pl.LazyFrame = df.lazy()

        df = dfs[key] = ldf.with_columns([

            pl.cumsum('size').alias('cumsize'),

            # amount of source asset "sent" (via buy txns in
            # the market) to acquire the dst asset, PER txn.
            # when this value is -ve (i.e. a sell operation) then
            # the amount sent is actually "returned".
            (
                (pl.col('price') * pl.col('size'))
                +
                (pl.col('cost'))  # * pl.col('size').sign())
            ).alias('dst_bot'),

        ]).with_columns([

            # rolling balance in src asset units
            (pl.col('dst_bot').cumsum() * -1).alias('src_balance'),

            # "position operation type" in terms of increasing the
            # amount in the dst asset (entering) or decreasing the
            # amount in the dst asset (exiting).
            pl.when(
                pl.col('size').sign() == pl.col('cumsize').sign()

            ).then(
                pl.lit('enter')  # see above, but is just price * size per txn

            ).otherwise(
                pl.when(pl.col('cumsize') == 0)
                .then(pl.lit('exit_to_zero'))
                .otherwise(pl.lit('exit'))
            ).alias('descr'),

            (pl.col('cumsize').sign() == pl.col('size').sign())
            .alias('is_enter'),

        ]).with_columns([

            # pl.lit(0, dtype=pl.Utf8).alias('virt_cost'),
            pl.lit(0, dtype=pl.Float64).alias('applied_cost'),
            pl.lit(0, dtype=pl.Float64).alias('pos_ppu'),
            pl.lit(0, dtype=pl.Float64).alias('per_txn_pnl'),
            pl.lit(0, dtype=pl.Float64).alias('cum_pos_pnl'),
            pl.lit(0, dtype=pl.Float64).alias('pos_bep'),
            pl.lit(0, dtype=pl.Float64).alias('cum_ledger_pnl'),
            pl.lit(None, dtype=pl.Float64).alias('ledger_bep'),

            # TODO: instead of the iterative loop below i guess we
            # could try using embedded lists to track which txns
            # are part of which ppu / bep calcs? Not sure this will
            # look any better nor be any more performant though xD
            # pl.lit([[0]], dtype=pl.List(pl.Float64)).alias('list'),

        # choose fields to emit for accounting purposes
        ]).select([
            pl.exclude([
                'tid',
                # 'dt',
                'expiry',
                'bs_mktid',
                'etype',
                # 'is_enter',
            ]),
        ]).collect()

        # compute recurrence relations for ppu and bep
        last_ppu: float = 0
        last_cumsize: float = 0
        last_ledger_pnl: float = 0
        last_pos_pnl: float = 0
        # prime the bep values so flat (cumsize == 0) rows below
        # always have something to write.
        ledger_bep: float = 0
        pos_bep: float = 0
        virt_costs: list[float] = [0., 0.]  # (cum_costs_size, cumcpu)

        # imperatively compute the PPU (price per unit) and BEP
        # (break even price) iteratively over the ledger, oriented
        # around each position state: a state of split balances in
        # > 1 asset.
        for i, row in enumerate(df.iter_rows(named=True)):

            cumsize: float = row['cumsize']
            is_enter: bool = row['is_enter']
            price: float = row['price']
            size: float = row['size']

            # the profit is ALWAYS decreased, aka made a "loss"
            # by the constant fee charged by the txn provider!
            # see below in final PnL calculation and row element
            # set.
            txn_cost: float = row['cost']
            pnl: float = 0
            # no pnl is realized on "enter" txns so default to zero.
            txn_pnl: float = 0

            # ALWAYS reset per-position cum PnL
            if last_cumsize == 0:
                last_pos_pnl: float = 0

            # a "position size INCREASING" or ENTER transaction
            # which "makes larger", in src asset unit terms, the
            # trade's side-size of the destination asset:
            # - "buying" (more) units of the dst asset
            # - "selling" (more short) units of the dst asset
            if is_enter:

                # Naively include transaction cost in breakeven
                # price and presume the worst case of the
                # exact-same-cost-to-exit this transaction's worth
                # of size even though in reality it will be dynamic
                # based on exit strategy, price, liquidity, etc..
                virt_cost: float = txn_cost

                # cpu: float = cost / size
                # cummean of the cost-per-unit used for modelling
                # a projected future exit cost which we immediately
                # include in the costs incorporated to BEP on enters
                last_cum_costs_size, last_cpu = virt_costs
                cum_costs_size: float = last_cum_costs_size + abs(size)
                cumcpu = (
                    (last_cpu * last_cum_costs_size)
                    +
                    txn_cost
                ) / cum_costs_size
                virt_costs = [cum_costs_size, cumcpu]

                txn_cost = txn_cost + virt_cost
                # df[i, 'virt_cost'] = f'{-virt_cost} FROM {cumcpu}@{cum_costs_size}'

                # a cumulative mean of the price-per-unit acquired
                # in the destination asset:
                # https://en.wikipedia.org/wiki/Moving_average#Cumulative_average
                # You could also think of this measure more
                # generally as an exponential mean with `alpha
                # = 1/N` where `N` is the current number of txns
                # included in the "position" defining set:
                # https://en.wikipedia.org/wiki/Exponential_smoothing
                ppu: float = (
                    (
                        (last_ppu * last_cumsize)
                        +
                        (price * size)
                    ) /
                    cumsize
                )

            # a "position size DECREASING" or EXIT transaction
            # which "makes smaller" the trade's side-size of the
            # destination asset:
            # - selling previously bought units of the dst asset
            #   (aka 'closing' a long position).
            # - buying previously borrowed and sold (short) units
            #   of the dst asset (aka 'covering'/'closing' a short
            #   position).
            else:
                # only changes on position size increasing txns
                ppu: float = last_ppu

                # UNWIND IMPLIED COSTS FROM ENTRIES
                # => Reverse the virtual/modelled (2x predicted) txn
                #    cost that was included in the least-recently
                #    entered txn that is still part of the current CSi
                #    set.
                # => we look up the cost-per-unit cumsum and apply
                #    it over the current txn size (by multiplication)
                #    and then reverse that previously applied cost on
                #    the txn_cost for this record.
                #
                # NOTE: current "model" is just the previously assumed
                # 2x the txn cost for a matching enter-txn's
                # cost-per-unit; we then immediately reverse this
                # prediction and apply the real cost received here.
                last_cum_costs_size, last_cpu = virt_costs
                prev_virt_cost: float = last_cpu * abs(size)
                txn_cost: float = txn_cost - prev_virt_cost  # +ve thus a "reversal"
                cum_costs_size: float = last_cum_costs_size - abs(size)
                virt_costs = [cum_costs_size, last_cpu]

                # df[i, 'virt_cost'] = (
                #     f'{-prev_virt_cost} FROM {last_cpu}@{cum_costs_size}'
                # )

                # the per-txn profit or loss (PnL) given we are
                # (partially) "closing"/"exiting" the position via
                # this txn.
                pnl: float = (last_ppu - price) * size

                # always subtract txn cost from total txn pnl
                txn_pnl: float = pnl - txn_cost

                # cumulative PnLs per txn
                last_ledger_pnl = (
                    last_ledger_pnl + txn_pnl
                )
                last_pos_pnl = df[i, 'cum_pos_pnl'] = (
                    last_pos_pnl + txn_pnl
                )

            if cumsize == 0:
                last_ppu = ppu = 0

            # compute the BEP: "break even price", a value that
            # determines at what price the remaining cumsize can be
            # liquidated such that the net-PnL on the current
            # position will result in ZERO gain or loss from open
            # to close including all txn costs B)
            if (
                abs(cumsize) > 0  # non-exit-to-zero position txn
            ):
                cumsize_sign: float = copysign(1, cumsize)
                ledger_bep: float = (
                    (
                        (ppu * cumsize)
                        -
                        (last_ledger_pnl * cumsize_sign)
                    ) / cumsize
                )

                # NOTE: when we "enter more" dst asset units (aka
                # increase position state) AFTER having exited some
                # units (aka decreasing the pos size some) the bep
                # needs to be RECOMPUTED based on new ppu such that
                # liquidation of the cumsize at the bep price
                # results in a zero-pnl for the existing position
                # (since the last one).
                # for position lifetime BEP we never can have
                # a valid value once the position is "closed"
                # / fully exited Bo
                pos_bep: float = (
                    (
                        (ppu * cumsize)
                        -
                        (last_pos_pnl * cumsize_sign)
                    ) / cumsize
                )

            # inject DF row with all values
            df[i, 'pos_ppu'] = ppu
            df[i, 'per_txn_pnl'] = txn_pnl
            df[i, 'applied_cost'] = -txn_cost
            df[i, 'cum_pos_pnl'] = last_pos_pnl
            df[i, 'pos_bep'] = pos_bep
            df[i, 'cum_ledger_pnl'] = last_ledger_pnl
            df[i, 'ledger_bep'] = ledger_bep

            # keep backrefs to satisfy the recurrence relation
            last_ppu: float = ppu
            last_cumsize: float = cumsize

    # TODO?: pass back the current `Position` object loaded from
    # the account as well? Would provide incentive to do all
    # this ledger loading inside a new async open_account().
    # bs_mktid: str = df[0]['bs_mktid']
    # pos: Position = acnt.pps[bs_mktid]

    return dfs
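
# Worked (hypothetical) bep example from the relation above: long 10
# units with ppu=100 and a cumulative pnl of -20 gives,
#   bep = (100*10 - (-20 * 1)) / 10 = 102
# i.e. the remaining size must be liquidated 2 points above the ppu
# to recover the realized loss (and modelled costs) booked so far.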

@@ -1,311 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
CLI front end for trades ledger and position tracking management.

'''
from __future__ import annotations
from pprint import pformat


from rich.console import Console
from rich.markdown import Markdown
import polars as pl
import tractor
import trio
import typer

from ..log import get_logger
from ..service import (
    open_piker_runtime,
)
from ..clearing._messages import BrokerdPosition
from ..calc import humanize
from ..brokers._daemon import broker_init
from ._ledger import (
    load_ledger,
    TransactionLedger,
    # open_trade_ledger,
)
from .calc import (
    open_ledger_dfs,
)


ledger = typer.Typer()


def unpack_fqan(
    fully_qualified_account_name: str,
    console: Console | None = None,
) -> tuple | bool:
    try:
        brokername, account = fully_qualified_account_name.split('.')
        return brokername, account
    except ValueError:
        if console is not None:
            md = Markdown(
                f'=> `{fully_qualified_account_name}` <=\n\n'
                'is not a valid '
                '__fully qualified account name?__\n\n'
                'Your account name needs to be of the form '
                '`<brokername>.<account_name>`\n'
            )
            console.print(md)
        return False
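
# e.g. unpack_fqan('binance.paper') -> ('binance', 'paper')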


@ledger.command()
def sync(
    fully_qualified_account_name: str,
    pdb: bool = False,

    loglevel: str = typer.Option(
        'error',
        "-l",
    ),
):
    log = get_logger(loglevel)
    console = Console()

    pair: tuple[str, str]
    if not (pair := unpack_fqan(
        fully_qualified_account_name,
        console,
    )):
        return

    brokername, account = pair

    brokermod, start_kwargs, daemon_ep = broker_init(
        brokername,
        loglevel=loglevel,
    )
    brokername: str = brokermod.name

    async def main():

        async with (
            open_piker_runtime(
                name='ledger_cli',
                loglevel=loglevel,
                debug_mode=pdb,

            ) as (actor, sockaddr),

            tractor.open_nursery() as an,
        ):
            try:
                log.info(
                    f'Piker runtime up as {actor.uid}@{sockaddr}'
                )

                portal = await an.start_actor(
                    loglevel=loglevel,
                    debug_mode=pdb,
                    **start_kwargs,
                )

                from ..clearing import (
                    open_brokerd_dialog,
                )
                brokerd_stream: tractor.MsgStream

                async with (
                    # engage the brokerd daemon context
                    portal.open_context(
                        daemon_ep,
                        brokername=brokername,
                        loglevel=loglevel,
                    ),

                    # manually open the brokerd trade dialog EP
                    # (what the EMS normally does internally) B)
                    open_brokerd_dialog(
                        brokermod,
                        portal,
                        exec_mode=(
                            'paper'
                            if account == 'paper'
                            else 'live'
                        ),
                        loglevel=loglevel,
                    ) as (
                        brokerd_stream,
                        pp_msg_table,
                        accounts,
                    ),
                ):
                    try:
                        assert len(accounts) == 1
                        if not pp_msg_table:
                            ld, fpath = load_ledger(brokername, account)
                            assert not ld, f'WTF did we fail to parse ledger:\n{ld}'

                            console.print(
                                '[yellow]'
                                'No pps found for '
                                f'`{brokername}.{account}` '
                                'account!\n\n'
                                '[/][underline]'
                                'None of the following ledger files exist:\n\n[/]'
                                f'{fpath.as_uri()}\n'
                            )
                            return

                        pps_by_symbol: dict[str, BrokerdPosition] = pp_msg_table[
                            brokername,
                            account,
                        ]

                        summary: str = (
                            '[dim underline]Piker Position Summary[/] '
                            f'[dim blue underline]{brokername}[/]'
                            '[dim].[/]'
                            f'[blue underline]{account}[/]'
                            f'[dim underline] -> total pps: [/]'
                            f'[green]{len(pps_by_symbol)}[/]\n'
                        )
                        # for ppdict in positions:
                        for fqme, ppmsg in pps_by_symbol.items():
                            # ppmsg = BrokerdPosition(**ppdict)
                            size = ppmsg.size
                            if size:
                                ppu: float = round(
                                    ppmsg.avg_price,
                                    ndigits=2,
                                )
                                cost_basis: str = humanize(size * ppu)
                                h_size: str = humanize(size)

                                if size < 0:
                                    pcolor = 'red'
                                else:
                                    pcolor = 'green'

                                # semantic-highlight of fqme
                                fqme = ppmsg.symbol
                                tokens = fqme.split('.')
                                styled_fqme = f'[blue underline]{tokens[0]}[/]'
                                for tok in tokens[1:]:
                                    styled_fqme += '[dim].[/]'
                                    styled_fqme += f'[dim blue underline]{tok}[/]'

                                # TODO: instead display in a ``rich.Table``?
                                summary += (
                                    styled_fqme +
                                    '[dim]: [/]'
                                    f'[{pcolor}]{h_size}[/]'
                                    '[dim blue]u @[/]'
                                    f'[{pcolor}]{ppu}[/]'
                                    '[dim blue] = [/]'
                                    f'[{pcolor}]$ {cost_basis}\n[/]'
                                )

                        console.print(summary)

                    finally:
                        # exit via ctx cancellation.
                        brokerd_ctx: tractor.Context = brokerd_stream._ctx
                        await brokerd_ctx.cancel(timeout=1)

                        # TODO: once ported to newer tractor branch we should
                        # be able to do a loop like this:
                        # while brokerd_ctx.cancel_called_remote is None:
                        #     await trio.sleep(0.01)
                        #     await brokerd_ctx.cancel()

            finally:
                await portal.cancel_actor()

    trio.run(main)
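
# e.g. assuming this `ledger` typer app is mounted under the root
# `piker` cli (hypothetical account name):
#   piker ledger sync binance.paper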


@ledger.command()
def disect(
    # "fully_qualified_account_name"
    fqan: str,
    fqme: str,  # for ib

    # TODO: in tractor we should really have
    # a debug_mode ctx for wrapping any kind of code no?
    pdb: bool = False,
    bs_mktid: str = typer.Option(
        None,
        "-bid",
    ),
    loglevel: str = typer.Option(
        'error',
        "-l",
    ),
):
    from piker.log import get_console_log
    from piker.toolz import open_crash_handler
    get_console_log(loglevel)

    pair: tuple[str, str]
    if not (pair := unpack_fqan(fqan)):
        raise ValueError(f'{fqan} malformed!?')

    brokername, account = pair

    # ledger dfs groupby-partitioned by fqme
    dfs: dict[str, pl.DataFrame]
    # actual ledger instance
    ldgr: TransactionLedger

    pl.Config.set_tbl_cols(-1)
    pl.Config.set_tbl_rows(-1)
    with (
        open_crash_handler(),
        open_ledger_dfs(
            brokername,
            account,
        ) as (dfs, ldgr),
    ):

        # look up specific frame for fqme-selected asset
        if (df := dfs.get(fqme)) is None:
            mktids2fqmes: dict[str, list[str]] = {}
            for bs_mktid in dfs:
                df: pl.DataFrame = dfs[bs_mktid]
                fqmes: pl.Series[str] = df['fqme']
                uniques: list[str] = fqmes.unique()
                mktids2fqmes[bs_mktid] = set(uniques)
                if fqme in uniques:
                    break

            print(
                f'No specific ledger for fqme={fqme} could be found in\n'
                f'{pformat(mktids2fqmes)}?\n'
                f'Maybe the `{brokername}` backend uses something '
                'else for its `bs_mktid` than the `fqme`?\n'
                'Scanning for matches in unique fqmes per frame..\n'
            )

        # :pray:
        assert not df.is_empty()

        # muck around in pdbp REPL
        breakpoint()

        # TODO: we REALLY need a better console REPL for this
        # kinda thing..
        # - `xonsh` is an obvious option (and it looks amazing) but
        #   we need to figure out how to embed it better than just:
        #     from xonsh.main import main
        #     main(argv=[])
        #   which will not actually inject the `df` to globals?

@@ -17,95 +17,33 @@
"""
|
||||
Broker clients, daemons and general back end machinery.
|
||||
"""
|
||||
from contextlib import (
|
||||
asynccontextmanager as acm,
|
||||
)
|
||||
from importlib import import_module
|
||||
from types import ModuleType
|
||||
|
||||
from tractor.trionics import maybe_open_context
|
||||
# TODO: move to urllib3/requests once supported
|
||||
import asks
|
||||
asks.init('trio')
|
||||
|
||||
from ._util import (
|
||||
log,
|
||||
BrokerError,
|
||||
SymbolNotFound,
|
||||
NoData,
|
||||
DataUnavailable,
|
||||
DataThrottle,
|
||||
resproc,
|
||||
get_logger,
|
||||
)
|
||||
|
||||
__all__: list[str] = [
|
||||
'BrokerError',
|
||||
'SymbolNotFound',
|
||||
'NoData',
|
||||
'DataUnavailable',
|
||||
'DataThrottle',
|
||||
'resproc',
|
||||
'get_logger',
|
||||
]
|
||||
|
||||
__brokers__: list[str] = [
|
||||
__brokers__ = [
|
||||
'binance',
|
||||
'questrade',
|
||||
'robinhood',
|
||||
'ib',
|
||||
'kraken',
|
||||
'kucoin',
|
||||
|
||||
# broken but used to work
|
||||
# 'questrade',
|
||||
# 'robinhood',
|
||||
|
||||
# TODO: we should get on these stat!
|
||||
# alpaca
|
||||
# wstrade
|
||||
# iex
|
||||
|
||||
# deribit
|
||||
# bitso
|
||||
]
|
||||
|
||||
|
||||
def get_brokermod(brokername: str) -> ModuleType:
|
||||
'''
|
||||
Return the imported broker module by name.
|
||||
|
||||
'''
|
||||
module: ModuleType = import_module('.' + brokername, 'piker.brokers')
|
||||
"""Return the imported broker module by name.
|
||||
"""
|
||||
module = import_module('.' + brokername, 'piker.brokers')
|
||||
# we only allow monkeying because it's for internal keying
|
||||
module.name = module.__name__.split('.')[-1]
|
||||
module.name = module.__name__.split('.')[-1]
|
||||
return module
|
||||
|
||||
|
||||
def iter_brokermods():
|
||||
'''
|
||||
Iterate all built-in broker modules.
|
||||
|
||||
'''
|
||||
"""Iterate all built-in broker modules.
|
||||
"""
|
||||
for name in __brokers__:
|
||||
yield get_brokermod(name)
|
||||
|
||||
|
||||
@acm
|
||||
async def open_cached_client(
|
||||
brokername: str,
|
||||
**kwargs,
|
||||
|
||||
) -> 'Client': # noqa
|
||||
'''
|
||||
Get a cached broker client from the current actor's local vars.
|
||||
|
||||
If one has not been setup do it and cache it.
|
||||
|
||||
'''
|
||||
brokermod = get_brokermod(brokername)
|
||||
async with maybe_open_context(
|
||||
acm_func=brokermod.get_client,
|
||||
kwargs=kwargs,
|
||||
|
||||
) as (cache_hit, client):
|
||||
|
||||
if cache_hit:
|
||||
log.runtime(f'Reusing existing {client}')
|
||||
|
||||
yield client
|
||||
|
|
|
|||
|
|
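For orientation, the two module-level entry points above compose directly. A minimal sketch (assuming a configured `binance` backend and eliding the `tractor` actor-runtime setup which `maybe_open_context` expects for its per-actor cache):

async def show_client() -> None:
    mod = get_brokermod('binance')
    assert mod.name == 'binance'  # keyed by subpkg name

    # re-entrant: a second call in the same actor reuses the client
    async with open_cached_client('binance') as client:
        print(await client.cache_symbols())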
@ -1,276 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Broker-daemon-actor "endpoint-hooks": the service task entry points for
``brokerd``.

'''
from __future__ import annotations
from contextlib import (
    asynccontextmanager as acm,
)
from types import ModuleType
from typing import (
    TYPE_CHECKING,
    AsyncContextManager,
)
import exceptiongroup as eg

import tractor
import trio

from . import _util
from . import get_brokermod

if TYPE_CHECKING:
    from ..data import _FeedsBus

# `brokerd` enabled modules
# TODO: move this def to the `.data` subpkg..
# NOTE: keeping this list as small as possible is part of our caps-sec
# model and should be treated with utmost care!
_data_mods: list[str] = [
    'piker.brokers.core',
    'piker.brokers.data',
    'piker.brokers._daemon',
    'piker.data',
    'piker.data.feed',
    'piker.data._sampling'
]


# TODO: we should rename the daemon to datad prolly once we split up
# broker vs. data tasks into separate actors?
@tractor.context
async def _setup_persistent_brokerd(
    ctx: tractor.Context,
    brokername: str,
    loglevel: str | None = None,

) -> None:
    '''
    Allocate an actor-wide service nursery in ``brokerd``
    such that feeds can be run in the background persistently by
    the broker backend as needed.

    '''
    # NOTE: we only need to setup logging once (and only) here
    # since all hosted daemon tasks will reference this same
    # log instance's (actor local) state and thus don't require
    # any further (level) configuration on their own B)
    log = _util.get_console_log(
        loglevel or tractor.current_actor().loglevel,
        name=f'{_util.subsys}.{brokername}',
    )

    # set global for this actor to this new process-wide instance B)
    _util.log = log

    # further, set the log level on any broker-specific
    # logger instance.

    from piker.data import feed
    assert not feed._bus

    # allocate a nursery to the bus for spawning background
    # tasks to service client IPC requests, normally
    # `tractor.Context` connections to explicitly required
    # `brokerd` endpoints such as:
    # - `stream_quotes()`,
    # - `manage_history()`,
    # - `allocate_persistent_feed()`,
    # - `open_symbol_search()`
    # NOTE: see ep invocation details inside `.data.feed`.
    try:
        async with trio.open_nursery() as service_nursery:
            bus: _FeedsBus = feed.get_feed_bus(
                brokername,
                service_nursery,
            )
            assert bus is feed._bus

            # unblock caller
            await ctx.started()

            # we pin this task to keep the feeds manager active until the
            # parent actor decides to tear it down
            await trio.sleep_forever()

    except eg.ExceptionGroup:
        # TODO: likely some underlying `brokerd` IPC connection
        # broke so here we handle a respawn and re-connect attempt!
        # This likely should pair with development of the OCO task
        # nursery in dev over @ `tractor` B)
        # https://github.com/goodboy/tractor/pull/363
        raise
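
# For illustration, the "pinned service nursery" shape used by
# `_setup_persistent_brokerd()` above, as a self-contained sketch in
# plain `trio` (no piker machinery; all names here are hypothetical):
#
#     import trio
#
#     async def open_service(
#         task_status=trio.TASK_STATUS_IGNORED,
#     ) -> None:
#         async with trio.open_nursery() as service_n:
#             # hand the nursery to the caller so it can spawn
#             # long-lived background tasks into it
#             task_status.started(service_n)
#             # pin this task: the nursery (and its children) stay
#             # alive until the parent cancels us
#             await trio.sleep_forever()
#
#     async def main() -> None:
#         async with trio.open_nursery() as n:
#             service_n = await n.start(open_service)
#             service_n.start_soon(trio.sleep, 1)  # a stand-in "feed" task
#             await trio.sleep(0.1)
#             n.cancel_scope.cancel()
#
#     trio.run(main)
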
def broker_init(
    brokername: str,
    loglevel: str | None = None,

    **start_actor_kwargs,

) -> tuple[
    ModuleType,
    dict,
    AsyncContextManager,
]:
    '''
    Given an input broker name, load all named arguments
    which can be passed for daemon endpoint + context spawn
    as required in every `brokerd` (actor) service.

    This includes:
    - loading the appropriate <brokername>.py pkg module,
    - reading any declared `__enable_modules__: list[str]` which will be
      passed to `tractor.ActorNursery.start_actor(enabled_modules=<this>)`
      at actor start time,
    - delivering a reference to the daemon lifetime fixture, which
      for now is always the `_setup_persistent_brokerd()` context defined
      above.

    '''
    from ..brokers import get_brokermod
    brokermod = get_brokermod(brokername)
    modpath: str = brokermod.__name__

    start_actor_kwargs['name'] = f'brokerd.{brokername}'
    start_actor_kwargs.update(
        getattr(
            brokermod,
            '_spawn_kwargs',
            {},
        )
    )

    # XXX TODO: make this not so hacky/monkeypatched..
    # -> we need a sane way to configure the logging level for all
    #    code running in brokerd.
    # if utilmod := getattr(brokermod, '_util', False):
    #     utilmod.log.setLevel(loglevel.upper())

    # lookup actor-enabled modules declared by the backend offering the
    # `brokerd` endpoint(s).
    enabled: list[str]
    enabled = start_actor_kwargs['enable_modules'] = [
        __name__,  # so that eps from THIS mod can be invoked
        modpath,
    ]
    for submodname in getattr(
        brokermod,
        '__enable_modules__',
        [],
    ):
        subpath: str = f'{modpath}.{submodname}'
        enabled.append(subpath)

    return (
        brokermod,
        start_actor_kwargs,  # to `ActorNursery.start_actor()`

        # XXX see impl above; contains all (actor global)
        # setup/teardown expected in all `brokerd` actor instances.
        _setup_persistent_brokerd,
    )
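
# To make the return signature concrete: a sketch of how a spawner
# consumes the tuple (here `an` stands in for an already-open
# `tractor.ActorNursery`; the broker name is arbitrary):
#
#     brokermod, start_kwargs, fixture_ep = broker_init(
#         'kraken',
#         loglevel='info',
#     )
#     portal = await an.start_actor(
#         start_kwargs.pop('name'),  # 'brokerd.kraken'
#         enable_modules=start_kwargs.pop('enable_modules'),
#     )
#     # run the daemon-lifetime fixture remotely; it tears down
#     # when this block exits.
#     async with portal.open_context(
#         fixture_ep,
#         brokername='kraken',
#         loglevel='info',
#     ) as (ctx, first):
#         ...
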
async def spawn_brokerd(

    brokername: str,
    loglevel: str | None = None,

    **tractor_kwargs,

) -> bool:

    from piker.service._util import log  # use service mngr log
    log.info(f'Spawning {brokername} broker daemon')

    (
        brokermod,
        tractor_kwargs,
        daemon_fixture_ep,
    ) = broker_init(
        brokername,
        loglevel,
        **tractor_kwargs,
    )

    extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
    tractor_kwargs.update(extra_tractor_kwargs)

    # ask `pikerd` to spawn a new sub-actor and manage it under its
    # actor nursery
    from piker.service import Services

    dname: str = tractor_kwargs.pop('name')  # f'brokerd.{brokername}'
    portal = await Services.actor_n.start_actor(
        dname,
        enable_modules=_data_mods + tractor_kwargs.pop('enable_modules'),
        debug_mode=Services.debug_mode,
        **tractor_kwargs
    )

    # NOTE: the service mngr expects an already spawned actor + its
    # portal ref in order to do non-blocking setup of brokerd
    # service nursery.
    await Services.start_service_task(
        dname,
        portal,

        # signature of target root-task endpoint
        daemon_fixture_ep,
        brokername=brokername,
        loglevel=loglevel,
    )
    return True


@acm
async def maybe_spawn_brokerd(

    brokername: str,
    loglevel: str | None = None,

    **pikerd_kwargs,

) -> tractor.Portal:
    '''
    Helper to spawn a brokerd service *from* a client who wishes to
    use the sub-actor-daemon but is fine with re-using any existing
    and contactable `brokerd`.

    Mas o menos, acts as a cached-actor-getter factory.

    '''
    from piker.service import maybe_spawn_daemon

    async with maybe_spawn_daemon(

        f'brokerd.{brokername}',
        service_task_target=spawn_brokerd,
        spawn_args={
            'brokername': brokername,
        },
        loglevel=loglevel,

        **pikerd_kwargs,

    ) as portal:
        yield portal
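Client-side, the factory above reads as follows; a sketch assuming a reachable `pikerd` registry:

    async with maybe_spawn_brokerd(
        'binance',
        loglevel='info',
    ) as portal:
        # `portal` now points at a (possibly pre-existing)
        # `brokerd.binance` actor ready for endpoint requests.
        ...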
@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -15,32 +15,13 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Handy cross-broker utils.

Handy utils.
"""
from __future__ import annotations
from functools import partial

import json
import httpx
import asks
import logging

from ..log import (
    get_logger,
    get_console_log,
    colorize_json,
)
subsys: str = 'piker.brokers'

# NOTE: level should be reset by any actor that is spawned
# as well as given a (more) explicit name/key such
# as `piker.brokers.binance` matching the subpkg.
log = get_logger(subsys)

get_console_log = partial(
    get_console_log,
    name=subsys,
)
from ..log import colorize_json


class BrokerError(Exception):
@ -51,67 +32,27 @@ class SymbolNotFound(BrokerError):
    "Symbol not found by broker search"


# TODO: these should probably be moved to `.tsp/.data`?
class NoData(BrokerError):
    '''
    Symbol data not permitted or no data
    for time range found.

    '''
    def __init__(
        self,
        *args,
        info: dict|None = None,

    ) -> None:
        super().__init__(*args)
        self.info: dict|None = info

        # when raised, machinery can check if the backend
        # set a "frame size" for doing datetime calcs.
        # self.frame_size: int = 1000


class DataUnavailable(BrokerError):
    '''
    Signal storage requests to terminate.

    '''
    # TODO: add in a reason that can be displayed in the
    # UI (for eg. `kraken` is bs and you should complain
    # to them that you can't pull more OHLC data..)


class DataThrottle(BrokerError):
    '''
    Broker throttled request rate for data.

    '''
    # TODO: add in throttle metrics/feedback
    "Symbol data not permitted"


def resproc(
    resp: httpx.Response,
    resp: asks.response_objects.Response,
    log: logging.Logger,
    return_json: bool = True,
    log_resp: bool = False,

) -> httpx.Response:
    '''
    Process response and return its json content.
    return_json: bool = True
) -> asks.response_objects.Response:
    """Process response and return its json content.

    Raise the appropriate error on non-200 OK responses.

    '''
    """
    if not resp.status_code == 200:
        raise BrokerError(resp.body)
    try:
        msg = resp.json()
        json = resp.json()
    except json.decoder.JSONDecodeError:
        log.exception(f"Failed to process {resp}:\n{resp.text}")
        raise BrokerError(resp.text)
    else:
        log.debug(f"Received json contents:\n{colorize_json(json)}")

    if log_resp:
        log.debug(f"Received json contents:\n{colorize_json(msg)}")

    return msg if return_json else resp
    return json if return_json else resp
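A sketch of `resproc()` at a call-site using the newer `httpx`-style session (the URL is just Binance's public server-time route, for illustration):

    import httpx

    async def server_time() -> dict:
        async with httpx.AsyncClient(
            base_url='https://api.binance.com',
        ) as sesh:
            resp = await sesh.get('/api/v3/time')
            # raises `BrokerError` on any non-200 response
            return resproc(resp, log)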
@ -0,0 +1,85 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Actor-aware broker agnostic interface.

"""
from typing import Dict
from contextlib import asynccontextmanager, AsyncExitStack

import trio

from . import get_brokermod
from ..log import get_logger


log = get_logger(__name__)


_cache: Dict[str, 'Client'] = {}  # noqa


@asynccontextmanager
async def open_cached_client(
    brokername: str,
    *args,
    **kwargs,
) -> 'Client':  # noqa
    """Get a cached broker client from the current actor's local vars.

    If one has not been setup do it and cache it.
    """
    global _cache

    clients = _cache.setdefault('clients', {'_lock': trio.Lock()})

    # global cache task lock
    lock = clients['_lock']

    client = None

    try:
        log.info(f"Loading existing `{brokername}` client")

        async with lock:
            client = clients[brokername]
            client._consumers += 1

        yield client

    except KeyError:
        log.info(f"Creating new client for broker {brokername}")

        async with lock:
            brokermod = get_brokermod(brokername)
            exit_stack = AsyncExitStack()

            client = await exit_stack.enter_async_context(
                brokermod.get_client()
            )
            # start the ref-count at 1 for this creating consumer
            # such that the last exiting consumer (and only it)
            # triggers teardown below.
            client._consumers = 1
            client._exit_stack = exit_stack
            clients[brokername] = client

        yield client

    finally:
        if client is not None:
            # if no more consumers, teardown the client
            client._consumers -= 1
            if client._consumers <= 0:
                await client._exit_stack.aclose()
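With the ref-count starting at 1, overlapping consumers inside one actor share a single connection and only the last exit closes it, e.g.:

    async with open_cached_client('binance') as c1:
        async with open_cached_client('binance') as c2:
            assert c1 is c2  # same cached instance
        # c1 is still usable here; real teardown only happens
        # once the outer block exits.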
@ -0,0 +1,520 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Binance backend

"""
from contextlib import asynccontextmanager
from typing import List, Dict, Any, Tuple, Union, Optional
import time

import trio
from trio_typing import TaskStatus
import arrow
import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor
from pydantic.dataclasses import dataclass
from pydantic import BaseModel
import wsproto

from .api import open_cached_client
from ._util import resproc, SymbolNotFound
from ..log import get_logger, get_console_log
from ..data import ShmArray
from ..data._web_bs import open_autorecon_ws

log = get_logger(__name__)


_url = 'https://api.binance.com'


# Broker specific ohlc schema (rest)
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('bar_wap', float),  # will be zeroed by sampler if not filled

    # XXX: some additional fields are defined in the docs:
    # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data

    # ('close_time', int),
    # ('quote_vol', float),
    # ('num_trades', int),
    # ('buy_base_vol', float),
    # ('buy_quote_vol', float),
    # ('ignore', float),
]

# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)

_show_wap_in_history = False


# https://binance-docs.github.io/apidocs/spot/en/#exchange-information
class Pair(BaseModel):
    symbol: str
    status: str

    baseAsset: str
    baseAssetPrecision: int
    quoteAsset: str
    quotePrecision: int
    quoteAssetPrecision: int

    baseCommissionPrecision: int
    quoteCommissionPrecision: int

    orderTypes: List[str]

    icebergAllowed: bool
    ocoAllowed: bool
    quoteOrderQtyMarketAllowed: bool
    isSpotTradingAllowed: bool
    isMarginTradingAllowed: bool

    filters: List[Dict[str, Union[str, int, float]]]
    permissions: List[str]


@dataclass
class OHLC:
    """Description of the flattened OHLC quote format.

    For schema details see:
    https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams

    """
    time: int

    open: float
    high: float
    low: float
    close: float
    volume: float

    close_time: int

    quote_vol: float
    num_trades: int
    buy_base_vol: float
    buy_quote_vol: float
    ignore: int

    # null the place holder for `bar_wap` until we
    # figure out what to extract for this.
    bar_wap: float = 0.0


# convert arrow timestamp to unixtime in milliseconds
def binance_timestamp(when):
    return int((when.timestamp() * 1000) + (when.microsecond / 1000))


class Client:

    def __init__(self) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._pairs: dict[str, Any] = {}

    async def _api(
        self,
        method: str,
        params: dict,
    ) -> Dict[str, Any]:
        resp = await self._sesh.get(
            path=f'/api/v3/{method}',
            params=params,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def symbol_info(

        self,
        sym: Optional[str] = None,

    ) -> dict[str, Any]:
        '''Get symbol info for the exchange.

        '''
        # TODO: we can load from our self._pairs cache
        # on repeat calls...

        # will retrieve all symbols by default
        params = {}

        if sym is not None:
            sym = sym.upper()
            params = {'symbol': sym}

        resp = await self._api(
            'exchangeInfo',
            params=params,
        )

        entries = resp['symbols']
        if not entries:
            raise SymbolNotFound(f'{sym} not found')

        syms = {item['symbol']: item for item in entries}

        if sym is not None:
            return syms[sym]
        else:
            return syms

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: Optional[int] = None,
    ) -> Dict[str, Any]:
        if self._pairs is not None:
            data = self._pairs
        else:
            data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0]['symbol']: item[0]
                for item in matches}

    async def bars(
        self,
        symbol: str,
        start_time: Optional[int] = None,
        end_time: Optional[int] = None,
        limit: int = 1000,  # <- max allowed per query
        as_np: bool = True,

    ) -> dict:

        if start_time is None:
            start_time = binance_timestamp(
                arrow.utcnow().floor('minute').shift(minutes=-limit)
            )

        if end_time is None:
            end_time = binance_timestamp(arrow.utcnow())

        # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
        bars = await self._api(
            'klines',
            params={
                'symbol': symbol.upper(),
                'interval': '1m',
                'startTime': start_time,
                'endTime': end_time,
                'limit': limit
            }
        )

        # TODO: pack this bars scheme into a ``pydantic`` validator type:
        # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data

        # TODO: we should port this to ``pydantic`` to avoid doing
        # manual validation ourselves..
        new_bars = []
        for i, bar in enumerate(bars):

            bar = OHLC(*bar)

            row = []
            for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):

                # TODO: maybe we should go nanoseconds on all
                # history time stamps?
                if name == 'time':
                    # convert to epoch seconds: float
                    row.append(bar.time / 1000.0)

                else:
                    row.append(getattr(bar, name))

            new_bars.append((i,) + tuple(row))

        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
        return array


@asynccontextmanager
async def get_client() -> Client:
    client = Client()
    await client.cache_symbols()
    yield client
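
# A quick smoke test of this (legacy, `asks`-based) client, assuming
# `asks.init('trio')` has already run as in `piker.brokers.__init__`:
#
#     import trio
#
#     async def main() -> None:
#         async with get_client() as client:
#             bars = await client.bars('btcusdt')  # last 1000 1m bars
#             print(bars[-1])
#
#     trio.run(main)
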
# validation type
class AggTrade(BaseModel):
    e: str   # Event type
    E: int   # Event time
    s: str   # Symbol
    a: int   # Aggregate trade ID
    p: float  # Price
    q: float  # Quantity
    f: int   # First trade ID
    l: int   # Last trade ID
    T: int   # Trade time
    m: bool  # Is the buyer the market maker?
    M: bool  # Ignore


async def stream_messages(ws):

    timeouts = 0
    while True:

        with trio.move_on_after(3) as cs:
            msg = await ws.recv_msg()

        if cs.cancelled_caught:

            timeouts += 1
            if timeouts > 2:
                log.error("binance feed seems down and slow af? rebooting...")
                await ws._connect()

            continue

        # for l1 streams binance doesn't add an event type field so
        # identify those messages by matching keys
        # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams

        if msg.get('u'):
            sym = msg['s']
            bid = float(msg['b'])
            bsize = float(msg['B'])
            ask = float(msg['a'])
            asize = float(msg['A'])

            yield 'l1', {
                'symbol': sym,
                'ticks': [
                    {'type': 'bid', 'price': bid, 'size': bsize},
                    {'type': 'bsize', 'price': bid, 'size': bsize},
                    {'type': 'ask', 'price': ask, 'size': asize},
                    {'type': 'asize', 'price': ask, 'size': asize}
                ]
            }

        elif msg.get('e') == 'aggTrade':

            # validate
            msg = AggTrade(**msg)

            # TODO: type out and require this quote format
            # from all backends!
            yield 'trade', {
                'symbol': msg.s,
                'last': msg.p,
                'brokerd_ts': time.time(),
                'ticks': [{
                    'type': 'trade',
                    'price': msg.p,
                    'size': msg.q,
                    'broker_ts': msg.T,
                }],
            }


def make_sub(pairs: List[str], sub_name: str, uid: int) -> Dict[str, str]:
    """Create a request subscription packet dict.

    https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams
    """
    return {
        'method': 'SUBSCRIBE',
        'params': [
            f'{pair.lower()}@{sub_name}'
            for pair in pairs
        ],
        'id': uid
    }
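
# For example the L1 sub built in `stream_quotes()` below
# serializes to:
#
#     make_sub(['BTCUSDT'], 'bookTicker', 0)
#     # -> {'method': 'SUBSCRIBE',
#     #     'params': ['btcusdt@bookTicker'],
#     #     'id': 0}
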
async def backfill_bars(
    sym: str,
    shm: ShmArray,  # type: ignore # noqa
    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
) -> None:
    """Fill historical bars into shared mem / storage afap.
    """
    with trio.CancelScope() as cs:
        async with open_cached_client('binance') as client:
            bars = await client.bars(symbol=sym)
            shm.push(bars)
            task_status.started(cs)


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: List[str],
    shm: ShmArray,
    feed_is_live: trio.Event,
    loglevel: str = None,

    # startup sync
    task_status: TaskStatus[Tuple[Dict, Dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    sym_infos = {}
    uid = 0

    async with (
        open_cached_client('binance') as client,
        send_chan as send_chan,
    ):

        # keep client cached for real-time section
        cache = await client.cache_symbols()

        for sym in symbols:
            d = cache[sym.upper()]
            syminfo = Pair(**d)  # validation

            si = sym_infos[sym] = syminfo.dict()

            # XXX: after manually inspecting the response format we
            # just directly pick out the info we need
            si['price_tick_size'] = syminfo.filters[0]['tickSize']
            si['lot_tick_size'] = syminfo.filters[2]['stepSize']

        symbol = symbols[0]

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
            },
        }

        @asynccontextmanager
        async def subscribe(ws: wsproto.WSConnection):
            # setup subs

            # trade data (aka L1)
            # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker
            l1_sub = make_sub(symbols, 'bookTicker', uid)
            await ws.send_msg(l1_sub)

            # aggregate (each order clear by taker **not** by maker)
            # trades data:
            # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams
            agg_trades_sub = make_sub(symbols, 'aggTrade', uid)
            await ws.send_msg(agg_trades_sub)

            # ack from ws server
            res = await ws.recv_msg()
            assert res['id'] == uid

            yield

            subs = []
            for sym in symbols:
                subs.append(f"{sym}@aggTrade")
                subs.append(f"{sym}@bookTicker")

            # unsub from all pairs on teardown
            await ws.send_msg({
                "method": "UNSUBSCRIBE",
                "params": subs,
                "id": uid,
            })

            # XXX: do we need to ack the unsub?
            # await ws.recv_msg()

        async with open_autorecon_ws(
            'wss://stream.binance.com/ws',
            fixture=subscribe,
        ) as ws:

            # pull a first quote and deliver
            msg_gen = stream_messages(ws)

            typ, quote = await msg_gen.__anext__()

            while typ != 'trade':
                # TODO: use ``anext()`` when it lands in 3.10!
                typ, quote = await msg_gen.__anext__()

            first_quote = {quote['symbol'].lower(): quote}
            task_status.started((init_msgs, first_quote))

            # signal to caller feed is ready for consumption
            feed_is_live.set()

            # start streaming
            async for typ, msg in msg_gen:

                topic = msg['symbol'].lower()
                await send_chan.send({topic: msg})


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,
) -> Client:
    async with open_cached_client('binance') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started()

        async with ctx.open_stream() as stream:

            async for pattern in stream:
                # results = await client.symbol_info(sym=pattern.upper())

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )
                # repack in dict form
                await stream.send(
                    {item[0]['symbol']: item[0]
                     for item in matches}
                )
@ -1,60 +0,0 @@
# piker: trading gear for hackers
# Copyright (C)
#   Guillermo Rodriguez (aka ze jefe)
#   Tyler Goodlet
# (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
binancial secs on the floor, in the office, behind the dumpster.

"""
from .api import (
    get_client,
)
from .feed import (
    get_mkt_info,
    open_history_client,
    open_symbol_search,
    stream_quotes,
)
from .broker import (
    open_trade_dialog,
    get_cost,
)
from .venues import (
    SpotPair,
    FutesPair,
)

__all__ = [
    'get_client',
    'get_mkt_info',
    'get_cost',
    'SpotPair',
    'FutesPair',
    'open_trade_dialog',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
]


# `brokerd` modules
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
]

File diff suppressed because it is too large
@ -1,710 +0,0 @@
# piker: trading gear for hackers
# Copyright (C)
#   Guillermo Rodriguez (aka ze jefe)
#   Tyler Goodlet
# (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Live order control B)

'''
from __future__ import annotations
from pprint import pformat
from typing import (
    Any,
    AsyncIterator,
)
import time
from time import time_ns

from bidict import bidict
import tractor
import trio

from piker.accounting import (
    Asset,
)
from piker.brokers._util import (
    get_logger,
)
from piker.data._web_bs import (
    open_autorecon_ws,
    NoBsWs,
)
from piker.brokers import (
    open_cached_client,
    BrokerError,
)
from piker.clearing import (
    OrderDialogs,
)
from piker.clearing._messages import (
    BrokerdOrder,
    BrokerdOrderAck,
    BrokerdStatus,
    BrokerdPosition,
    BrokerdFill,
    BrokerdCancel,
    BrokerdError,
    Status,
    Order,
)
from .venues import (
    Pair,
    _futes_ws,
    _testnet_futes_ws,
)
from .api import Client

log = get_logger('piker.brokers.binance')


# Fee schedule template, mostly for paper engine fees modelling.
# https://www.binance.com/en/support/faq/what-are-market-makers-and-takers-360007720071
def get_cost(
    price: float,
    size: float,
    is_taker: bool = False,

) -> float:

    # https://www.binance.com/en/fee/trading
    cb: float = price * size
    match is_taker:
        case True:
            return cb * 0.001000

        case False if cb < 1e6:
            return cb * 0.001000

        case False if 1e6 <= cb < 5e6:
            return cb * 0.000900

        # NOTE: there's more but are you really going
        # to have a cb bigger than this per trade?
        case False if cb >= 5e6:
            return cb * 0.000800
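
# As a sanity check of the tiering above: a taker clearing 0.1 BTC
# at 26k USDT pays 10bps on the cost basis:
#
#     cb = 26_000 * 0.1  # 2600.0 USDT cost basis
#     assert get_cost(26_000, 0.1, is_taker=True) == cb * 0.001  # 2.6
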
async def handle_order_requests(
    ems_order_stream: tractor.MsgStream,
    client: Client,
    dids: bidict[str, str],
    dialogs: OrderDialogs,

) -> None:
    '''
    Receive order requests from `emsd`, translate them into
    broker-specific API calls and transmit.

    '''
    msg: dict | BrokerdOrder | BrokerdCancel
    async for msg in ems_order_stream:
        log.info(f'Rx order request:\n{pformat(msg)}')
        match msg:
            case {
                'action': 'cancel',
            }:
                cancel = BrokerdCancel(**msg)
                existing: BrokerdOrder | None = dialogs.get(cancel.oid)
                if not existing:
                    log.error(
                        f'NO Existing order-dialog for {cancel.oid}!?'
                    )
                    await ems_order_stream.send(BrokerdError(
                        oid=cancel.oid,

                        # TODO: do we need the symbol?
                        # https://github.com/pikers/piker/issues/514
                        symbol='unknown',

                        reason=(
                            'Invalid `binance` order request dialog oid',
                        )
                    ))
                    continue

                else:
                    symbol: str = existing['symbol']
                    try:
                        await client.submit_cancel(
                            symbol,
                            cancel.oid,
                        )
                    except BrokerError as be:
                        await ems_order_stream.send(
                            BrokerdError(
                                oid=msg['oid'],
                                symbol=symbol,
                                reason=(
                                    '`binance` CANCEL failed:\n'
                                    f'{be}'
                                ))
                        )
                        continue

            case {
                'account': ('binance.usdtm' | 'binance.spot') as account,
                'action': action,
            } if action in {'buy', 'sell'}:

                # validate
                order = BrokerdOrder(**msg)
                oid: str = order.oid  # emsd order id
                modify: bool = False

                # NOTE: check and report edits
                if existing := dialogs.get(order.oid):
                    log.info(
                        f'Existing order for {oid} updated:\n'
                        f'{pformat(existing.maps[-1])} -> {pformat(msg)}'
                    )
                    modify = True

                    # only add new msg AFTER the existing check
                    dialogs.add_msg(oid, msg)

                else:
                    # XXX NOTE: update before the ack!
                    # track latest request state such that map
                    # lookups start at the most recent msg and then
                    # scan reverse-chronologically.
                    dialogs.add_msg(oid, msg)

                # XXX: ACK the request **immediately** before sending
                # the api side request to ensure the ems maps the oid ->
                # reqid correctly!
                resp = BrokerdOrderAck(
                    oid=oid,  # ems order request id
                    reqid=oid,  # our custom int mapping
                    account='binance',  # piker account
                )
                await ems_order_stream.send(resp)

                # call our client api to submit the order
                # NOTE: modifies only require diff key for user oid:
                # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade
                try:
                    reqid = await client.submit_limit(
                        symbol=order.symbol,
                        side=order.action,
                        quantity=order.size,
                        price=order.price,
                        oid=oid,
                        modify=modify,
                    )

                    # SMH they do gen their own order id: ints..
                    # assert reqid == order.oid
                    dids[order.oid] = reqid

                except BrokerError as be:
                    await ems_order_stream.send(
                        BrokerdError(
                            oid=msg['oid'],
                            symbol=msg['symbol'],
                            reason=(
                                '`binance` request failed:\n'
                                f'{be}'
                            ))
                    )
                    continue

            case _:
                account = msg.get('account')
                if account not in {'binance.spot', 'binance.futes'}:
                    log.error(
                        'Order request does not have a valid binance account name?\n'
                        'Only one of\n'
                        '- `binance.spot` or,\n'
                        '- `binance.usdtm`\n'
                        'is currently valid!'
                    )
                await ems_order_stream.send(
                    BrokerdError(
                        oid=msg['oid'],
                        symbol=msg['symbol'],
                        reason=(
                            f'Invalid `binance` broker request msg:\n{msg}'
                        ))
                )
@tractor.context
async def open_trade_dialog(
    ctx: tractor.Context,

) -> AsyncIterator[dict[str, Any]]:

    # TODO: how do we set this from the EMS such that
    # positions are loaded from the correct venue on the user
    # stream at startup? (that is in an attempt to support both
    # spot and futes markets?)
    # - I guess we just want to instead start 2 separate user
    #   stream tasks right? unless we want another actor pool?
    # XXX: see issue: <urlhere>
    venue_name: str = 'futes'
    venue_mode: str = 'usdtm_futes'
    account_name: str = 'usdtm'
    use_testnet: bool = False

    # TODO: if/when we add .accounting support we need to
    # do a open_symcache() call.. though maybe we can hide
    # this in a new async version of open_account()?
    async with open_cached_client('binance') as client:
        subconf: dict|None = client.conf.get(venue_name)

        # XXX: if no futes.api_key or spot.api_key has been set we
        # always fall back to the paper engine!
        if (
            not subconf
            or
            not subconf.get('api_key')
        ):
            await ctx.started('paper')
            return

        use_testnet: bool = subconf.get('use_testnet', False)

    async with (
        open_cached_client('binance') as client,
    ):
        client.mkt_mode: str = venue_mode

        # TODO: map these wss urls depending on spot or futes
        # setting passed when this task is spawned?
        wss_url: str = _futes_ws if not use_testnet else _testnet_futes_ws

        wss: NoBsWs
        async with (
            client.manage_listen_key() as listen_key,
            open_autorecon_ws(f'{wss_url}/?listenKey={listen_key}') as wss,
        ):
            nsid: int = time_ns()
            await wss.send_msg({
                # "method": "SUBSCRIBE",
                "method": "REQUEST",
                "params":
                [
                    f"{listen_key}@account",
                    f"{listen_key}@balance",
                    f"{listen_key}@position",

                    # TODO: does this even work!? seems to cause
                    # a hang on the first msg..? lelelel.
                    # f"{listen_key}@order",
                ],
                "id": nsid
            })

            with trio.fail_after(6):
                msg = await wss.recv_msg()
                assert msg['id'] == nsid

            # TODO: load other market wide data / statistics:
            # - OI: https://binance-docs.github.io/apidocs/futures/en/#open-interest
            # - OI stats: https://binance-docs.github.io/apidocs/futures/en/#open-interest-statistics
            accounts: bidict[str, str] = bidict({'binance.usdtm': None})
            balances: dict[Asset, float] = {}
            positions: list[BrokerdPosition] = []

            for resp_dict in msg['result']:
                resp: dict = resp_dict['res']
                req: str = resp_dict['req']

                # @account response should be something like:
                # {'accountAlias': 'sRFzFzAuuXsR',
                #  'canDeposit': True,
                #  'canTrade': True,
                #  'canWithdraw': True,
                #  'feeTier': 0}
                if 'account' in req:
                    # NOTE: fill in the hash-like key/alias binance
                    # provides for the account.
                    alias: str = resp['accountAlias']
                    accounts['binance.usdtm'] = alias

                # @balance response:
                # {'accountAlias': 'sRFzFzAuuXsR',
                #  'balances': [{'asset': 'BTC',
                #                'availableBalance': '0.00000000',
                #                'balance': '0.00000000',
                #                'crossUnPnl': '0.00000000',
                #                'crossWalletBalance': '0.00000000',
                #                'maxWithdrawAmount': '0.00000000',
                #                'updateTime': 0}]
                #  ...
                # }
                elif 'balance' in req:
                    for entry in resp['balances']:
                        name: str = entry['asset']
                        balance: float = float(entry['balance'])
                        last_update_t: int = entry['updateTime']

                        spot_asset: Asset = client._venue2assets['spot'][name]

                        if balance > 0:
                            balances[spot_asset] = (balance, last_update_t)
                            # await tractor.pause()

                # @position response:
                # {'positions': [{'entryPrice': '0.0',
                #                 'isAutoAddMargin': False,
                #                 'isolatedMargin': '0',
                #                 'leverage': 20,
                #                 'liquidationPrice': '0',
                #                 'marginType': 'CROSSED',
                #                 'markPrice': '0.60289650',
                #                 'markPrice': '0.00000000',
                #                 'maxNotionalValue': '25000',
                #                 'notional': '0',
                #                 'positionAmt': '0',
                #                 'positionSide': 'BOTH',
                #                 'symbol': 'ETHUSDT_230630',
                #                 'unRealizedProfit': '0.00000000',
                #                 'updateTime': 1672741444894}
                #  ...
                # }
                elif 'position' in req:
                    for entry in resp['positions']:
                        bs_mktid: str = entry['symbol']
                        entry_size: float = float(entry['positionAmt'])

                        pair: Pair | None = client._venue2pairs[
                            venue_mode
                        ].get(bs_mktid)
                        if (
                            pair
                            and entry_size > 0
                        ):
                            entry_price: float = float(entry['entryPrice'])

                            ppmsg = BrokerdPosition(
                                broker='binance',
                                account=f'binance.{account_name}',

                                # TODO: maybe we should be passing back
                                # a `MktPair` here?
                                symbol=pair.bs_fqme.lower() + '.binance',

                                size=entry_size,
                                avg_price=entry_price,
                            )
                            positions.append(ppmsg)

                        if pair is None:
                            log.warning(
                                f'`{bs_mktid}` Position entry but no market pair?\n'
                                f'{pformat(entry)}\n'
                            )

            await ctx.started((
                positions,
                list(accounts)
            ))

            # TODO: package more state tracking into the dialogs API?
            # - hmm maybe we could include `OrderDialogs.dids:
            #   bidict` as part of the interface and then ask for
            #   a reqid field to be passed at init?
            #   |-> `OrderDialog(reqid_field='orderId')` kinda thing?
            # - also maybe bundle in some kind of dialog to account
            #   table?
            dialogs = OrderDialogs()
            dids: dict[str, int] = bidict()

            # TODO: further init setup things to get full EMS and
            # .accounting support B)
            # - live order loading via user stream subscription and
            #   update to the order dialog table.
            #   - MAKE SURE we add live orders loaded during init
            #     into the dialogs table to ensure they can be
            #     cancelled, meaning we can do a symbol lookup.
            # - position loading using `piker.accounting` subsys
            #   and comparison with binance's own position calcs.
            # - load pps and accounts using accounting apis, write
            #   the ledger and account files
            #   - table: Account
            #   - ledger: TransactionLedger

            async with (
                trio.open_nursery() as tn,
                ctx.open_stream() as ems_stream,
            ):
                # deliver all pre-exist open orders to EMS thus syncing
                # state with existing live limits reported by them.
                order: Order
                for order in await client.get_open_orders():
                    status_msg = Status(
                        time_ns=time.time_ns(),
                        resp='open',
                        oid=order.oid,
                        reqid=order.oid,

                        # embedded order info
                        req=order,
                        src='binance',
                    )
                    dialogs.add_msg(order.oid, order.to_dict())
                    await ems_stream.send(status_msg)

                tn.start_soon(
                    handle_order_requests,
                    ems_stream,
                    client,
                    dids,
                    dialogs,
                )
                tn.start_soon(
                    handle_order_updates,
                    venue_mode,
                    account_name,
                    client,
                    ems_stream,
                    wss,
                    dialogs,

                )

                await trio.sleep_forever()


async def handle_order_updates(
    venue: str,
    account_name: str,
    client: Client,
    ems_stream: tractor.MsgStream,
    wss: NoBsWs,
    dialogs: OrderDialogs,

) -> None:
    '''
    Main msg handling loop for all things order management.

    This code is broken out to make the context explicit and state
    variables defined in the signature clear to the reader.

    '''
    async for msg in wss:
        log.info(f'Rx USERSTREAM msg:\n{pformat(msg)}')
        match msg:

            # ORDER update
            # spot: https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update
            # futes: https://binance-docs.github.io/apidocs/futures/en/#event-order-update
            # futes: https://binance-docs.github.io/apidocs/futures/en/#event-balance-and-position-update
            # {'o': {
            #   'L': '0',
            #   'N': 'USDT',
            #   'R': False,
            #   'S': 'BUY',
            #   'T': 1687028772484,
            #   'X': 'NEW',
            #   'a': '0',
            #   'ap': '0',
            #   'b': '7012.06520',
            #   'c': '518d4122-8d3e-49b0-9a1e-1fabe6f62e4c',
            #   'cp': False,
            #   'f': 'GTC',
            #   'i': 3376956924,
            #   'l': '0',
            #   'm': False,
            #   'n': '0',
            #   'o': 'LIMIT',
            #   'ot': 'LIMIT',
            #   'p': '21136.80',
            #   'pP': False,
            #   'ps': 'BOTH',
            #   'q': '0.047',
            #   'rp': '0',
            #   's': 'BTCUSDT',
            #   'si': 0,
            #   'sp': '0',
            #   'ss': 0,
            #   't': 0,
            #   'wt': 'CONTRACT_PRICE',
            #   'x': 'NEW',
            #   'z': '0'}
            # }
            case {
                # 'e': 'executionReport',
                'e': 'ORDER_TRADE_UPDATE',
                'T': int(epoch_ms),
                'o': {
                    's': bs_mktid,

                    # XXX NOTE XXX see special ids for market
                    # events or margin calls:
                    # // special client order id:
                    # // starts with "autoclose-": liquidation order
                    # // "adl_autoclose": ADL auto close order
                    # // "settlement_autoclose-": settlement order
                    # //  for delisting or delivery
                    'c': oid,
                    # 'i': reqid,  # binance internal int id

                    # prices
                    'a': submit_price,
                    'ap': avg_price,
                    'L': fill_price,

                    # sizing
                    'q': req_size,
                    'l': clear_size_filled,  # this event
                    'z': accum_size_filled,  # accum

                    # commissions
                    'n': cost,
                    'N': cost_asset,

                    # state
                    'S': side,
                    'X': status,
                },
            } as order_msg:
                log.info(
                    f'{status} for {side} ORDER oid: {oid}\n'
                    f'bs_mktid: {bs_mktid}\n\n'

                    f'order size: {req_size}\n'
                    f'cleared size: {clear_size_filled}\n'
                    f'accum filled size: {accum_size_filled}\n\n'

                    f'submit price: {submit_price}\n'
                    f'fill_price: {fill_price}\n'
                    f'avg clearing price: {avg_price}\n\n'

                    f'cost: {cost}@{cost_asset}\n'
                )

                # status remap from binance to piker's
                # status set:
                # - NEW
                # - PARTIALLY_FILLED
                # - FILLED
                # - CANCELED
                # - EXPIRED
                # https://binance-docs.github.io/apidocs/futures/en/#event-order-update

                req_size: float = float(req_size)
                accum_size_filled: float = float(accum_size_filled)
                fill_price: float = float(fill_price)

                match status:
                    case 'PARTIALLY_FILLED' | 'FILLED':
                        status = 'fill'

                        fill_msg = BrokerdFill(
                            time_ns=time_ns(),
                            # reqid=reqid,
                            reqid=oid,

                            # just use size value for now?
                            # action=action,
                            size=clear_size_filled,
                            price=fill_price,

                            # TODO: maybe capture more msg data
                            # i.e fees?
                            broker_details={'name': 'broker'} | order_msg,
                            broker_time=time.time(),
                        )
                        await ems_stream.send(fill_msg)

                        if accum_size_filled == req_size:
                            status = 'closed'
                            dialogs.pop(oid)

                    case 'NEW':
                        status = 'open'

                    case 'EXPIRED':
                        status = 'canceled'
                        dialogs.pop(oid)

                    case _:
                        status = status.lower()

                resp = BrokerdStatus(
                    time_ns=time_ns(),
                    # reqid=reqid,
                    reqid=oid,

                    # TODO: i feel like we don't need to make the
                    # ems and upstream clients aware of this?
                    # account='binance.usdtm',

                    status=status,

                    filled=accum_size_filled,
                    remaining=req_size - accum_size_filled,
                    broker_details={
                        'name': 'binance',
                        'broker_time': epoch_ms / 1000.
                    }
                )
                await ems_stream.send(resp)

            # ACCOUNT and POSITION update B)
            # {
            #  'E': 1687036749218,
            #  'e': 'ACCOUNT_UPDATE'
            #  'T': 1687036749215,
            #  'a': {'B': [{'a': 'USDT',
            #               'bc': '0',
            #               'cw': '1267.48920735',
            #               'wb': '1410.90245576'}],
            #        'P': [{'cr': '-3292.10973007',
            #               'ep': '26349.90000',
            #               'iw': '143.41324841',
            #               'ma': 'USDT',
            #               'mt': 'isolated',
            #               'pa': '0.038',
            #               'ps': 'BOTH',
            #               's': 'BTCUSDT',
            #               'up': '5.17555453'}],
            #        'm': 'ORDER'},
            # }
            case {
                'T': int(epoch_ms),
                'e': 'ACCOUNT_UPDATE',
                'a': {
                    'P': [{
                        's': bs_mktid,
                        'pa': pos_amount,
                        'ep': entry_price,
                    }],
                },
            }:
                # real-time relay position updates back to EMS
                pair: Pair | None = client._venue2pairs[venue].get(bs_mktid)
                ppmsg = BrokerdPosition(
                    broker='binance',
                    account=f'binance.{account_name}',

                    # TODO: maybe we should be passing back
                    # a `MktPair` here?
                    symbol=pair.bs_fqme.lower() + '.binance',

                    size=float(pos_amount),
                    avg_price=float(entry_price),
                )
                await ems_stream.send(ppmsg)

            case _:
                log.warning(
                    'Unhandled event:\n'
                    f'{pformat(msg)}'
                )
@ -1,557 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Real-time and historical data feed endpoints.
|
||||
|
||||
'''
|
||||
from __future__ import annotations
|
||||
from contextlib import (
|
||||
asynccontextmanager as acm,
|
||||
aclosing,
|
||||
)
|
||||
from datetime import datetime
|
||||
from functools import (
|
||||
partial,
|
||||
)
|
||||
import itertools
|
||||
from pprint import pformat
|
||||
from typing import (
|
||||
Any,
|
||||
AsyncGenerator,
|
||||
Callable,
|
||||
Generator,
|
||||
)
|
||||
import time
|
||||
|
||||
import trio
|
||||
from trio_typing import TaskStatus
|
||||
from pendulum import (
|
||||
from_timestamp,
|
||||
)
|
||||
import numpy as np
|
||||
import tractor
|
||||
|
||||
from piker.brokers import (
|
||||
open_cached_client,
|
||||
NoData,
|
||||
)
|
||||
from piker._cacheables import (
|
||||
async_lifo_cache,
|
||||
)
|
||||
from piker.accounting import (
|
||||
Asset,
|
||||
DerivTypes,
|
||||
MktPair,
|
||||
unpack_fqme,
|
||||
)
|
||||
from piker.types import Struct
|
||||
from piker.data.validate import FeedInit
|
||||
from piker.data._web_bs import (
|
||||
open_autorecon_ws,
|
||||
NoBsWs,
|
||||
)
|
||||
from piker.brokers._util import (
|
||||
DataUnavailable,
|
||||
get_logger,
|
||||
)
|
||||
|
||||
from .api import (
|
||||
Client,
|
||||
)
|
||||
from .venues import (
|
||||
Pair,
|
||||
FutesPair,
|
||||
get_api_eps,
|
||||
)
|
||||
|
||||
log = get_logger('piker.brokers.binance')
|
||||
|
||||
|
||||
class L1(Struct):
|
||||
# https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams
|
||||
|
||||
update_id: int
|
||||
sym: str
|
||||
|
||||
bid: float
|
||||
bsize: float
|
||||
ask: float
|
||||
asize: float
|
||||
|
||||
|
||||
# validation type
|
||||
class AggTrade(Struct, frozen=True):
|
||||
e: str # Event type
|
||||
E: int # Event time
|
||||
s: str # Symbol
|
||||
a: int # Aggregate trade ID
|
||||
p: float # Price
|
||||
q: float # Quantity
|
||||
f: int # First trade ID
|
||||
l: int # noqa Last trade ID
|
||||
T: int # Trade time
|
||||
m: bool # Is the buyer the market maker?
|
||||
M: bool | None = None # Ignore
|
||||
|
||||
|
||||
async def stream_messages(
    ws: NoBsWs,

) -> AsyncGenerator[NoBsWs, dict]:

    # TODO: match syntax here!
    msg: dict[str, Any]
    async for msg in ws:
        match msg:
            # for l1 streams binance doesn't add an event type field so
            # identify those messages by matching keys
            # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams
            case {
                # NOTE: this is never an old value it seems, so
                # they are always sending real L1 spread updates.
                'u': upid,  # update id
                's': sym,
                'b': bid,
                'B': bsize,
                'a': ask,
                'A': asize,
            }:
                # TODO: it would be super nice to have a `L1` piker type
                # which "renders" incremental tick updates from a packed
                # msg-struct:
                # - backend msgs after packed into the type such that we
                #   can reduce IPC usage but without each backend having
                #   to do that incremental update logic manually B)
                # - would it maybe be more efficient to use this instead?
                #   https://binance-docs.github.io/apidocs/spot/en/#diff-depth-stream
                l1 = L1(
                    update_id=upid,
                    sym=sym,
                    bid=bid,
                    bsize=bsize,
                    ask=ask,
                    asize=asize,
                )
                # for speed probably better to only specifically
                # cast fields we need in numerical form?
                # l1.typecast()

                # repack into piker's tick-quote format
                yield 'l1', {
                    'symbol': l1.sym,
                    'ticks': [
                        {
                            'type': 'bid',
                            'price': float(l1.bid),
                            'size': float(l1.bsize),
                        },
                        {
                            'type': 'bsize',
                            'price': float(l1.bid),
                            'size': float(l1.bsize),
                        },
                        {
                            'type': 'ask',
                            'price': float(l1.ask),
                            'size': float(l1.asize),
                        },
                        {
                            'type': 'asize',
                            'price': float(l1.ask),
                            'size': float(l1.asize),
                        }
                    ]
                }

            # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams
            case {
                'e': 'aggTrade',
            }:
                # NOTE: this is purely for a definition,
                # ``msgspec.Struct`` does not runtime-validate until you
                # decode/encode, see:
                # https://jcristharif.com/msgspec/structs.html#type-validation
                msg = AggTrade(**msg)  # TODO: should we .copy() ?
                piker_quote: dict = {
                    'symbol': msg.s,
                    'last': float(msg.p),
                    'brokerd_ts': time.time(),
                    'ticks': [{
                        'type': 'trade',
                        'price': float(msg.p),
                        'size': float(msg.q),
                        'broker_ts': msg.T,
                    }],
                }
                yield 'trade', piker_quote

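# sketch for the `L1` "incremental tick render" TODO above
# (hypothetical helper, an assumption of how such a type might work,
# not current behavior): emit only the sides that actually changed
# between consecutive book-ticker msgs to cut IPC volume.
def _l1_diff_ticks(prev: L1 | None, cur: L1) -> list[dict]:
    ticks: list[dict] = []
    if prev is None or (cur.bid, cur.bsize) != (prev.bid, prev.bsize):
        ticks.append({
            'type': 'bid',
            'price': float(cur.bid),
            'size': float(cur.bsize),
        })
    if prev is None or (cur.ask, cur.asize) != (prev.ask, prev.asize):
        ticks.append({
            'type': 'ask',
            'price': float(cur.ask),
            'size': float(cur.asize),
        })
    return ticks
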
def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, Any]:
    '''
    Create a request subscription packet dict.

    - spot:
      https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams

    - futes:
      https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams

    '''
    return {
        'method': 'SUBSCRIBE',
        'params': [
            f'{pair.lower()}@{sub_name}'
            for pair in pairs
        ],
        'id': uid,
    }

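# usage sketch (hypothetical helper) showing the exact packet produced
# for a two-pair book-ticker sub:
def _demo_make_sub() -> None:
    sub = make_sub(['BTCUSDT', 'ETHUSDT'], 'bookTicker', 7)
    assert sub['method'] == 'SUBSCRIBE'
    assert sub['params'] == ['btcusdt@bookTicker', 'ethusdt@bookTicker']
    assert sub['id'] == 7
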
# TODO, why aren't frame resp `log.info()`s showing in upstream
# code?!
@acm
async def open_history_client(
    mkt: MktPair,

) -> tuple[Callable, dict]:

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('binance') as client:

        async def get_ohlc(
            timeframe: float,
            end_dt: datetime | None = None,
            start_dt: datetime | None = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:
            if timeframe != 60:
                raise DataUnavailable('Only 1m bars are supported')

            # TODO: better wrapping for venue / mode?
            # - eventually logic for usd vs. coin settled futes
            #   based on `MktPair.src` type/value?
            # - maybe something like `async with
            #   Client.use_venue('usdtm_futes')`
            if mkt.type_key in DerivTypes:
                client.mkt_mode = 'usdtm_futes'
            else:
                client.mkt_mode = 'spot'

            array: np.ndarray = await client.bars(
                mkt=mkt,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            if array.size == 0:
                raise NoData(
                    f'No frame for {start_dt} -> {end_dt}\n'
                )

            times = array['time']
            if not times.any():
                raise ValueError(
                    'Bad frame with null-times?\n\n'
                    f'{times}'
                )

            if end_dt is None:
                inow: int = round(time.time())
                if (inow - times[-1]) > 60:
                    await tractor.pause()

            start_dt = from_timestamp(times[0])
            end_dt = from_timestamp(times[-1])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}

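# caller-side sketch (hypothetical driver) of how the `get_ohlc`
# endpoint yielded above can be used to walk history backwards one
# frame at a time; assumes a 1m timeframe only:
async def _demo_backfill(mkt: MktPair) -> list[np.ndarray]:
    frames: list[np.ndarray] = []
    async with open_history_client(mkt) as (get_ohlc, _limits):
        end: datetime | None = None
        for _ in range(3):  # just a few frames for the demo
            array, start, _end = await get_ohlc(60, end_dt=end)
            frames.append(array)
            end = start  # next request ends where this frame began
    return frames
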
@async_lifo_cache()
async def get_mkt_info(
    fqme: str,

) -> tuple[MktPair, Pair] | None:

    # uppercase since kraken bs_mktid is always upper
    if 'binance' not in fqme.lower():
        fqme += '.binance'

    mkt_mode: str = ''
    broker, mkt_ep, venue, expiry = unpack_fqme(fqme)

    # NOTE: we always upper case all tokens to be consistent with
    # binance's symbology style for pairs, like `BTCUSDT`, but in
    # theory we could also just keep things lower case; as long as
    # we're consistent and the symcache matches whatever this func
    # returns, always!
    expiry: str = expiry.upper()
    venue: str = venue.upper()
    venue_lower: str = venue.lower()

    # XXX TODO: we should change the usdtm_futes name to just
    # usdm_futes (dropping the tether part) since it turns out that
    # there are indeed USD-tokens OTHER THAN tether being used as
    # the margin assets.. it's going to require a wholesale
    # (variable/key) rename as well as file name adjustments to any
    # existing tsdb set..
    if 'usd' in venue_lower:
        mkt_mode: str = 'usdtm_futes'

    # NO IDEA what these contracts (some kinda DEX-ish futes?) are
    # but we're masking them for now..
    elif (
        'defi' in venue_lower

        # TODO: handle coinm futes which have a margin asset that
        # is some crypto token!
        # https://binance-docs.github.io/apidocs/delivery/en/#exchange-information
        or 'btc' in venue_lower
    ):
        return None

    else:
        # NOTE: see the `FutesPair.bs_fqme: str` implementation
        # to understand the reverse market info lookup below.
        mkt_mode = venue_lower or 'spot'

    if (
        venue
        and 'spot' not in venue_lower

        # XXX: catch all in case user doesn't know which
        # venue they want (usdtm vs. coinm) and we can choose
        # a default (via config?) once we support coin-m APIs.
        or 'perp' in venue_lower
    ):
        if not mkt_mode:
            mkt_mode: str = f'{venue_lower}_futes'

    async with open_cached_client(
        'binance',
    ) as client:

        assets: dict[str, Asset] = await client.get_assets()
        pair_str: str = mkt_ep.upper()

        # switch venue-mode depending on input pattern parsing
        # since we want to use a particular endpoint (set) for
        # pair info lookup!
        client.mkt_mode = mkt_mode

        pair: Pair = await client.exch_info(
            pair_str,
            venue=mkt_mode,  # explicit
            expiry=expiry,
        )

        if 'futes' in mkt_mode:
            assert isinstance(pair, FutesPair)

        dst: Asset | None = assets.get(pair.bs_dst_asset)
        if (
            not dst
            # TODO: a known asset DNE list?
            # and pair.baseAsset == 'DEFI'
        ):
            log.warning(
                f'UNKNOWN {venue} asset {pair.baseAsset} from,\n'
                f'{pformat(pair.to_dict())}'
            )

            # XXX UNKNOWN missing "asset", though no idea why?
            # maybe it's only avail in the margin venue(s): /dapi/ ?
            return None

        mkt = MktPair(
            dst=dst,
            src=assets[pair.bs_src_asset],
            price_tick=pair.price_tick,
            size_tick=pair.size_tick,
            bs_mktid=pair.symbol,
            expiry=expiry,
            venue=venue,
            broker='binance',

            # NOTE: sectype is always taken from dst, see
            # `MktPair.type_key` and `Client._cache_pairs()`
            # _atype=sectype,
        )
        return mkt, pair

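# illustrative fqme inputs this resolver handles (exact symbology is
# defined by `piker.accounting.unpack_fqme()`; the suffix tokens below
# are assumptions for the example only):
#   'btcusdt.spot.binance'   -> spot venue lookup
#   'btcusdt.usdtm.binance'  -> usdt-margined futes lookup
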
@acm
async def subscribe(
    ws: NoBsWs,
    symbols: list[str],

    # defined once at import time to keep a global state B)
    iter_subids: Generator[int, None, None] = itertools.count(),

):
    # setup subs

    subid: int = next(iter_subids)

    # trade data (aka L1)
    # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker
    l1_sub = make_sub(symbols, 'bookTicker', subid)
    await ws.send_msg(l1_sub)

    # aggregate (each order clear by taker **not** by maker)
    # trades data:
    # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams
    agg_trades_sub = make_sub(symbols, 'aggTrade', subid)
    await ws.send_msg(agg_trades_sub)

    # might get ack from ws server, or maybe some
    # other msg still in transit..
    res = await ws.recv_msg()
    res_id: int | None = res.get('id')
    if res_id is not None:
        assert res_id == subid

    yield

    subs = []
    for sym in symbols:
        subs.append(f"{sym}@aggTrade")
        subs.append(f"{sym}@bookTicker")

    # unsub from all pairs on teardown
    if ws.connected():
        await ws.send_msg({
            "method": "UNSUBSCRIBE",
            "params": subs,
            "id": subid,
        })

        # XXX: do we need to ack the unsub?
        # await ws.recv_msg()

async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str | None = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:

    async with (
        send_chan as send_chan,
        open_cached_client('binance') as client,
    ):
        init_msgs: list[FeedInit] = []
        for sym in symbols:
            mkt: MktPair
            pair: Pair
            mkt, pair = await get_mkt_info(sym)

            # build out init msgs according to latest spec
            init_msgs.append(
                FeedInit(mkt_info=mkt)
            )

        wss_url: str = get_api_eps(client.mkt_mode)[1]  # 2nd elem is wss url

        # TODO: for sanity, but remove eventually Xp
        if 'future' in mkt.type_key:
            assert 'fstream' in wss_url

        async with (
            open_autorecon_ws(
                url=wss_url,
                fixture=partial(
                    subscribe,
                    symbols=[mkt.bs_mktid],
                ),
            ) as ws,

            # avoid stream-gen closure from breaking trio..
            aclosing(stream_messages(ws)) as msg_gen,
        ):
            # log.info('WAITING ON FIRST LIVE QUOTE..')
            typ, quote = await anext(msg_gen)

            # pull a first quote and deliver
            while typ != 'trade':
                typ, quote = await anext(msg_gen)

            task_status.started((init_msgs, quote))

            # signal to caller feed is ready for consumption
            feed_is_live.set()

            # import time
            # last = time.time()

            # XXX NOTE: can't include the `.binance` suffix
            # or the sampling loop will not broadcast correctly
            # since `bus._subscribers.setdefault(bs_fqme, set())`
            # is used inside `.data.open_feed_bus()` !!!
            topic: str = mkt.bs_fqme

            # start streaming
            async for typ, quote in msg_gen:
                # period = time.time() - last
                # hz = 1/period if period else float('inf')
                # if hz > 60:
                #     log.info(f'Binance quotez : {hz}')
                await send_chan.send({topic: quote})
                # last = time.time()

@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,
) -> None:

    # NOTE: symbology tables are loaded as part of client
    # startup in ``.api.get_client()`` and in this case
    # are stored as `Client._pairs`.
    async with open_cached_client('binance') as client:

        # TODO: maybe we should deliver the cache
        # so that client's can always do a local-lookup-first
        # style try and then update async as (new) match results
        # are delivered from here?
        await ctx.started()

        async with ctx.open_stream() as stream:

            pattern: str
            async for pattern in stream:
                # NOTE: pattern fuzzy-matching is done within
                # the method impl.
                pairs: dict[str, Pair] = await client.search_symbols(
                    pattern,
                )

                # repack in fqme-keyed table
                byfqme: dict[str, Pair] = {}
                for pair in pairs.values():
                    byfqme[pair.bs_fqme] = pair

                await stream.send(byfqme)

@@ -1,303 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
Per market data-type definitions and schema types.

"""
from __future__ import annotations
from typing import (
    Literal,
)
from decimal import Decimal

from msgspec import field

from piker.types import Struct


# API endpoint paths by venue / sub-API
_domain: str = 'binance.com'
_spot_url = f'https://api.{_domain}'
_futes_url = f'https://fapi.{_domain}'

# WEBsocketz
# NOTE XXX: see api docs which show diff addr?
# https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information
_spot_ws: str = 'wss://stream.binance.com/ws'
# or this one? ..
# 'wss://ws-api.binance.com:443/ws-api/v3',

# https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams
_futes_ws: str = f'wss://fstream.{_domain}/ws'
_auth_futes_ws: str = f'wss://fstream-auth.{_domain}/ws'

# test nets
# NOTE: spot test network only allows certain ep sets:
# https://testnet.binance.vision/
# https://www.binance.com/en/support/faq/how-to-test-my-functions-on-binance-testnet-ab78f9a1b8824cf0a106b4229c76496d
_testnet_spot_url: str = 'https://testnet.binance.vision/api'
_testnet_spot_ws: str = 'wss://testnet.binance.vision/ws'
# or this one? ..
# 'wss://testnet.binance.vision/ws-api/v3'

_testnet_futes_url: str = 'https://testnet.binancefuture.com'
_testnet_futes_ws: str = 'wss://stream.binancefuture.com/ws'


MarketType = Literal[
    'spot',
    # 'margin',
    'usdtm_futes',
    # 'coinm_futes',
]

def get_api_eps(venue: MarketType) -> tuple[str, str]:
    '''
    Return API ep root paths per venue.

    '''
    return {
        'spot': (
            _spot_url,
            _spot_ws,
        ),
        'usdtm_futes': (
            _futes_url,
            _futes_ws,
        ),
    }[venue]

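# usage sketch (hypothetical helper): resolve the (rest, ws) endpoint
# roots for a venue:
def _demo_get_api_eps() -> None:
    rest_url, ws_url = get_api_eps('usdtm_futes')
    assert rest_url.startswith('https://fapi.')
    assert ws_url.startswith('wss://fstream.')
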
class Pair(Struct, frozen=True, kw_only=True):

    symbol: str
    status: str
    orderTypes: list[str]

    # src
    quoteAsset: str
    quotePrecision: int

    # dst
    baseAsset: str
    baseAssetPrecision: int

    filters: dict[
        str,
        str | int | float,
    ] = field(default_factory=dict)

    @property
    def price_tick(self) -> Decimal:
        # XXX: lul, after manually inspecting the response format we
        # just directly pick out the info we need
        step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0')
        return Decimal(step_size)

    @property
    def size_tick(self) -> Decimal:
        step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0')
        return Decimal(step_size)

    @property
    def bs_fqme(self) -> str:
        return self.symbol

    @property
    def bs_mktid(self) -> str:
        return f'{self.symbol}.{self.venue}'

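# sketch of the tick-size derivation above against a typical
# exchangeInfo filters payload (values illustrative only):
def _demo_tick_sizes() -> None:
    filters = {
        'PRICE_FILTER': {'tickSize': '0.01000000'},
        'LOT_SIZE': {'stepSize': '0.00100000'},
    }
    price_tick = Decimal(filters['PRICE_FILTER']['tickSize'].rstrip('0'))
    size_tick = Decimal(filters['LOT_SIZE']['stepSize'].rstrip('0'))
    assert price_tick == Decimal('0.01')
    assert size_tick == Decimal('0.001')
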
class SpotPair(Pair, frozen=True):

    cancelReplaceAllowed: bool
    allowTrailingStop: bool
    quoteAssetPrecision: int

    baseCommissionPrecision: int
    quoteCommissionPrecision: int

    icebergAllowed: bool
    ocoAllowed: bool
    quoteOrderQtyMarketAllowed: bool
    isSpotTradingAllowed: bool
    isMarginTradingAllowed: bool
    otoAllowed: bool

    defaultSelfTradePreventionMode: str
    allowedSelfTradePreventionModes: list[str]
    permissions: list[str]
    permissionSets: list[list[str]]

    # NOTE: see `.data._symcache.SymbologyCache.load()` for why
    ns_path: str = 'piker.brokers.binance:SpotPair'

    @property
    def venue(self) -> str:
        return 'SPOT'

    @property
    def bs_fqme(self) -> str:
        return f'{self.symbol}.SPOT'

    @property
    def bs_src_asset(self) -> str:
        return f'{self.quoteAsset}'

    @property
    def bs_dst_asset(self) -> str:
        return f'{self.baseAsset}'

class FutesPair(Pair):
    symbol: str  # 'BTCUSDT',
    pair: str  # 'BTCUSDT',
    baseAssetPrecision: int  # 8,
    contractType: str  # 'PERPETUAL',
    deliveryDate: int  # 4133404800000,
    liquidationFee: float  # '0.012500',
    maintMarginPercent: float  # '2.5000',
    marginAsset: str  # 'USDT',
    marketTakeBound: float  # '0.05',
    maxMoveOrderLimit: int  # 10000,
    onboardDate: int  # 1569398400000,
    pricePrecision: int  # 2,
    quantityPrecision: int  # 3,
    quoteAsset: str  # 'USDT',
    quotePrecision: int  # 8,
    requiredMarginPercent: float  # '5.0000',
    timeInForce: list[str]  # ['GTC', 'IOC', 'FOK', 'GTX'],
    triggerProtect: float  # '0.0500',
    underlyingSubType: list[str]  # ['PoW'],
    underlyingType: str  # 'COIN'

    # NOTE: see `.data._symcache.SymbologyCache.load()` for why
    ns_path: str = 'piker.brokers.binance:FutesPair'

    # NOTE: for compat with spot pairs and `MktPair.src: Asset`
    # processing..
    @property
    def quoteAssetPrecision(self) -> int:
        return self.quotePrecision

    @property
    def expiry(self) -> str:
        symbol: str = self.symbol
        contype: str = self.contractType
        match contype:
            case (
                'CURRENT_QUARTER'
                | 'CURRENT_QUARTER DELIVERING'
                | 'NEXT_QUARTER'  # damn binance..
            ):
                pair, _, expiry = symbol.partition('_')
                assert pair == self.pair  # sanity
                return f'{expiry}'

            case 'PERPETUAL':
                return 'PERP'

            case '':
                subtype: list[str] = self.underlyingSubType
                if not subtype:
                    if self.status == 'PENDING_TRADING':
                        return 'PENDING'

                match subtype:
                    case ['DEFI']:
                        return 'PERP'

        # wow, just wow you binance guys suck..
        if self.status == 'PENDING_TRADING':
            return 'PENDING'

        # XXX: yeah no clue then..
        raise ValueError(
            f'Bad .expiry token match: {contype} for {symbol}'
        )

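    # illustrative `.expiry` outputs per the match arms above
    # (symbol/field values are hypothetical):
    #   contractType='PERPETUAL'                         -> 'PERP'
    #   contractType='CURRENT_QUARTER',
    #     symbol='BTCUSDT_230929'                        -> '230929'
    #   contractType='', underlyingSubType=['DEFI']      -> 'PERP'
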
    @property
    def venue(self) -> str:
        symbol: str = self.symbol
        ctype: str = self.contractType
        margin: str = self.marginAsset

        match ctype:
            case 'PERPETUAL':
                return f'{margin}M'

            case (
                'CURRENT_QUARTER'
                | 'CURRENT_QUARTER DELIVERING'
                | 'NEXT_QUARTER'  # damn binance..
            ):
                _, _, expiry = symbol.partition('_')
                return f'{margin}M'

            case '':
                subtype: list[str] = self.underlyingSubType
                if not subtype:
                    if self.status == 'PENDING_TRADING':
                        return f'{margin}M'

                match subtype:
                    case (
                        ['DEFI']
                        | ['USDC']
                    ):
                        return f'{subtype[0]}'

        # XXX: yeah no clue then..
        raise ValueError(
            f'Bad .venue token match: {ctype}'
        )

    @property
    def bs_fqme(self) -> str:
        symbol: str = self.symbol
        ctype: str = self.contractType
        venue: str = self.venue
        pair: str = self.pair

        match ctype:
            case (
                'CURRENT_QUARTER'
                | 'NEXT_QUARTER'  # damn binance..
            ):
                pair, _, expiry = symbol.partition('_')
                assert pair == self.pair

        return f'{pair}.{venue}.{self.expiry}'

    @property
    def bs_src_asset(self) -> str:
        return f'{self.quoteAsset}'

    @property
    def bs_dst_asset(self) -> str:
        return f'{self.baseAsset}.{self.venue}'

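# e.g. for a usdt-margined perp (field values illustrative):
#   FutesPair(symbol='BTCUSDT', contractType='PERPETUAL',
#             marginAsset='USDT', ...).bs_fqme == 'BTCUSDT.USDTM.PERP'
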
PAIRTYPES: dict[MarketType, type[Pair]] = {
    'spot': SpotPair,
    'usdtm_futes': FutesPair,

    # TODO: support coin-margined venue:
    # https://binance-docs.github.io/apidocs/delivery/en/#change-log
    # 'coinm_futes': CoinFutesPair,
}

@@ -21,189 +21,34 @@ import os
from functools import partial
from operator import attrgetter
from operator import itemgetter
from types import ModuleType

import click
import pandas as pd
import trio
import tractor

from ..cli import cli
from .. import watchlists as wl
from ..log import (
    colorize_json,
)
from ._util import (
    log,
    get_console_log,
)
from ..service import (
    maybe_spawn_brokerd,
    maybe_open_pikerd,
)
from ..brokers import (
    core,
    get_brokermod,
    data,
)
DEFAULT_BROKER = 'binance'
from ..log import get_console_log, colorize_json, get_logger
from .._daemon import maybe_spawn_brokerd, maybe_open_pikerd
from ..brokers import core, get_brokermod, data

log = get_logger('cli')
DEFAULT_BROKER = 'questrade'

_config_dir = click.get_app_dir('piker')
_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')


OK = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'


def print_ok(s: str, **kwargs):
    print(OK + s + ENDC, **kwargs)


def print_error(s: str, **kwargs):
    print(FAIL + s + ENDC, **kwargs)

def get_method(client, meth_name: str):
    print(f'checking client for method \'{meth_name}\'...', end='', flush=True)
    method = getattr(client, meth_name, None)
    assert method
    print_ok('found!')
    return method


async def run_method(client, meth_name: str, **kwargs):
    method = get_method(client, meth_name)
    print('running...', end='', flush=True)
    result = await method(**kwargs)
    print_ok(f'done! result: {type(result)}')
    return result

async def run_test(broker_name: str):
    brokermod = get_brokermod(broker_name)
    total = 0
    passed = 0
    failed = 0

    print('getting client...', end='', flush=True)
    if not hasattr(brokermod, 'get_client'):
        print_error('fail! no \'get_client\' context manager found.')
        return

    async with brokermod.get_client(is_brokercheck=True) as client:
        print_ok('done! inside client context.')

        # check for methods present on brokermod
        method_list = [
            'backfill_bars',
            'get_client',
            'trades_dialogue',
            'open_history_client',
            'open_symbol_search',
            'stream_quotes',
        ]

        for method in method_list:
            print(
                f'checking brokermod for method \'{method}\'...',
                end='', flush=True)
            if not hasattr(brokermod, method):
                print_error(f'fail! method \'{method}\' not found.')
                failed += 1
            else:
                print_ok('done!')
                passed += 1

            total += 1

        # check for methods present on brokermod.Client and their
        # results

        # for private methods only check is present
        method_list = [
            'get_balances',
            'get_assets',
            'get_trades',
            'get_xfers',
            'submit_limit',
            'submit_cancel',
            'search_symbols',
        ]

        for method_name in method_list:
            try:
                get_method(client, method_name)
                passed += 1

            except AssertionError:
                print_error(f'fail! method \'{method_name}\' not found.')
                failed += 1

            total += 1

        # check for methods present on brokermod.Client and their
        # results

        syms = await run_method(client, 'symbol_info')
        total += 1

        if len(syms) == 0:
            raise BaseException('Empty Symbol list?')

        passed += 1

        first_sym = tuple(syms.keys())[0]

        method_list = [
            ('cache_symbols', {}),
            ('search_symbols', {'pattern': first_sym[:-1]}),
            ('bars', {'symbol': first_sym})
        ]

        for method_name, method_kwargs in method_list:
            try:
                await run_method(client, method_name, **method_kwargs)
                passed += 1

            except AssertionError:
                print_error(f'fail! method \'{method_name}\' not found.')
                failed += 1

            total += 1

        print(f'total: {total}, passed: {passed}, failed: {failed}')

@cli.command()
@click.argument('broker', nargs=1, required=True)
@click.pass_obj
def brokercheck(config, broker):
    '''
    Test broker apis for completeness.

    '''
    async def bcheck_main():
        async with maybe_spawn_brokerd(broker) as portal:
            await portal.run(run_test, broker)
            await portal.cancel_actor()

    trio.run(run_test, broker)

@cli.command()
@click.option('--keys', '-k', multiple=True,
              help='Return results only for these keys')
@click.argument('meth', nargs=1)
@click.argument('kwargs', nargs=-1)
@click.pass_obj
def api(config, meth, kwargs, keys):
    '''
    Make a broker-client API method call

    '''
    """Make a broker-client API method call
    """
    # global opts
    broker = config['brokers'][0]

@@ -234,15 +79,15 @@ def api(config, meth, kwargs, keys):


@cli.command()
@click.option('--df-output', '-df', flag_value=True,
              help='Output in `pandas.DataFrame` format')
@click.argument('tickers', nargs=-1, required=True)
@click.pass_obj
def quote(config, tickers):
    '''
    Print symbol quotes to the console

    '''
def quote(config, tickers, df_output):
    """Print symbol quotes to the console
    """
    # global opts
    brokermod = list(config['brokermods'].values())[0]
    brokermod = config['brokermods'][0]

    quotes = trio.run(partial(core.stocks_quote, brokermod, tickers))
    if not quotes:
@@ -255,21 +100,30 @@ def quote(config, tickers):
        if ticker not in syms:
            brokermod.log.warn(f"Could not find symbol {ticker}?")

    click.echo(colorize_json(quotes))
    if df_output:
        cols = next(filter(bool, quotes)).copy()
        cols.pop('symbol')
        df = pd.DataFrame(
            (quote or {} for quote in quotes),
            columns=cols,
        )
        click.echo(df)
    else:
        click.echo(colorize_json(quotes))


@cli.command()
@click.option('--df-output', '-df', flag_value=True,
              help='Output in `pandas.DataFrame` format')
@click.option('--count', '-c', default=1000,
              help='Number of bars to retrieve')
@click.argument('symbol', required=True)
@click.pass_obj
def bars(config, symbol, count):
    '''
    Retrieve 1m bars for symbol and print on the console

    '''
def bars(config, symbol, count, df_output):
    """Retrieve 1m bars for symbol and print on the console
    """
    # global opts
    brokermod = list(config['brokermods'].values())[0]
    brokermod = config['brokermods'][0]

    # broker backend should return at the least a
    # list of candle dictionaries
@@ -279,7 +133,7 @@ def bars(config, symbol, count):
            brokermod,
            symbol,
            count=count,
            as_np=False,
            as_np=df_output
        )
    )

@@ -287,7 +141,10 @@ def bars(config, symbol, count):
        log.error(f"No quotes could be found for {symbol}?")
        return

    click.echo(colorize_json(bars))
    if df_output:
        click.echo(pd.DataFrame(bars))
    else:
        click.echo(colorize_json(bars))


@cli.command()
@@ -299,12 +156,10 @@ def bars(config, symbol, count):
@click.argument('name', nargs=1, required=True)
@click.pass_obj
def record(config, rate, name, dhost, filename):
    '''
    Record client side quotes to a file on disk

    '''
    """Record client side quotes to a file on disk
    """
    # global opts
    brokermod = list(config['brokermods'].values())[0]
    brokermod = config['brokermods'][0]
    loglevel = config['loglevel']
    log = config['log']

@@ -340,10 +195,8 @@ def record(config, rate, name, dhost, filename):
@click.argument('symbol', required=True)
@click.pass_context
def contracts(ctx, loglevel, broker, symbol, ids):
    '''
    Get list of all option contracts for symbol

    '''
    """Get list of all option contracts for symbol
    """
    brokermod = get_brokermod(broker)
    get_console_log(loglevel)

@@ -360,16 +213,16 @@ def contracts(ctx, loglevel, broker, symbol, ids):


@cli.command()
@click.option('--df-output', '-df', flag_value=True,
              help='Output in `pandas.DataFrame` format')
@click.option('--date', '-d', help='Contracts expiry date')
@click.argument('symbol', required=True)
@click.pass_obj
def optsquote(config, symbol, date):
    '''
    Retrieve symbol option quotes on the console

    '''
def optsquote(config, symbol, df_output, date):
    """Retrieve symbol option quotes on the console
    """
    # global opts
    brokermod = list(config['brokermods'].values())[0]
    brokermod = config['brokermods'][0]

    quotes = trio.run(
        partial(
@@ -380,157 +233,67 @@ def optsquote(config, symbol, date):
    log.error(f"No option quotes could be found for {symbol}?")
    return

    click.echo(colorize_json(quotes))
    if df_output:
        df = pd.DataFrame(
            (quote.values() for quote in quotes),
            columns=quotes[0].keys(),
        )
        click.echo(df)
    else:
        click.echo(colorize_json(quotes))


@cli.command()
@click.argument('tickers', nargs=-1, required=True)
@click.pass_obj
def mkt_info(
    config: dict,
    tickers: list[str],
):
    '''
    Print symbol quotes to the console

    '''
    from msgspec.json import encode, decode
    from ..accounting import MktPair
    from ..service import (
        open_piker_runtime,
    )

def symbol_info(config, tickers):
    """Print symbol quotes to the console
    """
    # global opts
    brokermods: dict[str, ModuleType] = config['brokermods']
    brokermod = config['brokermods'][0]

    mkts: list[MktPair] = []
    async def main():

        async with open_piker_runtime(
            name='mkt_info_query',
            # loglevel=loglevel,
            debug_mode=True,

        ) as (_, _):
            for fqme in tickers:
                bs_fqme, _, broker = fqme.rpartition('.')
                brokermod: ModuleType = brokermods[broker]
                mkt, bs_pair = await core.mkt_info(
                    brokermod,
                    bs_fqme,
                )
                mkts.append((mkt, bs_pair))

    trio.run(main)

    if not mkts:
        log.error(
            f'No market info could be found for {tickers}'
        )
    quotes = trio.run(partial(core.symbol_info, brokermod, tickers))
    if not quotes:
        log.error(f"No quotes could be found for {tickers}?")
        return

    if len(mkts) < len(tickers):
        syms = tuple(map(itemgetter('fqme'), mkts))
    if len(quotes) < len(tickers):
        syms = tuple(map(itemgetter('symbol'), quotes))
        for ticker in tickers:
            if ticker not in syms:
                log.warn(f"Could not find symbol {ticker}?")
                brokermod.log.warn(f"Could not find symbol {ticker}?")


    # TODO: use ``rich.Table`` instead here!
    for mkt, bs_pair in mkts:
        click.echo(
            '\n'
            '----------------------------------------------------\n'
            f'{type(bs_pair)}\n'
            '----------------------------------------------------\n'
            f'{colorize_json(bs_pair.to_dict())}\n'
            '----------------------------------------------------\n'
            f'as piker `MktPair` with fqme: {mkt.fqme}\n'
            '----------------------------------------------------\n'
            # NOTE: roundtrip to json codec for console print
            f'{colorize_json(decode(encode(mkt)))}'
        )
    click.echo(colorize_json(quotes))

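# e.g. how the fqme is split above (plain `str.rpartition` semantics):
#   'btcusdt.usdtm.perp.binance'.rpartition('.')
#   -> ('btcusdt.usdtm.perp', '.', 'binance')
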
@cli.command()
@click.argument('pattern', required=True)
# TODO: move this to top level click/typer context for all subs
@click.option(
    '--pdb',
    is_flag=True,
    help='Enable tractor debug mode',
)
@click.pass_obj
def search(
    config: dict,
    pattern: str,
    pdb: bool,
):
    '''
    Search for symbols from broker backend(s).

    '''
def search(config, pattern):
    """Search for symbols from broker backend(s).
    """
    # global opts
    brokermods = list(config['brokermods'].values())
    brokermods = config['brokermods']

    # define tractor entrypoint
    async def main(func):

        async with maybe_open_pikerd(
            loglevel=config['loglevel'],
            debug_mode=pdb,
        ):
            return await func()

    from piker.toolz import open_crash_handler
    with open_crash_handler():
        quotes = trio.run(
            main,
            partial(
                core.symbol_search,
                brokermods,
                pattern,
            ),
        )
    quotes = trio.run(
        main,
        partial(
            core.symbol_search,
            brokermods,
            pattern,
        ),
    )

    if not quotes:
        log.error(f"No matches could be found for {pattern}?")
        return
    if not quotes:
        log.error(f"No matches could be found for {pattern}?")
        return

    click.echo(colorize_json(quotes))

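# example invocation (illustrative):
#   $ piker search 'btc'
# which fans out `core.symbol_search()` over all loaded brokermods and
# prints any matches as colorized json.
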
@cli.command()
@click.argument('section', required=False)
@click.argument('value', required=False)
@click.option('--delete', '-d', flag_value=True, help='Delete section')
@click.pass_obj
def brokercfg(config, section, value, delete):
    '''
    If invoked with no arguments, open an editor to edit broker
    configs file or get / update an individual section.

    '''
    from .. import config

    if section:
        conf, path = config.load()

        if not delete:
            if value:
                config.set_value(conf, section, value)

            click.echo(
                colorize_json(
                    config.get_value(conf, section))
            )
        else:
            config.del_value(conf, section)

        config.write(config=conf)

    else:
        conf, path = config.load(raw=True)
        config.write(
            raw=click.edit(text=conf)
        )
    click.echo(colorize_json(quotes))

@@ -0,0 +1,103 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
Broker configuration mgmt.
"""
import os
from os.path import dirname
import shutil

import toml
import click

from ..log import get_logger

log = get_logger('broker-config')

_config_dir = click.get_app_dir('piker')
_file_name = 'brokers.toml'


def _override_config_dir(
    path: str
) -> None:
    global _config_dir
    _config_dir = path

def get_broker_conf_path():
    """Return the default config path normally under
    ``~/.config/piker`` on linux.

    Contains files such as:
    - brokers.toml
    - watchlists.toml
    - signals.toml
    - strats.toml

    """
    return os.path.join(_config_dir, _file_name)


def repodir():
    """Return the abspath to the repo directory.
    """
    dirpath = os.path.abspath(
        # we're 3 levels down in **this** module file
        dirname(dirname(dirname(os.path.realpath(__file__))))
    )
    return dirpath

def load(
    path: str = None
) -> tuple[dict, str]:
    """Load broker config.
    """
    path = path or get_broker_conf_path()
    if not os.path.isfile(path):
        shutil.copyfile(
            os.path.join(repodir(), 'data/brokers.toml'),
            path,
        )

    config = toml.load(path)
    log.debug(f"Read config file {path}")
    return config, path


def write(
    config: dict,  # toml config as dict
    path: str = None,
) -> None:
    """Write broker config to disk.

    Create a ``brokers.toml`` file if one does not exist.
    """
    path = path or get_broker_conf_path()
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        log.debug(f"Creating config dir {_config_dir}")
        os.makedirs(dirname)

    if not config:
        raise ValueError(
            "Watch out you're trying to write a blank config!")

    log.debug(f"Writing config file {path}")
    with open(path, 'w') as cf:
        return toml.dump(config, cf)

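# usage sketch (hypothetical helper; section/key names illustrative):
# load (copying the template on first run), tweak a section, persist.
def _demo_conf_roundtrip() -> None:
    conf, path = load()
    conf.setdefault('binance', {})['use_testnet'] = True
    write(conf, path=path)
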
@@ -26,11 +26,13 @@ from typing import List, Dict, Any, Optional

import trio

from ._util import log
from ..log import get_logger
from . import get_brokermod
from ..service import maybe_spawn_brokerd
from . import open_cached_client
from ..accounting import MktPair
from .._daemon import maybe_spawn_brokerd
from .api import open_cached_client


log = get_logger(__name__)


async def api(brokername: str, methname: str, **kwargs) -> dict:
@@ -95,15 +97,15 @@ async def option_chain(
    return await client.option_chains(contracts)


# async def contracts(
#     brokermod: ModuleType,
#     symbol: str,
# ) -> Dict[str, Dict[str, Dict[str, Any]]]:
#     """Return option contracts (all expiries) for ``symbol``.
#     """
#     async with brokermod.get_client() as client:
#         # return await client.get_all_contracts([symbol])
#         return await client.get_all_contracts([symbol])
async def contracts(
    brokermod: ModuleType,
    symbol: str,
) -> Dict[str, Dict[str, Dict[str, Any]]]:
    """Return option contracts (all expiries) for ``symbol``.
    """
    async with brokermod.get_client() as client:
        # return await client.get_all_contracts([symbol])
        return await client.get_all_contracts([symbol])


async def bars(
@@ -117,6 +119,17 @@ async def bars(
    return await client.bars(symbol, **kwargs)


async def symbol_info(
    brokermod: ModuleType,
    symbol: str,
    **kwargs,
) -> Dict[str, Dict[str, Dict[str, Any]]]:
    """Return symbol info from broker.
    """
    async with brokermod.get_client() as client:
        return await client.symbol_info(symbol, **kwargs)


async def search_w_brokerd(name: str, pattern: str) -> dict:

    async with open_cached_client(name) as client:
@@ -129,27 +142,15 @@ async def symbol_search(
    brokermods: list[ModuleType],
    pattern: str,
    **kwargs,

) -> Dict[str, Dict[str, Dict[str, Any]]]:
    '''
    Return symbol info from broker.

    '''
    """Return symbol info from broker.
    """
    results = []

    async def search_backend(
        brokermod: ModuleType
    ) -> None:

        brokername: str = brokermod.name
    async def search_backend(brokername: str) -> None:

        async with maybe_spawn_brokerd(
            brokermod.name,
            infect_asyncio=getattr(
                brokermod,
                '_infect_asyncio',
                False,
            ),
            brokername,
        ) as portal:

            results.append((
@@ -167,20 +168,3 @@ async def symbol_search(
        n.start_soon(search_backend, mod.name)

    return results


async def mkt_info(
    brokermod: ModuleType,
    fqme: str,
    **kwargs,

) -> MktPair:
    '''
    Return MktPair info from broker including src and dst assets.

    '''
    async with open_cached_client(brokermod.name) as client:
        assert client
        return await brokermod.get_mkt_info(
            fqme.replace(brokermod.name, '')
        )
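
# e.g. what the fqme de-suffixing above does (plain `str.replace`):
#   'btcusdt.usdtm.perp.binance'.replace('binance', '')
#   -> 'btcusdt.usdtm.perp.'  (note the trailing '.' is left in place)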


@@ -14,14 +14,9 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
NB: this is the old original implementation that was used way way back
when the project started with ``kivy``.

This code is left for reference but will likely be merged in
appropriately and removed.

'''
"""
Real-time data feed machinery
"""
import time
from functools import partial
from dataclasses import dataclass, field
@@ -38,16 +33,15 @@ import contextlib

import trio
import tractor
from tractor.experimental import msgpub
from async_generator import asynccontextmanager

from ._util import (
    log,
    get_console_log,
)
from ..log import get_logger, get_console_log
from . import get_brokermod


log = get_logger(__name__)


async def wait_for_network(
    net_func: Callable,
    sleep: int = 1
@@ -99,7 +93,7 @@ class BrokerFeed:
    )


@msgpub(tasks=['stock', 'option'])
@tractor.msg.pub(tasks=['stock', 'option'])
async def stream_poll_requests(
    get_topics: Callable,
    get_quotes: Coroutine,
@@ -227,28 +221,26 @@ async def get_cached_feed(

@tractor.stream
async def start_quote_stream(
    stream: tractor.Context,  # marks this as a streaming func
    ctx: tractor.Context,  # marks this as a streaming func
    broker: str,
    symbols: List[Any],
    feed_type: str = 'stock',
    rate: int = 3,
) -> None:
    '''
    Handle per-broker quote stream subscriptions using a "lazy" pub-sub
    """Handle per-broker quote stream subscriptions using a "lazy" pub-sub
    pattern.

    Spawns new quoter tasks for each broker backend on-demand.
    Since most brokers seem to support batch quote requests we
    limit to one task per process (for now).

    '''
    """
    # XXX: why do we need this again?
    get_console_log(tractor.current_actor().loglevel)

    # pull global vars from local actor
    symbols = list(symbols)
    log.info(
        f"{stream.chan.uid} subscribed to {broker} for symbols {symbols}")
        f"{ctx.chan.uid} subscribed to {broker} for symbols {symbols}")
    # another actor task may have already created it
    async with get_cached_feed(broker) as feed:

@@ -292,13 +284,13 @@ async def start_quote_stream(
            assert fquote['displayable']
            payload[sym] = fquote

        await stream.send_yield(payload)
        await ctx.send_yield(payload)

    await stream_poll_requests(

        # ``trionics.msgpub`` required kwargs
        # ``msg.pub`` required kwargs
        task_name=feed_type,
        ctx=stream,
        ctx=ctx,
        topics=symbols,
        packetizer=feed.mod.packetizer,
@@ -321,11 +313,9 @@ async def call_client(


class DataFeed:
    '''
    Data feed client for streaming symbol data from and making API
    client calls to a (remote) ``brokerd`` daemon.

    '''
    """Data feed client for streaming symbol data from and making API client calls
    to a (remote) ``brokerd`` daemon.
    """
    _allowed = ('stock', 'option')

    def __init__(self, portal, brokermod):
@@ -1,70 +0,0 @@
``deribit`` backend
-------------------
pretty good liquidity crypto derivatives, uses custom json rpc over ws for
client methods, then `cryptofeed` for data streams.

status
******
- supports option charts
- no order support yet


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [deribit]
    key_id = 'XXXXXXXX'
    key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'

To obtain an api id and secret you need to create an account, which can be a
real market account over at:

- deribit.com (requires KYC for deposit address)

Or a testnet account over at:

- test.deribit.com

For testnet once the account is created here is how you deposit fake crypto to
try it out:

1) Go to Wallet:

.. figure:: assets/0_wallet.png
    :align: center
    :target: assets/0_wallet.png
    :alt: wallet page

2) Then click on the ellipsis menu and select deposit

.. figure:: assets/1_wallet_select_deposit.png
    :align: center
    :target: assets/1_wallet_select_deposit.png
    :alt: wallet deposit page

3) This will take you to the deposit address page

.. figure:: assets/2_gen_deposit_addr.png
    :align: center
    :target: assets/2_gen_deposit_addr.png
    :alt: generate deposit address page

4) After clicking generate you should see the address, copy it and go to the
   `coin faucet <https://test.deribit.com/dericoin/BTC/deposit>`_ and send fake
   coins to that address.

.. figure:: assets/3_deposit_address.png
    :align: center
    :target: assets/3_deposit_address.png
    :alt: generated address

5) Back in the deposit address page you should see the deposit in your history

.. figure:: assets/4_wallet_deposit_history.png
    :align: center
    :target: assets/4_wallet_deposit_history.png
    :alt: wallet deposit history
@@ -1,65 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''

from piker.log import get_logger

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
    # backfill_bars,
)
# from .broker import (
#     open_trade_dialog,
#     norm_trade_records,
# )

log = get_logger(__name__)

__all__ = [
    'get_client',
    # 'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    # 'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    # 'broker',
]

# passed to ``tractor.ActorNursery.start_actor()``
_spawn_kwargs = {
    'infect_asyncio': True,
}

# annotation to let backend agnostic code
# know if ``brokerd`` should be spawned with
# ``tractor``'s aio mode.
_infect_asyncio: bool = True
@@ -1,675 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''
import asyncio
from contextlib import (
    asynccontextmanager as acm,
)
from datetime import datetime
from functools import partial
import time
from typing import (
    Any,
    Optional,
    Callable,
)

import pendulum
import trio
from trio_typing import TaskStatus
from rapidfuzz import process as fuzzy
import numpy as np
from tractor.trionics import (
    broadcast_receiver,
    maybe_open_context
)
from tractor import to_asyncio
# XXX WOOPS XD
# yeah you'll need to install it since it was removed in #489 by
# accident; well i thought we had removed all usage..
from cryptofeed import FeedHandler
from cryptofeed.defines import (
    DERIBIT,
    L1_BOOK, TRADES,
    OPTION, CALL, PUT
)
from cryptofeed.symbols import Symbol

from piker.data import (
    def_iohlcv_fields,
    match_from_pairs,
    Struct,
)
from piker.data._web_bs import (
    open_jsonrpc_session
)


from piker import config
from piker.log import get_logger


log = get_logger(__name__)


_spawn_kwargs = {
    'infect_asyncio': True,
}


_url = 'https://www.deribit.com'
_ws_url = 'wss://www.deribit.com/ws/api/v2'
_testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'

class JSONRPCResult(Struct):
    id: int
    usIn: int
    usOut: int
    usDiff: int
    testnet: bool
    jsonrpc: str = '2.0'
    result: Optional[list[dict]] = None
    error: Optional[dict] = None


class JSONRPCChannel(Struct):
    method: str
    params: dict
    jsonrpc: str = '2.0'


class KLinesResult(Struct):
    close: list[float]
    cost: list[float]
    high: list[float]
    low: list[float]
    open: list[float]
    status: str
    ticks: list[int]
    volume: list[float]


class Trade(Struct):
    trade_seq: int
    trade_id: str
    timestamp: int
    tick_direction: int
    price: float
    mark_price: float
    iv: float
    instrument_name: str
    index_price: float
    direction: str
    amount: float
    combo_trade_id: Optional[int] = 0
    combo_id: Optional[str] = ''


class LastTradesResult(Struct):
    trades: list[Trade]
    has_more: bool

# convert datetime obj timestamp to unixtime in milliseconds
def deribit_timestamp(when):
    return int((when.timestamp() * 1000) + (when.microsecond / 1000))

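# quick sanity sketch (hypothetical helper) for the conversion above;
# note for an aware UTC datetime with zero microseconds the extra term
# is a no-op:
def _demo_deribit_timestamp() -> None:
    from datetime import datetime, timezone
    when = datetime(2024, 1, 1, tzinfo=timezone.utc)
    assert deribit_timestamp(when) == 1704067200000
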

def str_to_cb_sym(name: str) -> Symbol:
    base, strike_price, expiry_date, option_type = name.split('-')

    quote = base

    if option_type == 'put':
        option_type = PUT
    elif option_type == 'call':
        option_type = CALL
    else:
        raise Exception("Couldn't parse option type")

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date,
        expiry_normalize=False)

def piker_sym_to_cb_sym(name: str) -> Symbol:
    base, expiry_date, strike_price, option_type = tuple(
        name.upper().split('-'))

    quote = base

    if option_type == 'P':
        option_type = PUT
    elif option_type == 'C':
        option_type = CALL
    else:
        raise Exception("Couldn't parse option type")

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date.upper())

def cb_sym_to_deribit_inst(sym: Symbol):
    # cryptofeed normalized
    cb_norm = ['F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z']

    # deribit specific
    months = [
        'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
        'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC',
    ]

    exp = sym.expiry_date

    # YYMDD
    # 01234
    year, month, day = (
        exp[:2], months[cb_norm.index(exp[2:3])], exp[3:])

    otype = 'C' if sym.option_type == CALL else 'P'

    return f'{sym.base}-{day}{month}{year}-{sym.strike_price}-{otype}'

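# e.g. (values illustrative): a cryptofeed expiry code '24F05' slices
# to year='24', month-code 'F' -> 'JAN', day='05', so a BTC 50000 call
# maps to the deribit instrument name 'BTC-05JAN24-50000-C'.
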

def get_config() -> dict[str, Any]:

    conf, path = config.load()

    section = conf.get('deribit')

    # TODO: document why we send this; it sets the logging params
    # consumed by cryptofeed.
    conf['log'] = {}
    conf['log']['disabled'] = True

    if section is None:
        log.warning(f'No config section found for deribit in {path}')

    return conf

class Client:
|
||||
|
||||
def __init__(self, json_rpc: Callable) -> None:
|
||||
self._pairs: dict[str, Any] = None
|
||||
|
||||
config = get_config().get('deribit', {})
|
||||
|
||||
if ('key_id' in config) and ('key_secret' in config):
|
||||
self._key_id = config['key_id']
|
||||
self._key_secret = config['key_secret']
|
||||
|
||||
else:
|
||||
self._key_id = None
|
||||
self._key_secret = None
|
||||
|
||||
self.json_rpc = json_rpc
|
||||
|
||||
@property
|
||||
def currencies(self):
|
||||
return ['btc', 'eth', 'sol', 'usd']
|
||||
|
||||
async def get_balances(self, kind: str = 'option') -> dict[str, float]:
|
||||
"""Return the set of positions for this account
|
||||
by symbol.
|
||||
"""
|
||||
balances = {}
|
||||
|
||||
for currency in self.currencies:
|
||||
resp = await self.json_rpc(
|
||||
'private/get_positions', params={
|
||||
'currency': currency.upper(),
|
||||
'kind': kind})
|
||||
|
||||
balances[currency] = resp.result
|
||||
|
||||
return balances
|
||||
|
||||
async def get_assets(self) -> dict[str, float]:
|
||||
"""Return the set of asset balances for this account
|
||||
keyed by currency.
|
||||
"""
|
||||
balances = {}
|
||||
|
||||
for currency in self.currencies:
|
||||
resp = await self.json_rpc(
|
||||
'private/get_account_summary', params={
|
||||
'currency': currency.upper()})
|
||||
|
||||
balances[currency] = resp.result['balance']
|
||||
|
||||
return balances
|
||||
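# a usage sketch (the `private/*` endpoints require configured
# api creds so the auth loop can run):
#
#   balances: dict[str, float] = await client.get_assets()
#   for currency, balance in balances.items():
#       log.info(f'{currency}: {balance}')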
|
||||
async def submit_limit(
|
||||
self,
|
||||
symbol: str,
|
||||
price: float,
|
||||
action: str,
|
||||
size: float
|
||||
) -> dict:
|
||||
"""Place an order
|
||||
"""
|
||||
params = {
|
||||
'instrument_name': symbol.upper(),
|
||||
'amount': size,
|
||||
'type': 'limit',
|
||||
'price': price,
|
||||
}
|
||||
resp = await self.json_rpc(
|
||||
f'private/{action}', params)
|
||||
|
||||
return resp.result
|
||||
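# a hedged usage sketch; the instrument name is hypothetical and
# `action` selects between the 'private/buy' and 'private/sell'
# JSON-RPC endpoints; the response shape with an embedded 'order'
# table follows deribit's documented schema:
#
#   order = await client.submit_limit(
#       symbol='btc-27jun25-50000-c',  # hypothetical option
#       price=0.05,
#       action='buy',
#       size=1.0,
#   )
#   await client.submit_cancel(order['order']['order_id'])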
|
||||
async def submit_cancel(self, oid: str):
|
||||
"""Send cancel request for order id
|
||||
"""
|
||||
resp = await self.json_rpc(
|
||||
'private/cancel', {'order_id': oid})
|
||||
return resp.result
|
||||
|
||||
async def symbol_info(
|
||||
self,
|
||||
instrument: Optional[str] = None,
|
||||
currency: str = 'btc', # BTC, ETH, SOL, USDC
|
||||
kind: str = 'option',
|
||||
expired: bool = False
|
||||
|
||||
) -> dict[str, dict]:
|
||||
'''
|
||||
Get symbol infos.
|
||||
|
||||
'''
|
||||
if self._pairs:
|
||||
return self._pairs
|
||||
|
||||
# will retrieve all symbols by default
|
||||
params: dict[str, str] = {
|
||||
'currency': currency.upper(),
|
||||
'kind': kind,
|
||||
'expired': str(expired).lower()
|
||||
}
|
||||
|
||||
resp: JSONRPCResult = await self.json_rpc(
|
||||
'public/get_instruments',
|
||||
params,
|
||||
)
|
||||
# convert to symbol-keyed table
|
||||
results: list[dict] | None = resp.result
|
||||
instruments: dict[str, dict] = {
|
||||
item['instrument_name'].lower(): item
|
||||
for item in results
|
||||
}
|
||||
|
||||
if instrument is not None:
|
||||
return instruments[instrument]
|
||||
else:
|
||||
return instruments
|
||||
|
||||
async def cache_symbols(
|
||||
self,
|
||||
) -> dict:
|
||||
|
||||
if not self._pairs:
|
||||
self._pairs = await self.symbol_info()
|
||||
|
||||
return self._pairs
|
||||
|
||||
async def search_symbols(
|
||||
self,
|
||||
pattern: str,
|
||||
limit: int = 30,
|
||||
) -> dict[str, Any]:
|
||||
'''
|
||||
Fuzzy search symbology set for pairs matching `pattern`.
|
||||
|
||||
'''
|
||||
pairs: dict[str, Any] = await self.symbol_info()
|
||||
matches: dict[str, Pair] = match_from_pairs(
|
||||
pairs=pairs,
|
||||
query=pattern.upper(),
|
||||
score_cutoff=35,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
# repack in name-keyed table
|
||||
return {
|
||||
pair['instrument_name'].lower(): pair
|
||||
for pair in matches.values()
|
||||
}
|
||||
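# a usage sketch: keys of the returned table are lower-cased
# deribit instrument names:
#
#   matches = await client.search_symbols('btc-27jun')  # hypothetical pattern
#   for name in matches:
#       log.info(f'matched: {name}')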
|
||||
async def bars(
|
||||
self,
|
||||
symbol: str,
|
||||
start_dt: Optional[datetime] = None,
|
||||
end_dt: Optional[datetime] = None,
|
||||
limit: int = 1000,
|
||||
as_np: bool = True,
|
||||
) -> dict:
|
||||
instrument = symbol
|
||||
|
||||
if end_dt is None:
|
||||
end_dt = pendulum.now('UTC')
|
||||
|
||||
if start_dt is None:
|
||||
start_dt = end_dt.start_of(
|
||||
'minute').subtract(minutes=limit)
|
||||
|
||||
start_time = deribit_timestamp(start_dt)
|
||||
end_time = deribit_timestamp(end_dt)
|
||||
|
||||
# https://docs.deribit.com/#public-get_tradingview_chart_data
|
||||
resp = await self.json_rpc(
|
||||
'public/get_tradingview_chart_data',
|
||||
params={
|
||||
'instrument_name': instrument.upper(),
|
||||
'start_timestamp': start_time,
|
||||
'end_timestamp': end_time,
|
||||
'resolution': '1'
|
||||
})
|
||||
|
||||
result = KLinesResult(**resp.result)
|
||||
new_bars = []
|
||||
for i in range(len(result.close)):
|
||||
|
||||
_open = result.open[i]
|
||||
high = result.high[i]
|
||||
low = result.low[i]
|
||||
close = result.close[i]
|
||||
volume = result.volume[i]
|
||||
|
||||
row = [
|
||||
(start_time + (i * (60 * 1000))) / 1000.0, # time
|
||||
_open,
high,
low,
close,
volume,
0
|
||||
]
|
||||
|
||||
new_bars.append((i,) + tuple(row))
|
||||
|
||||
array = np.array(new_bars, dtype=def_iohlcv_fields) if as_np else new_bars
|
||||
return array
|
||||
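# a usage sketch: with no `start_dt`/`end_dt` passed this walks
# `limit` minutes back from "now" in 1m resolution:
#
#   array = await client.bars(
#       'BTC-PERPETUAL',  # non-option instrument, for illustration
#       limit=60,
#   )
#   log.info(f'got {len(array)} bars')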
|
||||
async def last_trades(
|
||||
self,
|
||||
instrument: str,
|
||||
count: int = 10
|
||||
):
|
||||
resp = await self.json_rpc(
|
||||
'public/get_last_trades_by_instrument',
|
||||
params={
|
||||
'instrument_name': instrument,
|
||||
'count': count
|
||||
})
|
||||
|
||||
return LastTradesResult(**resp.result)
|
||||
|
||||
|
||||
@acm
|
||||
async def get_client(
|
||||
is_brokercheck: bool = False
|
||||
) -> Client:
|
||||
|
||||
async with (
|
||||
trio.open_nursery() as n,
|
||||
open_jsonrpc_session(
|
||||
_testnet_ws_url, dtype=JSONRPCResult) as json_rpc
|
||||
):
|
||||
client = Client(json_rpc)
|
||||
|
||||
_refresh_token: Optional[str] = None
|
||||
_access_token: Optional[str] = None
|
||||
|
||||
async def _auth_loop(
|
||||
task_status: TaskStatus = trio.TASK_STATUS_IGNORED
|
||||
):
|
||||
"""Background task that adquires a first access token and then will
|
||||
refresh the access token while the nursery isn't cancelled.
|
||||
|
||||
https://docs.deribit.com/?python#authentication-2
|
||||
"""
|
||||
renew_time = 10
|
||||
access_scope = 'trade:read_write'
|
||||
_expiry_time = time.time()
|
||||
got_access = False
|
||||
nonlocal _refresh_token
|
||||
nonlocal _access_token
|
||||
|
||||
while True:
|
||||
if _expiry_time - time.time() < renew_time:
|
||||
# if we are close to token expiry time
|
||||
|
||||
if _refresh_token is not None:
|
||||
# if we already have a refresh token we don't need to send the
|
||||
# secret
|
||||
params = {
|
||||
'grant_type': 'refresh_token',
|
||||
'refresh_token': _refresh_token,
|
||||
'scope': access_scope
|
||||
}
|
||||
|
||||
else:
|
||||
# we don't have refresh token, send secret to initialize
|
||||
params = {
|
||||
'grant_type': 'client_credentials',
|
||||
'client_id': client._key_id,
|
||||
'client_secret': client._key_secret,
|
||||
'scope': access_scope
|
||||
}
|
||||
|
||||
resp = await json_rpc('public/auth', params)
|
||||
result = resp.result
|
||||
|
||||
_expiry_time = time.time() + result['expires_in']
|
||||
_refresh_token = result['refresh_token']
|
||||
|
||||
if 'access_token' in result:
|
||||
_access_token = result['access_token']
|
||||
|
||||
if not got_access:
|
||||
# the first time this loop runs we must indicate the task is
|
||||
# started, i.e. we now have auth
|
||||
got_access = True
|
||||
task_status.started()
|
||||
|
||||
else:
|
||||
await trio.sleep(renew_time / 2)
|
||||
|
||||
# if we have client creds launch auth loop
|
||||
if client._key_id is not None:
|
||||
await n.start(_auth_loop)
|
||||
|
||||
await client.cache_symbols()
|
||||
yield client
|
||||
n.cancel_scope.cancel()
|
||||
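# a minimal entry sketch: `get_client()` caches symbols on entry and,
# when creds are configured, spawns the token-refresh task in the
# background nursery:
async def _demo_client() -> None:
    async with get_client() as client:
        instruments = await client.symbol_info(currency='btc')
        log.info(f'loaded {len(instruments)} instruments')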
|
||||
|
||||
@acm
|
||||
async def open_feed_handler():
|
||||
fh = FeedHandler(config=get_config())
|
||||
yield fh
|
||||
await to_asyncio.run_task(fh.stop_async)
|
||||
|
||||
|
||||
@acm
|
||||
async def maybe_open_feed_handler() -> FeedHandler:
|
||||
async with maybe_open_context(
|
||||
acm_func=open_feed_handler,
|
||||
key='feedhandler',
|
||||
) as (cache_hit, fh):
|
||||
yield fh
|
||||
|
||||
|
||||
async def aio_price_feed_relay(
|
||||
fh: FeedHandler,
|
||||
instrument: Symbol,
|
||||
from_trio: asyncio.Queue,
|
||||
to_trio: trio.abc.SendChannel,
|
||||
) -> None:
|
||||
async def _trade(data: dict, receipt_timestamp):
|
||||
to_trio.send_nowait(('trade', {
|
||||
'symbol': cb_sym_to_deribit_inst(
|
||||
str_to_cb_sym(data.symbol)).lower(),
|
||||
'last': data,
|
||||
'broker_ts': time.time(),
|
||||
'data': data.to_dict(),
|
||||
'receipt': receipt_timestamp
|
||||
}))
|
||||
|
||||
async def _l1(data: dict, receipt_timestamp):
|
||||
to_trio.send_nowait(('l1', {
|
||||
'symbol': cb_sym_to_deribit_inst(
|
||||
str_to_cb_sym(data.symbol)).lower(),
|
||||
'ticks': [
|
||||
{'type': 'bid',
|
||||
'price': float(data.bid_price), 'size': float(data.bid_size)},
|
||||
{'type': 'bsize',
|
||||
'price': float(data.bid_price), 'size': float(data.bid_size)},
|
||||
{'type': 'ask',
|
||||
'price': float(data.ask_price), 'size': float(data.ask_size)},
|
||||
{'type': 'asize',
|
||||
'price': float(data.ask_price), 'size': float(data.ask_size)}
|
||||
]
|
||||
}))
|
||||
|
||||
fh.add_feed(
|
||||
DERIBIT,
|
||||
channels=[TRADES, L1_BOOK],
|
||||
symbols=[piker_sym_to_cb_sym(instrument)],
|
||||
callbacks={
|
||||
TRADES: _trade,
|
||||
L1_BOOK: _l1
|
||||
})
|
||||
|
||||
if not fh.running:
|
||||
fh.run(
|
||||
start_loop=False,
|
||||
install_signal_handlers=False)
|
||||
|
||||
# sync with trio
|
||||
to_trio.send_nowait(None)
|
||||
|
||||
await asyncio.sleep(float('inf'))
|
||||
|
||||
|
||||
@acm
|
||||
async def open_price_feed(
|
||||
instrument: str
|
||||
) -> trio.abc.ReceiveStream:
|
||||
async with maybe_open_feed_handler() as fh:
|
||||
async with to_asyncio.open_channel_from(
|
||||
partial(
|
||||
aio_price_feed_relay,
|
||||
fh,
|
||||
instrument
|
||||
)
|
||||
) as (first, chan):
|
||||
yield chan
|
||||
|
||||
|
||||
@acm
|
||||
async def maybe_open_price_feed(
|
||||
instrument: str
|
||||
) -> trio.abc.ReceiveStream:
|
||||
|
||||
# TODO: add a predicate to maybe_open_context
|
||||
async with maybe_open_context(
|
||||
acm_func=open_price_feed,
|
||||
kwargs={
|
||||
'instrument': instrument
|
||||
},
|
||||
key=f'{instrument}-price',
|
||||
) as (cache_hit, feed):
|
||||
if cache_hit:
|
||||
yield broadcast_receiver(feed, 10)
|
||||
else:
|
||||
yield feed
|
||||
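# a consumer sketch: the relay tasks above pack quotes as
# `(typ, quote)` pairs where `typ` is 'trade' or 'l1':
async def _demo_consume_feed(instrument: str) -> None:
    async with maybe_open_price_feed(instrument) as stream:
        async for typ, quote in stream:
            if typ == 'l1':
                log.info(f"l1 ticks: {quote['ticks']}")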
|
||||
|
||||
|
||||
async def aio_order_feed_relay(
|
||||
fh: FeedHandler,
|
||||
instrument: Symbol,
|
||||
from_trio: asyncio.Queue,
|
||||
to_trio: trio.abc.SendChannel,
|
||||
) -> None:
|
||||
async def _fill(data: dict, receipt_timestamp):
|
||||
breakpoint()  # TODO: fill handling not yet implemented
|
||||
|
||||
async def _order_info(data: dict, receipt_timestamp):
|
||||
breakpoint()  # TODO: order status handling not yet implemented
|
||||
|
||||
fh.add_feed(
|
||||
DERIBIT,
|
||||
channels=[FILLS, ORDER_INFO],
|
||||
symbols=[instrument.upper()],
|
||||
callbacks={
|
||||
FILLS: _fill,
|
||||
ORDER_INFO: _order_info,
|
||||
})
|
||||
|
||||
if not fh.running:
|
||||
fh.run(
|
||||
start_loop=False,
|
||||
install_signal_handlers=False)
|
||||
|
||||
# sync with trio
|
||||
to_trio.send_nowait(None)
|
||||
|
||||
await asyncio.sleep(float('inf'))
|
||||
|
||||
|
||||
@acm
|
||||
async def open_order_feed(
|
||||
instrument: list[str]
|
||||
) -> trio.abc.ReceiveStream:
|
||||
async with maybe_open_feed_handler() as fh:
|
||||
async with to_asyncio.open_channel_from(
|
||||
partial(
|
||||
aio_order_feed_relay,
|
||||
fh,
|
||||
instrument
|
||||
)
|
||||
) as (first, chan):
|
||||
yield chan
|
||||
|
||||
|
||||
@acm
|
||||
async def maybe_open_order_feed(
|
||||
instrument: str
|
||||
) -> trio.abc.ReceiveStream:
|
||||
|
||||
# TODO: add a predicate to maybe_open_context
|
||||
async with maybe_open_context(
|
||||
acm_func=open_order_feed,
|
||||
kwargs={
|
||||
'instrument': instrument,
|
||||
},
|
||||
key=f'{instrument}-order',
|
||||
) as (cache_hit, feed):
|
||||
if cache_hit:
|
||||
yield broadcast_receiver(feed, 10)
|
||||
else:
|
||||
yield feed
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 169 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 106 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 59 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 70 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 132 KiB |
|
|
@ -1,185 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Deribit backend.
|
||||
|
||||
'''
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional, Callable
|
||||
import time
|
||||
|
||||
import trio
|
||||
from trio_typing import TaskStatus
|
||||
import pendulum
|
||||
from rapidfuzz import process as fuzzy
|
||||
import numpy as np
|
||||
import tractor
|
||||
|
||||
from piker.brokers import open_cached_client
|
||||
from piker.log import get_logger, get_console_log
|
||||
from piker.data import ShmArray
|
||||
from piker.brokers._util import (
|
||||
BrokerError,
|
||||
DataUnavailable,
|
||||
)
from piker.accounting import MktPair
|
||||
|
||||
from cryptofeed import FeedHandler
|
||||
from cryptofeed.defines import (
|
||||
DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
|
||||
)
|
||||
from cryptofeed.symbols import Symbol
|
||||
|
||||
from .api import (
|
||||
Client, Trade,
|
||||
get_config,
|
||||
str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
|
||||
maybe_open_price_feed
|
||||
)
|
||||
|
||||
_spawn_kwargs = {
|
||||
'infect_asyncio': True,
|
||||
}
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
@acm
|
||||
async def open_history_client(
|
||||
mkt: MktPair,
|
||||
) -> tuple[Callable, dict]:
|
||||
|
||||
instrument: str = mkt.bs_fqme
|
||||
# TODO implement history getter for the new storage layer.
|
||||
async with open_cached_client('deribit') as client:
|
||||
|
||||
async def get_ohlc(
|
||||
end_dt: Optional[datetime] = None,
|
||||
start_dt: Optional[datetime] = None,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
datetime, # start
|
||||
datetime, # end
|
||||
]:
|
||||
|
||||
array = await client.bars(
|
||||
instrument,
|
||||
start_dt=start_dt,
|
||||
end_dt=end_dt,
|
||||
)
|
||||
if len(array) == 0:
|
||||
raise DataUnavailable
|
||||
|
||||
start_dt = pendulum.from_timestamp(array[0]['time'])
|
||||
end_dt = pendulum.from_timestamp(array[-1]['time'])
|
||||
|
||||
return array, start_dt, end_dt
|
||||
|
||||
yield get_ohlc, {'erlangs': 3, 'rate': 3}
|
||||
|
||||
|
||||
async def stream_quotes(
|
||||
|
||||
send_chan: trio.abc.SendChannel,
|
||||
symbols: list[str],
|
||||
feed_is_live: trio.Event,
|
||||
loglevel: str | None = None,
|
||||
|
||||
# startup sync
|
||||
task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,
|
||||
|
||||
) -> None:
|
||||
# XXX: required to propagate ``tractor`` loglevel to piker logging
|
||||
get_console_log(loglevel or tractor.current_actor().loglevel)
|
||||
|
||||
sym = symbols[0]
|
||||
|
||||
async with (
|
||||
open_cached_client('deribit') as client,
|
||||
send_chan as send_chan
|
||||
):
|
||||
|
||||
init_msgs = {
|
||||
# pass back token, and bool, signalling if we're the writer
|
||||
# and that history has been written
|
||||
sym: {
|
||||
'symbol_info': {
|
||||
'asset_type': 'option',
|
||||
'price_tick_size': 0.0005
|
||||
},
|
||||
'shm_write_opts': {'sum_tick_vml': False},
|
||||
'fqsn': sym,
|
||||
},
|
||||
}
|
||||
|
||||
nsym = piker_sym_to_cb_sym(sym)
|
||||
|
||||
async with maybe_open_price_feed(sym) as stream:
|
||||
|
||||
cache = await client.cache_symbols()
|
||||
|
||||
last_trades = (await client.last_trades(
|
||||
cb_sym_to_deribit_inst(nsym), count=1)).trades
|
||||
|
||||
if len(last_trades) == 0:
|
||||
last_trade = None
|
||||
async for typ, quote in stream:
|
||||
if typ == 'trade':
|
||||
last_trade = Trade(**(quote['data']))
|
||||
break
|
||||
|
||||
else:
|
||||
last_trade = Trade(**(last_trades[0]))
|
||||
|
||||
first_quote = {
|
||||
'symbol': sym,
|
||||
'last': last_trade.price,
|
||||
'brokerd_ts': last_trade.timestamp,
|
||||
'ticks': [{
|
||||
'type': 'trade',
|
||||
'price': last_trade.price,
|
||||
'size': last_trade.amount,
|
||||
'broker_ts': last_trade.timestamp
|
||||
}]
|
||||
}
|
||||
task_status.started((init_msgs, first_quote))
|
||||
|
||||
feed_is_live.set()
|
||||
|
||||
async for typ, quote in stream:
|
||||
topic = quote['symbol']
|
||||
await send_chan.send({topic: quote})
|
||||
|
||||
|
||||
@tractor.context
|
||||
async def open_symbol_search(
|
||||
ctx: tractor.Context,
|
||||
) -> Client:
|
||||
async with open_cached_client('deribit') as client:
|
||||
|
||||
# load all symbols locally for fast search
|
||||
cache = await client.cache_symbols()
|
||||
await ctx.started()
|
||||
|
||||
async with ctx.open_stream() as stream:
|
||||
|
||||
async for pattern in stream:
|
||||
# repack in dict form
|
||||
await stream.send(
|
||||
await client.search_symbols(pattern))
|
||||
File diff suppressed because it is too large
|
|
@ -1,134 +0,0 @@
|
|||
``ib`` backend
|
||||
--------------
|
||||
more or less the "everything broker" for traditional and international
|
||||
markets. they are the "go to" provider for automated retail trading
|
||||
and we interface to their APIs using the `ib_insync` project.
|
||||
|
||||
status
|
||||
******
|
||||
current support is *production grade* and both real-time data and order
|
||||
management should be correct and fast. this backend is used by core devs
|
||||
for live trading.
|
||||
|
||||
currently there is not yet full support for:
|
||||
- options charting and trading
|
||||
- paxos based crypto rt feeds and trading
|
||||
|
||||
|
||||
config
|
||||
******
|
||||
In order to get order mode support your ``brokers.toml``
|
||||
needs to have something like the following:
|
||||
|
||||
.. code:: toml
|
||||
|
||||
[ib]
|
||||
hosts = [
|
||||
"127.0.0.1",
|
||||
]
|
||||
# TODO: when we eventually spawn gateways in our
|
||||
# container, we can just dynamically allocate these
|
||||
# using IBC.
|
||||
ports = [
|
||||
4002,
|
||||
4003,
|
||||
4006,
|
||||
4001,
|
||||
7497,
|
||||
]
|
||||
|
||||
# XXX: for a paper account the flex web query service
|
||||
# is not supported so you have to manually download
|
||||
# an XML report and put it in a location that can be
|
||||
# accessed by the ``brokerd.ib`` backend code for parsing.
|
||||
flex_token = '1111111111111111'
|
||||
flex_trades_query_id = '6969696' # live accounts only?
|
||||
|
||||
# 3rd party web-api token
|
||||
# (XXX: not sure if this works yet)
|
||||
trade_log_token = '111111111111111'
|
||||
|
||||
# when clients are being scanned this determines
|
||||
# which clients are preferred to be used for data feeds
|
||||
# based on account names which are detected as active
|
||||
# on each client.
|
||||
prefer_data_account = [
|
||||
# this has to be first in order to make data work with dual paper + live
|
||||
'main',
|
||||
'algopaper',
|
||||
]
|
||||
|
||||
[ib.accounts]
|
||||
main = 'U69696969'
|
||||
algopaper = 'DU9696969'
|
||||
|
||||
|
||||
If everything works correctly you should see any current positions
|
||||
loaded in the pps pane on chart load and you should also be able to
|
||||
check your trade records in the file::
|
||||
|
||||
<pikerk_conf_dir>/ledgers/trades_ib_algopaper.toml
|
||||
|
||||
|
||||
An example ledger file will have entries written verbatim from the
|
||||
trade events schema:
|
||||
|
||||
.. code:: toml
|
||||
|
||||
["0000e1a7.630f5e5a.01.01"]
|
||||
secType = "FUT"
|
||||
conId = 515416577
|
||||
symbol = "MNQ"
|
||||
lastTradeDateOrContractMonth = "20221216"
|
||||
strike = 0.0
|
||||
right = ""
|
||||
multiplier = "2"
|
||||
exchange = "GLOBEX"
|
||||
primaryExchange = ""
|
||||
currency = "USD"
|
||||
localSymbol = "MNQZ2"
|
||||
tradingClass = "MNQ"
|
||||
includeExpired = false
|
||||
secIdType = ""
|
||||
secId = ""
|
||||
comboLegsDescrip = ""
|
||||
comboLegs = []
|
||||
execId = "0000e1a7.630f5e5a.01.01"
|
||||
time = 1661972086.0
|
||||
acctNumber = "DU69696969"
|
||||
side = "BOT"
|
||||
shares = 1.0
|
||||
price = 12372.75
|
||||
permId = 441472655
|
||||
clientId = 6116
|
||||
orderId = 985
|
||||
liquidation = 0
|
||||
cumQty = 1.0
|
||||
avgPrice = 12372.75
|
||||
orderRef = ""
|
||||
evRule = ""
|
||||
evMultiplier = 0.0
|
||||
modelCode = ""
|
||||
lastLiquidity = 1
|
||||
broker_time = 1661972086.0
|
||||
name = "ib"
|
||||
commission = 0.57
|
||||
realizedPNL = 243.41
|
||||
yield_ = 0.0
|
||||
yieldRedemptionDate = 0
|
||||
listingExchange = "GLOBEX"
|
||||
date = "2022-08-31T18:54:46+00:00"
|
||||
|
||||
|
||||
your ``pps.toml`` file will have position entries like,
|
||||
|
||||
.. code:: toml
|
||||
|
||||
[ib.algopaper."mnq.globex.20221216"]
|
||||
size = -1.0
|
||||
ppu = 12423.630576923071
|
||||
bs_mktid = 515416577
|
||||
expiry = "2022-12-16T00:00:00+00:00"
|
||||
clears = [
|
||||
{ dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" },
|
||||
]
|
||||
|
|
@ -1,93 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Interactive Brokers API backend.
|
||||
|
||||
Sub-modules within break into the core functionalities:
|
||||
|
||||
- ``broker.py`` part for orders / trading endpoints
|
||||
- ``feed.py`` for real-time data feed endpoints
|
||||
- ``api.py`` for the core API machinery which is ``trio``-ized
|
||||
wrapping around ``ib_insync``.
|
||||
|
||||
"""
|
||||
from .api import (
|
||||
get_client,
|
||||
)
|
||||
from .feed import (
|
||||
open_history_client,
|
||||
stream_quotes,
|
||||
)
|
||||
from .broker import (
|
||||
open_trade_dialog,
|
||||
)
|
||||
from .ledger import (
|
||||
norm_trade,
|
||||
norm_trade_records,
|
||||
tx_sort,
|
||||
)
|
||||
from .symbols import (
|
||||
get_mkt_info,
|
||||
open_symbol_search,
|
||||
_search_conf,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'get_client',
|
||||
'get_mkt_info',
|
||||
'norm_trade',
|
||||
'norm_trade_records',
|
||||
'open_trade_dialog',
|
||||
'open_history_client',
|
||||
'open_symbol_search',
|
||||
'stream_quotes',
|
||||
'_search_conf',
|
||||
'tx_sort',
|
||||
]
|
||||
|
||||
_brokerd_mods: list[str] = [
|
||||
'api',
|
||||
'broker',
|
||||
]
|
||||
|
||||
_datad_mods: list[str] = [
|
||||
'feed',
|
||||
'symbols',
|
||||
]
|
||||
|
||||
|
||||
# tractor RPC enable arg
|
||||
__enable_modules__: list[str] = (
|
||||
_brokerd_mods
|
||||
+
|
||||
_datad_mods
|
||||
)
|
||||
|
||||
# passed to ``tractor.ActorNursery.start_actor()``
|
||||
_spawn_kwargs = {
|
||||
'infect_asyncio': True,
|
||||
}
|
||||
|
||||
# annotation to let backend agnostic code
|
||||
# know if ``brokerd`` should be spawned with
|
||||
# ``tractor``'s aio mode.
|
||||
_infect_asyncio: bool = True
|
||||
|
||||
# XXX NOTE: for now we disable symcache with this backend since
|
||||
# there is no clearly simple nor practical way to download "all
|
||||
# symbology info" for all supported venues..
|
||||
_no_symcache: bool = True
|
||||
|
|
@ -1,195 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
"FLEX" report processing utils.
|
||||
|
||||
"""
|
||||
from bidict import bidict
|
||||
import pendulum
|
||||
from pprint import pformat
|
||||
from typing import Any
|
||||
|
||||
from .api import (
|
||||
get_config,
|
||||
log,
|
||||
)
|
||||
from piker.accounting import (
|
||||
open_trade_ledger,
|
||||
)
|
||||
|
||||
|
||||
def parse_flex_dt(
|
||||
record: str,
|
||||
) -> pendulum.datetime:
|
||||
'''
|
||||
Parse stupid flex record datetime stamps for the `dateTime` field..
|
||||
|
||||
'''
|
||||
date, ts = record.split(';')
|
||||
dt = pendulum.parse(date)
|
||||
ts = f'{ts[:2]}:{ts[2:4]}:{ts[4:]}'
|
||||
tsdt = pendulum.parse(ts)
|
||||
return dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second)
|
||||
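# e.g. a (hypothetical) flex stamp in 'YYYYMMDD;HHMMSS' form:
#
#   >>> parse_flex_dt('20220831;185446')
#   DateTime(2022, 8, 31, 18, 54, 46, tzinfo=Timezone('UTC'))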
|
||||
|
||||
def flex_records_to_ledger_entries(
|
||||
accounts: bidict,
|
||||
trade_entries: list[object],
|
||||
|
||||
) -> dict:
|
||||
'''
|
||||
Convert flex report entry objects into ``dict`` form, pretty much
|
||||
straight up without modification except add a `pydatetime` field
|
||||
from the parsed timestamp.
|
||||
|
||||
'''
|
||||
trades_by_account = {}
|
||||
for t in trade_entries:
|
||||
entry = t.__dict__
|
||||
|
||||
# XXX: LOL apparently ``toml`` has a bug
|
||||
# where a section key error will show up in the write
|
||||
# if you leave a table key as an `int`? So i guess
|
||||
# cast to strs for all keys..
|
||||
|
||||
# oddly for some so-called "BookTrade" entries
|
||||
# this field seems to be blank, no cuckin clue.
|
||||
# trade['ibExecID']
|
||||
tid = str(entry.get('ibExecID') or entry['tradeID'])
|
||||
# date = str(entry['tradeDate'])
|
||||
|
||||
# XXX: is it going to cause problems if an account name
|
||||
# gets lost? The user should be able to find it based
|
||||
# on the actual exec history right?
|
||||
acctid = accounts[str(entry['accountId'])]
|
||||
|
||||
# probably a flex record with a wonky non-std timestamp..
|
||||
dt = entry['pydatetime'] = parse_flex_dt(entry['dateTime'])
|
||||
entry['datetime'] = str(dt)
|
||||
|
||||
if not tid:
|
||||
# this is likely some kind of internal adjustment
|
||||
# transaction, likely one of the following:
|
||||
# - an expiry event that will show a "book trade" indicating
|
||||
# some adjustment to cash balances: zeroing or itm settle.
|
||||
# - a manual cash balance position adjustment likely done by
|
||||
# the user from the accounts window in TWS where they can
|
||||
# manually set the avg price and size:
|
||||
# https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST
|
||||
log.warning(f'Skipping ID-less ledger entry:\n{pformat(entry)}')
|
||||
continue
|
||||
|
||||
trades_by_account.setdefault(
|
||||
acctid, {}
|
||||
)[tid] = entry
|
||||
|
||||
for acctid in trades_by_account:
|
||||
trades_by_account[acctid] = dict(sorted(
|
||||
trades_by_account[acctid].items(),
|
||||
key=lambda entry: entry[1]['pydatetime'],
|
||||
))
|
||||
|
||||
return trades_by_account
|
||||
|
||||
|
||||
def load_flex_trades(
|
||||
path: str | None = None,
|
||||
|
||||
) -> dict[str, Any]:
|
||||
|
||||
from ib_insync import flexreport, util
|
||||
|
||||
conf = get_config()
|
||||
|
||||
if not path:
|
||||
# load ``brokers.toml`` and try to get the flex
|
||||
# token and query id that must be previously defined
|
||||
# by the user.
|
||||
token = conf.get('flex_token')
|
||||
if not token:
|
||||
raise ValueError(
|
||||
'You must specify a ``flex_token`` field in your '
|
||||
'``brokers.toml`` in order to load your trade log; see our '
|
||||
'instructions for how to set this up here:\n'
|
||||
'PUT LINK HERE!'
|
||||
)
|
||||
|
||||
qid = conf['flex_trades_query_id']
|
||||
|
||||
# TODO: hack this into our logging
|
||||
# system like we do with the API client..
|
||||
util.logToConsole()
|
||||
|
||||
# TODO: rewrite the query part of this with async..httpx?
|
||||
report = flexreport.FlexReport(
|
||||
token=token,
|
||||
queryId=qid,
|
||||
)
|
||||
|
||||
else:
|
||||
# XXX: another project we could potentially look at,
|
||||
# https://pypi.org/project/ibflex/
|
||||
report = flexreport.FlexReport(path=path)
|
||||
|
||||
trade_entries = report.extract('Trade')
|
||||
ln = len(trade_entries)
|
||||
log.info(f'Loaded {ln} trades from flex query')
|
||||
|
||||
trades_by_account = flex_records_to_ledger_entries(
|
||||
conf['accounts'].inverse, # reverse map to user account names
|
||||
trade_entries,
|
||||
)
|
||||
|
||||
ledger_dict: dict | None = None
|
||||
|
||||
for acctid in trades_by_account:
|
||||
trades_by_id = trades_by_account[acctid]
|
||||
|
||||
with open_trade_ledger(
|
||||
'ib',
|
||||
acctid,
|
||||
allow_from_sync_code=True,
|
||||
) as ledger_dict:
|
||||
tid_delta = set(trades_by_id) - set(ledger_dict)
|
||||
log.info(
|
||||
'New trades detected\n'
|
||||
f'{pformat(tid_delta)}'
|
||||
)
|
||||
if tid_delta:
|
||||
sorted_delta = dict(sorted(
|
||||
{tid: trades_by_id[tid] for tid in tid_delta}.items(),
|
||||
key=lambda entry: entry[1].pop('pydatetime'),
|
||||
))
|
||||
ledger_dict.update(sorted_delta)
|
||||
|
||||
return ledger_dict
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
import os
|
||||
|
||||
args = sys.argv
|
||||
if len(args) > 1:
|
||||
args = args[1:]
|
||||
for arg in args:
|
||||
path = os.path.abspath(arg)
|
||||
load_flex_trades(path=path)
|
||||
else:
|
||||
# expect brokers.toml to have an entry and
|
||||
# pull from the web service.
|
||||
load_flex_trades()
|
||||
|
|
@ -1,269 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
``ib`` utilities and hacks suitable for use in the backend and/or as
|
||||
runnable script-programs.
|
||||
|
||||
'''
|
||||
from __future__ import annotations
|
||||
from functools import partial
|
||||
from typing import (
|
||||
Literal,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
import subprocess
|
||||
|
||||
import tractor
|
||||
|
||||
from piker.brokers._util import get_logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .api import Client
|
||||
from ib_insync import IB
|
||||
|
||||
log = get_logger('piker.brokers.ib')
|
||||
|
||||
_reset_tech: Literal[
|
||||
'vnc',
|
||||
'i3ipc_xdotool',
|
||||
|
||||
# TODO: in theory we can use a different linux DE API or
|
||||
# some other type of similar window scanning/mgmt client
|
||||
# (on other OSs) to do the same.
|
||||
|
||||
] = 'vnc'
|
||||
|
||||
|
||||
async def data_reset_hack(
|
||||
# vnc_host: str,
|
||||
client: Client,
|
||||
reset_type: Literal['data', 'connection'],
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Run key combos for resetting data feeds and yield back to caller
|
||||
when complete.
|
||||
|
||||
NOTE: this is a linux-only hack around!
|
||||
|
||||
There are multiple "techs" you can use depending on your infra setup:
|
||||
|
||||
- if running ib-gw in a container with a VNC server running the most
|
||||
performant method is the `'vnc'` option.
|
||||
|
||||
- if running ib-gw/tws locally, and you are using `i3` you can use
|
||||
the ``i3ipc`` lib and ``xdotool`` to send the appropriate click
|
||||
and key-combos automatically to your local desktop's java X-apps.
|
||||
|
||||
https://interactivebrokers.github.io/tws-api/historical_limitations.html#pacing_violations
|
||||
|
||||
TODOs:
|
||||
- a return type that hopefully determines if the hack was
|
||||
successful.
|
||||
- other OS support?
|
||||
- integration with ``ib-gw`` run in docker + Xorg?
|
||||
- is it possible to offer a local server that can be accessed by
|
||||
a client? Would be sure be handy for running native java blobs
|
||||
that need to be wrangled.
|
||||
|
||||
'''
|
||||
ib_client: IB = client.ib
|
||||
|
||||
# look up any user defined vnc socket address mapped from
|
||||
# a particular API socket port.
|
||||
api_port: str = str(ib_client.client.port)
|
||||
vnc_host: str
|
||||
vnc_port: int
|
||||
vnc_sockaddr: dict[str, tuple[str, int]] | None = client.conf.get('vnc_addrs')
|
||||
|
||||
no_setup_msg: str = (
|
||||
f'No data reset hack test setup for {vnc_sockaddr}!\n'
|
||||
'See config setup tips @\n'
|
||||
'https://github.com/pikers/piker/tree/master/piker/brokers/ib'
|
||||
)
|
||||
|
||||
if not vnc_sockaddr:
|
||||
log.warning(
|
||||
no_setup_msg
|
||||
+
|
||||
'REQUIRES A `vnc_addrs: array` ENTRY'
|
||||
)
|
||||
|
||||
vnc_host, vnc_port = (vnc_sockaddr or {}).get(
|
||||
api_port,
|
||||
('localhost', 3003)
|
||||
)
|
||||
global _reset_tech
|
||||
|
||||
match _reset_tech:
|
||||
case 'vnc':
|
||||
try:
|
||||
await tractor.to_asyncio.run_task(
|
||||
partial(
|
||||
vnc_click_hack,
|
||||
host=vnc_host,
|
||||
port=vnc_port,
|
||||
)
|
||||
)
|
||||
except OSError:
|
||||
if vnc_host != 'localhost':
|
||||
log.warning(no_setup_msg)
|
||||
return False
|
||||
|
||||
try:
|
||||
import i3ipc  # noqa (dynamically checked dep)
|
||||
except ModuleNotFoundError:
|
||||
log.warning(no_setup_msg)
|
||||
return False
|
||||
|
||||
try:
|
||||
i3ipc_xdotool_manual_click_hack()
|
||||
_reset_tech = 'i3ipc_xdotool'
|
||||
return True
|
||||
except OSError:
|
||||
log.exception(no_setup_msg)
|
||||
return False
|
||||
|
||||
case 'i3ipc_xdotool':
|
||||
i3ipc_xdotool_manual_click_hack()
|
||||
|
||||
case _ as tech:
|
||||
raise RuntimeError(f'{tech} is not supported for reset tech!?')
|
||||
|
||||
# we don't really need the ``xdotool`` approach any more B)
|
||||
return True
|
||||
|
||||
|
||||
async def vnc_click_hack(
|
||||
host: str,
|
||||
port: int,
|
||||
reset_type: str = 'data'
|
||||
) -> None:
|
||||
'''
|
||||
Reset the data or network connection for the VNC attached
|
||||
ib gateway using magic combos.
|
||||
|
||||
'''
|
||||
try:
|
||||
import asyncvnc
|
||||
except ModuleNotFoundError:
|
||||
log.warning(
|
||||
"In order to leverage `piker`'s built-in data reset hacks, install "
|
||||
"the `asyncvnc` project: https://github.com/barneygale/asyncvnc"
|
||||
)
|
||||
return
|
||||
|
||||
# two different hot keys which trigger diff types of reset
|
||||
# requests B)
|
||||
key = {
|
||||
'data': 'f',
|
||||
'connection': 'r'
|
||||
}[reset_type]
|
||||
|
||||
async with asyncvnc.connect(
|
||||
host,
|
||||
port=port,
|
||||
|
||||
# TODO: doesn't work see:
|
||||
# https://github.com/barneygale/asyncvnc/issues/7
|
||||
# password='ibcansmbz',
|
||||
|
||||
) as client:
|
||||
|
||||
# move to middle of screen
|
||||
# 640x1800
|
||||
client.mouse.move(
|
||||
x=500,
|
||||
y=500,
|
||||
)
|
||||
client.mouse.click()
|
||||
client.keyboard.press('Ctrl', 'Alt', key) # keys are stacked
|
||||
|
||||
|
||||
def i3ipc_xdotool_manual_click_hack() -> None:
|
||||
'''
|
||||
Do the data reset hack but expecting a local X-window using `xdotool`.
|
||||
|
||||
'''
|
||||
import i3ipc
|
||||
i3 = i3ipc.Connection()
|
||||
|
||||
# TODO: might be worth offering some kinda api for grabbing
|
||||
# the window id from the pid?
|
||||
# https://stackoverflow.com/a/2250879
|
||||
t = i3.get_tree()
|
||||
|
||||
orig_win_id = t.find_focused().window
|
||||
|
||||
# for tws
|
||||
win_names: list[str] = [
|
||||
'Interactive Brokers', # tws running in i3
|
||||
'IB Gateway', # gw running in i3
|
||||
# 'IB', # gw running in i3 (newer version?)
|
||||
]
|
||||
|
||||
try:
|
||||
for name in win_names:
|
||||
results = t.find_titled(name)
|
||||
print(f'results for {name}: {results}')
|
||||
if results:
|
||||
con = results[0]
|
||||
print(f'Resetting data feed for {name}')
|
||||
win_id = str(con.window)
|
||||
w, h = con.rect.width, con.rect.height
|
||||
|
||||
# TODO: seems to be a few libs for python but not sure
|
||||
# if they support all the sub commands we need, order of
|
||||
# most recent commit history:
|
||||
# https://github.com/rr-/pyxdotool
|
||||
# https://github.com/ShaneHutter/pyxdotool
|
||||
# https://github.com/cphyc/pyxdotool
|
||||
|
||||
# TODO: only run the reconnect (2nd) kc on a detected
|
||||
# disconnect?
|
||||
for key_combo, timeout in [
|
||||
# only required if we need a connection reset.
|
||||
# ('ctrl+alt+r', 12),
|
||||
# data feed reset.
|
||||
('ctrl+alt+f', 6)
|
||||
]:
|
||||
subprocess.call([
|
||||
'xdotool',
|
||||
'windowactivate', '--sync', win_id,
|
||||
|
||||
# move mouse to bottom left of window (where
|
||||
# there should be nothing to click).
|
||||
'mousemove_relative', '--sync', str(w-4), str(h-4),
|
||||
|
||||
# NOTE: we may need to stick a `--retry 3` in here..
|
||||
'click', '--window', win_id,
|
||||
'--repeat', '3', '1',
|
||||
|
||||
# hackzorzes
|
||||
'key', key_combo,
|
||||
],
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
# re-activate and focus original window
|
||||
subprocess.call([
|
||||
'xdotool',
|
||||
'windowactivate', '--sync', str(orig_win_id),
|
||||
'click', '--window', str(orig_win_id), '1',
|
||||
])
|
||||
except subprocess.TimeoutExpired:
|
||||
log.exception('xdotool timed out?')
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
|
|
@ -1,529 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Trade transaction accounting and normalization.
|
||||
|
||||
'''
|
||||
from __future__ import annotations
|
||||
from bisect import insort
|
||||
from dataclasses import asdict
|
||||
from decimal import Decimal
|
||||
from functools import partial
|
||||
from pprint import pformat
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
from bidict import bidict
|
||||
from pendulum import (
|
||||
DateTime,
|
||||
parse,
|
||||
from_timestamp,
|
||||
)
|
||||
from ib_insync import (
|
||||
Contract,
|
||||
Commodity,
|
||||
Fill,
|
||||
Execution,
|
||||
CommissionReport,
|
||||
)
|
||||
|
||||
from piker.types import Struct
|
||||
from piker.data import (
|
||||
SymbologyCache,
|
||||
)
|
||||
from piker.accounting import (
|
||||
Asset,
|
||||
dec_digits,
|
||||
digits_to_dec,
|
||||
Transaction,
|
||||
MktPair,
|
||||
iter_by_dt,
|
||||
)
|
||||
from ._flex_reports import parse_flex_dt
|
||||
from ._util import log
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .api import (
|
||||
Client,
|
||||
MethodProxy,
|
||||
)
|
||||
|
||||
|
||||
tx_sort: Callable = partial(
|
||||
iter_by_dt,
|
||||
parsers={
|
||||
'dateTime': parse_flex_dt,
|
||||
'datetime': parse,
|
||||
|
||||
# XXX: for some some fucking 2022 and
|
||||
# back options records.. f@#$ me..
|
||||
'date': parse,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def norm_trade(
|
||||
tid: str,
|
||||
record: dict[str, Any],
|
||||
|
||||
# this is the dict that was returned from
|
||||
# `Client.get_mkt_pairs()` and when running offline ledger
|
||||
# processing from `.accounting`, this will be the table loaded
|
||||
# into `SymbologyCache.pairs`.
|
||||
pairs: dict[str, Struct],
|
||||
symcache: SymbologyCache | None = None,
|
||||
|
||||
) -> Transaction | None:
|
||||
|
||||
conid: str = str(record.get('conId') or record['conid'])
|
||||
bs_mktid: str = str(conid)
|
||||
|
||||
# NOTE: sometimes weird records (like BTTX?)
|
||||
# have no field for this?
|
||||
comms: float = -1 * (
|
||||
record.get('commission')
|
||||
or record.get('ibCommission')
|
||||
or 0
|
||||
)
|
||||
if not comms:
|
||||
log.warning(
|
||||
'No commissions found for record?\n'
|
||||
f'{pformat(record)}\n'
|
||||
)
|
||||
|
||||
price: float = (
|
||||
record.get('price')
|
||||
or record.get('tradePrice')
|
||||
)
|
||||
if price is None:
|
||||
log.warning(
|
||||
'No `price` field found in record?\n'
|
||||
'Skipping normalization..\n'
|
||||
f'{pformat(record)}\n'
|
||||
)
|
||||
return None
|
||||
|
||||
# the api doesn't do the -/+ on the quantity for you but flex
|
||||
# records do.. are you fucking serious ib...!?
|
||||
size: float|int = (
|
||||
record.get('quantity')
|
||||
or record['shares']
|
||||
) * {
|
||||
'BOT': 1,
|
||||
'SLD': -1,
|
||||
}[record['side']]
|
||||
|
||||
symbol: str = record['symbol']
|
||||
exch: str = (
|
||||
record.get('listingExchange')
|
||||
or record.get('primaryExchange')
|
||||
or record['exchange']
|
||||
)
|
||||
|
||||
# NOTE: remove null values since `tomlkit` can't serialize
|
||||
# them to file.
|
||||
if dnc := record.pop('deltaNeutralContract', None):
|
||||
record['deltaNeutralContract'] = dnc
|
||||
|
||||
# likely an opts contract record from a flex report..
|
||||
# TODO: no idea how to parse ^ the strike part from flex..
|
||||
# (00010000 any, or 00007500 tsla, ..)
|
||||
# we probably must do the contract lookup for this?
|
||||
if (
|
||||
' ' in symbol
|
||||
or '--' in exch
|
||||
):
|
||||
underlying, _, tail = symbol.partition(' ')
|
||||
exch: str = 'opt'
|
||||
expiry: str = tail[:6]
|
||||
# otype = tail[6]
|
||||
# strike = tail[7:]
|
||||
|
||||
log.warning(
|
||||
f'Skipping option contract -> NO SUPPORT YET!\n'
|
||||
f'{symbol}\n'
|
||||
)
|
||||
return None
|
||||
|
||||
# timestamping is way different in API records
|
||||
dtstr: str = record.get('datetime')
|
||||
date: str = record.get('date')
|
||||
flex_dtstr: str = record.get('dateTime')
|
||||
|
||||
if dtstr or date:
|
||||
dt: DateTime = parse(dtstr or date)
|
||||
|
||||
elif flex_dtstr:
|
||||
# probably a flex record with a wonky non-std timestamp..
|
||||
dt: DateTime = parse_flex_dt(record['dateTime'])
|
||||
|
||||
# special handling of symbol extraction from
|
||||
# flex records using some ad-hoc schema parsing.
|
||||
asset_type: str = (
|
||||
record.get('assetCategory')
|
||||
or record.get('secType')
|
||||
or 'STK'
|
||||
)
|
||||
|
||||
if (expiry := (
|
||||
record.get('lastTradeDateOrContractMonth')
|
||||
or record.get('expiry')
|
||||
)
|
||||
):
|
||||
expiry: str = str(expiry).strip(' ')
|
||||
# NOTE: we directly use the (simple and usually short)
|
||||
# date-string expiry token when packing the `MktPair`
|
||||
# since we want the fqme to contain *that* token.
|
||||
# It might make sense later to instead parse and then
|
||||
# render different output str format(s) for this same
|
||||
# purpose depending on asset-type-market down the road.
|
||||
# Eg. for derivs we use the short token only for fqme
|
||||
# but use the isoformat('T') for transactions and
|
||||
# account file position entries?
|
||||
# dt_str: str = pendulum.parse(expiry).isoformat('T')
|
||||
|
||||
# XXX: pretty much all legacy market assets have a fiat
|
||||
# currency (denomination) determined by their venue.
|
||||
currency: str = record['currency']
|
||||
src = Asset(
|
||||
name=currency.lower(),
|
||||
atype='fiat',
|
||||
tx_tick=Decimal('0.01'),
|
||||
)
|
||||
|
||||
match asset_type:
|
||||
case 'FUT':
|
||||
# XXX (flex) ledger entries don't necessarily have any
|
||||
# simple 3-char key.. sometimes the .symbol is some
|
||||
# weird internal key that we probably don't want in the
|
||||
# .fqme => we should probably just wrap `Contract` to
|
||||
# this like we do other crypto$ backends XD
|
||||
|
||||
# NOTE: at least older FLEX records should have
|
||||
# this field.. no idea about API entries..
|
||||
local_symbol: str | None = record.get('localSymbol')
|
||||
underlying_key: str = record.get('underlyingSymbol')
|
||||
descr: str | None = record.get('description')
|
||||
|
||||
if (
|
||||
not (
|
||||
local_symbol
|
||||
and symbol in local_symbol
|
||||
)
|
||||
and (
|
||||
descr
|
||||
and symbol not in descr
|
||||
)
|
||||
):
|
||||
con_key, exp_str = descr.split(' ')
|
||||
symbol: str = underlying_key or con_key
|
||||
|
||||
dst = Asset(
|
||||
name=symbol.lower(),
|
||||
atype='future',
|
||||
tx_tick=Decimal('1'),
|
||||
)
|
||||
|
||||
case 'STK':
|
||||
dst = Asset(
|
||||
name=symbol.lower(),
|
||||
atype='stock',
|
||||
tx_tick=Decimal('1'),
|
||||
)
|
||||
|
||||
case 'CASH':
|
||||
if currency not in symbol:
|
||||
# likely a dict-casted `Forex` contract which
|
||||
# has .symbol as the dst and .currency as the
|
||||
# src.
|
||||
name: str = symbol.lower()
|
||||
else:
|
||||
# likely a flex-report record which puts
|
||||
# EUR.USD as the symbol field and just USD in
|
||||
# the currency field.
|
||||
name: str = symbol.lower().replace(f'.{src.name}', '')
|
||||
|
||||
dst = Asset(
|
||||
name=name,
|
||||
atype='fiat',
|
||||
tx_tick=Decimal('0.01'),
|
||||
)
|
||||
|
||||
case 'OPT':
|
||||
dst = Asset(
|
||||
name=symbol.lower(),
|
||||
atype='option',
|
||||
tx_tick=Decimal('1'),
|
||||
|
||||
# TODO: we should probably always cast to the
|
||||
# `Contract` instance then dict-serialize that for
|
||||
# the `.info` field!
|
||||
# info=asdict(Option()),
|
||||
)
|
||||
|
||||
case 'CMDTY':
|
||||
from .symbols import _adhoc_symbol_map
|
||||
con_kwargs, _ = _adhoc_symbol_map[symbol.upper()]
|
||||
dst = Asset(
|
||||
name=symbol.lower(),
|
||||
atype='commodity',
|
||||
tx_tick=Decimal('1'),
|
||||
info=asdict(Commodity(**con_kwargs)),
|
||||
)
|
||||
|
||||
# try to build out piker fqme from record.
|
||||
# src: str = record['currency']
|
||||
price_tick: Decimal = digits_to_dec(dec_digits(price))
|
||||
|
||||
# NOTE: can't serlialize `tomlkit.String` so cast to native
|
||||
atype: str = str(dst.atype)
|
||||
|
||||
# if not (mkt := symcache.mktmaps.get(bs_mktid)):
|
||||
mkt = MktPair(
|
||||
bs_mktid=bs_mktid,
|
||||
dst=dst,
|
||||
|
||||
price_tick=price_tick,
|
||||
# NOTE: for "legacy" assets, volume is normally discreet, not
|
||||
# a float, but we keep a digit in case the suitz decide
|
||||
# to get crazy and change it; we'll be kinda ready
|
||||
# schema-wise..
|
||||
size_tick=Decimal('1'),
|
||||
|
||||
src=src, # XXX: normally always a fiat
|
||||
|
||||
_atype=atype,
|
||||
|
||||
venue=exch,
|
||||
expiry=expiry,
|
||||
broker='ib',
|
||||
|
||||
_fqme_without_src=(atype != 'fiat'),
|
||||
)
|
||||
|
||||
fqme: str = mkt.fqme
|
||||
|
||||
# XXX: if passed in, we fill out the symcache ad-hoc in order
|
||||
# to make downstream accounting work..
|
||||
if symcache is not None:
|
||||
orig_mkt: MktPair | None = symcache.mktmaps.get(bs_mktid)
|
||||
if (
|
||||
orig_mkt
|
||||
and orig_mkt.fqme != mkt.fqme
|
||||
):
|
||||
log.warning(
|
||||
# print(
|
||||
f'Contracts with common `conId`: {bs_mktid} mismatch..\n'
|
||||
f'{orig_mkt.fqme} -> {mkt.fqme}\n'
|
||||
# 'with DIFF:\n'
|
||||
# f'{mkt - orig_mkt}'
|
||||
)
|
||||
|
||||
symcache.mktmaps[bs_mktid] = mkt
|
||||
symcache.mktmaps[fqme] = mkt
|
||||
symcache.assets[src.name] = src
|
||||
symcache.assets[dst.name] = dst
|
||||
|
||||
# NOTE: for flex records the normal fields for defining an fqme
|
||||
# sometimes won't be available so we rely on two approaches for
|
||||
# the "reverse lookup" of piker style fqme keys:
|
||||
# - when dealing with API trade records received from
|
||||
# `IB.trades()` we do a contract lookup at the time of processing
|
||||
# - when dealing with flex records, it is assumed the record
|
||||
# is at least a day old and thus the TWS position reporting system
|
||||
# should already have entries if the pps are still open, in
|
||||
# which case, we can pull the fqme from that table (see
|
||||
# `trades_dialogue()` above).
|
||||
return Transaction(
|
||||
fqme=fqme,
|
||||
tid=tid,
|
||||
size=size,
|
||||
price=price,
|
||||
cost=comms,
|
||||
dt=dt,
|
||||
expiry=expiry,
|
||||
bs_mktid=str(conid),
|
||||
)
|
||||
|
||||
|
||||
|
||||
def norm_trade_records(
|
||||
ledger: dict[str, Any],
|
||||
symcache: SymbologyCache | None = None,
|
||||
|
||||
) -> dict[str, Transaction]:
|
||||
'''
|
||||
Normalize (xml) flex-report or (recent) API trade records into
|
||||
our ledger format with parsing for `MktPair` and `Asset`
|
||||
extraction to fill in the `Transaction.sys: MktPair` field.
|
||||
|
||||
'''
|
||||
records: list[Transaction] = []
|
||||
for tid, record in ledger.items():
|
||||
|
||||
txn = norm_trade(
|
||||
tid,
|
||||
record,
|
||||
|
||||
# NOTE: currently no symcache support
|
||||
pairs={},
|
||||
symcache=symcache,
|
||||
)
|
||||
|
||||
if txn is None:
|
||||
continue
|
||||
|
||||
# inject txns sorted by datetime
|
||||
insort(
|
||||
records,
|
||||
txn,
|
||||
key=lambda t: t.dt
|
||||
)
|
||||
|
||||
return {r.tid: r for r in records}
|
||||
|
||||
|
||||
def api_trades_to_ledger_entries(
|
||||
accounts: bidict[str, str],
|
||||
fills: list[Fill],
|
||||
|
||||
) -> dict[str, dict]:
|
||||
'''
|
||||
Convert API execution entry objects into
|
||||
flattened-``dict`` form, pretty much straight up without
|
||||
modification except add a `pydatetime` field from the parsed
|
||||
timestamp so the entries can be datetime-sorted on write.
|
||||
|
||||
'''
|
||||
trades_by_account: dict[str, dict] = {}
|
||||
for fill in fills:
|
||||
|
||||
# NOTE: for the schema, see the defn for `Fill` which is
|
||||
# a `NamedTuple` subtype
|
||||
fdict: dict = fill._asdict()
|
||||
|
||||
# flatten all (sub-)objects and convert to dicts.
|
||||
# with values packed into one top level entry.
|
||||
val: CommissionReport | Execution | Contract
|
||||
txn_dict: dict[str, Any] = {}
|
||||
for attr_name, val in fdict.items():
|
||||
match attr_name:
|
||||
# value is a `@dataclass` subtype
|
||||
case 'contract' | 'execution' | 'commissionReport':
|
||||
txn_dict.update(asdict(val))
|
||||
|
||||
case 'time':
|
||||
# ib has wack ns timestamps, or is that us?
|
||||
continue
|
||||
|
||||
# TODO: we can remove this case right since there's
|
||||
# only 4 fields on a `Fill`?
|
||||
case _:
|
||||
txn_dict[attr_name] = val
|
||||
|
||||
tid = str(txn_dict['execId'])
|
||||
dt = from_timestamp(txn_dict['time'])
|
||||
txn_dict['datetime'] = str(dt)
|
||||
acctid = accounts[txn_dict['acctNumber']]
|
||||
|
||||
# NOTE: only inserted (then later popped) for sorting below!
|
||||
txn_dict['pydatetime'] = dt
|
||||
|
||||
if not tid:
|
||||
# this is likely some kind of internal adjustment
|
||||
# transaction, likely one of the following:
|
||||
# - an expiry event that will show a "book trade" indicating
|
||||
# some adjustment to cash balances: zeroing or itm settle.
|
||||
# - a manual cash balance position adjustment likely done by
|
||||
# the user from the accounts window in TWS where they can
|
||||
# manually set the avg price and size:
|
||||
# https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST
|
||||
log.warning(
|
||||
'Skipping ID-less ledger txn_dict:\n'
|
||||
f'{pformat(txn_dict)}'
|
||||
)
|
||||
continue
|
||||
|
||||
trades_by_account.setdefault(
|
||||
acctid, {}
|
||||
)[tid] = txn_dict
|
||||
|
||||
# TODO: maybe we should just bisect.insort() into a list of
|
||||
# tuples and then return a dict of that?
|
||||
# sort entries in output by python based datetime
|
||||
for acctid in trades_by_account:
|
||||
trades_by_account[acctid] = dict(sorted(
|
||||
trades_by_account[acctid].items(),
|
||||
key=lambda entry: entry[1].pop('pydatetime'),
|
||||
))
|
||||
|
||||
return trades_by_account
|
||||
|
||||
|
||||
async def update_ledger_from_api_trades(
|
||||
fills: list[Fill],
|
||||
client: Client | MethodProxy,
|
||||
accounts_def_inv: bidict[str, str],
|
||||
|
||||
# NOTE: provided for ad-hoc insertions "as transactions are
|
||||
# processed" -> see `norm_trade()` signature requirements.
|
||||
symcache: SymbologyCache | None = None,
|
||||
|
||||
) -> tuple[
|
||||
dict[str, Transaction],
|
||||
dict[str, dict],
|
||||
]:
|
||||
# XXX; ERRGGG..
|
||||
# pack in the "primary/listing exchange" value from a
|
||||
# contract lookup since it seems this isn't available by
|
||||
# default from the `.fills()` method endpoint...
|
||||
fill: Fill
|
||||
for fill in fills:
|
||||
con: Contract = fill.contract
|
||||
conid: str = con.conId
|
||||
pexch: str | None = con.primaryExchange
|
||||
|
||||
if not pexch:
|
||||
cons = await client.get_con(conid=conid)
|
||||
if cons:
|
||||
con = cons[0]
|
||||
pexch = con.primaryExchange or con.exchange
|
||||
else:
|
||||
# for futes it seems like the primary is always empty?
|
||||
pexch: str = con.exchange
|
||||
|
||||
# pack in the ``Contract.secType``
|
||||
# entry['asset_type'] = condict['secType']
|
||||
|
||||
entries: dict[str, dict] = api_trades_to_ledger_entries(
|
||||
accounts_def_inv,
|
||||
fills,
|
||||
)
|
||||
# normalize recent session's trades to the `Transaction` type
|
||||
trans_by_acct: dict[str, dict[str, Transaction]] = {}
|
||||
|
||||
for acctid, trades_by_id in entries.items():
|
||||
# normalize to transaction form
|
||||
trans_by_acct[acctid] = norm_trade_records(
|
||||
trades_by_id,
|
||||
symcache=symcache,
|
||||
)
|
||||
|
||||
return trans_by_acct, entries
|
||||
|
@ -1,615 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Symbology search and normalization.

'''
from __future__ import annotations
from contextlib import (
    nullcontext,
)
from decimal import Decimal
import time
from typing import (
    Awaitable,
    TYPE_CHECKING,
)

from rapidfuzz import process as fuzzy
import ib_insync as ibis
import tractor
import trio

from piker.accounting import (
    Asset,
    MktPair,
    unpack_fqme,
)
from piker._cacheables import (
    async_lifo_cache,
)

from ._util import (
    log,
)

if TYPE_CHECKING:
    from .api import (
        MethodProxy,
        Client,
    )

_futes_venues = (
    'GLOBEX',
    'NYMEX',
    'CME',
    'CMECRYPTO',
    'COMEX',
    # 'CMDTY',  # special name case..
    'CBOT',  # (treasury) yield futures
)

_adhoc_cmdty_set = {
    # metals
    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
    'xauusd.cmdty',  # london gold spot ^
    'xagusd.cmdty',  # silver spot
}

# NOTE: if you aren't seeing one of these symbols' futures contracts
# show up, it's likely the `.<venue>` part is wrong!
_adhoc_futes_set = {

    # equities
    'nq.cme',
    'mnq.cme',  # micro

    'es.cme',
    'mes.cme',  # micro

    # crypto$
    'brr.cme',
    'mbt.cme',  # micro
    'ethusdrr.cme',

    # agriculture
    'he.comex',  # lean hogs
    'le.comex',  # live cattle (geezers)
    'gf.comex',  # feeder cattle (younguns)

    # raw
    'lb.comex',  # random len lumber

    'gc.comex',
    'mgc.comex',  # micro

    # oil & gas
    'cl.nymex',

    'ni.comex',  # silver futes
    'qi.comex',  # mini-silver futes

    # treasury yields
    # etfs by duration:
    # SHY -> IEI -> IEF -> TLT
    'zt.cbot',  # 2y
    'z3n.cbot',  # 3y
    'zf.cbot',  # 5y
    'zn.cbot',  # 10y
    'zb.cbot',  # 30y

    # (micros of above)
    '2yy.cbot',
    '5yy.cbot',
    '10y.cbot',
    '30y.cbot',
}


# taken from the list here:
# https://www.interactivebrokers.com/en/trading/products-spot-currencies.php
# NOTE: adjacent string literals concatenate *without* a separator, so
# each join point needs its own ', ' and the split must match it,
# otherwise entries like 'CAD' and 'CHF' fuse into one bogus key.
_adhoc_fiat_set = set((
    'USD, AED, AUD, CAD, '
    'CHF, CNH, CZK, DKK, '
    'EUR, GBP, HKD, HUF, '
    'ILS, JPY, MXN, NOK, '
    'NZD, PLN, RUB, SAR, '
    'SEK, SGD, TRY, ZAR'
).split(', ')
)
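# editor's note: a quick sanity check for the concatenation + split fix
# above; every currency code should come out as its own entry.
_fiat_check = set((
    'USD, AED, AUD, CAD, '
    'CHF, CNH, CZK, DKK'
).split(', '))
assert _fiat_check == {'USD', 'AED', 'AUD', 'CAD', 'CHF', 'CNH', 'CZK', 'DKK'}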

# manually discovered tick discrepancies,
# only god knows how or why they'd muck these up..
_adhoc_mkt_infos: dict[int | str, dict] = {
    'vtgn.nasdaq': {'price_tick': Decimal('0.01')},
}


# map of symbols to contract ids
_adhoc_symbol_map = {
    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924

    # NOTE: some cmdtys/metals don't have trade data like gold/usd:
    # https://groups.io/g/twsapi/message/44174
    'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
}
for qsn in _adhoc_futes_set:
    sym, venue = qsn.split('.')
    assert venue.upper() in _futes_venues, f'{venue}'
    _adhoc_symbol_map[sym.upper()] = (
        {'exchange': venue},
        {},
    )


# exchanges we don't support at the moment since we don't (yet) know
# how to do the symbol-contract lookup correctly, likely because we
# don't have the data feeds subscribed.
_exch_skip_list = {

    'ASX',  # aussie stocks
    'MEXI',  # mexican stocks

    # no idea
    'NSE',
    'VALUE',
    'FUNDSERV',
    'SWB2',
    'PSE',
    'PHLX',
}

# optional search config the backend can register for
# its symbol search handling (in this case we avoid
# accepting patterns before the kb has settled for more than
# a quarter second).
_search_conf = {
    'pause_period': 6 / 16,
}


@tractor.context
async def open_symbol_search(ctx: tractor.Context) -> None:
    '''
    Symbology search brokerd-endpoint.

    '''
    from .api import open_client_proxies
    from .feed import open_data_client

    # TODO: load user defined symbol set locally for fast search?
    await ctx.started({})

    async with (
        open_client_proxies() as (proxies, _),
        open_data_client() as data_proxy,
    ):
        async with ctx.open_stream() as stream:

            # select a non-history client for symbol search to lighten
            # the load on the main data node.
            proxy = data_proxy
            for name, proxy in proxies.items():
                if proxy is data_proxy:
                    continue
                break

            ib_client = proxy._aio_ns.ib
            log.info(
                f'Using API client for symbol-search\n'
                f'{ib_client}\n'
            )

            last = time.time()
            async for pattern in stream:
                log.info(f'received {pattern}')
                now: float = time.time()

                # this causes tractor hang...
                # assert 0

                assert pattern, 'IB cannot accept a blank search pattern'

                # throttle search requests to no faster than 1Hz
                diff = now - last
                if diff < 1.0:
                    log.debug('throttle sleeping')
                    await trio.sleep(diff)
                    try:
                        pattern = stream.receive_nowait()
                    except trio.WouldBlock:
                        pass

                if (
                    not pattern
                    or pattern.isspace()

                    # XXX: not sure if this is a bad assumption but it
                    # seems to make search snappier?
                    or len(pattern) < 1
                ):
                    log.warning('empty pattern received, skipping..')

                    # TODO: *BUG* if nothing is returned here the client
                    # side will cache a null set result and not show
                    # anything to the user on re-searches when this query
                    # timed out. We probably need a special "timeout" msg
                    # or something...

                    # XXX: this unblocks the far end search task which may
                    # hold up a multi-search nursery block
                    await stream.send({})

                    continue

                log.info(f'searching for {pattern}')

                last = time.time()

                # async batch search using api stocks endpoint and module
                # defined adhoc symbol set.
                stock_results = []

                async def extend_results(
                    target: Awaitable[list]
                ) -> None:
                    try:
                        results = await target
                    except tractor.trionics.Lagged:
                        print("IB SYM-SEARCH OVERRUN?!?")
                        return

                    stock_results.extend(results)

                for _ in range(10):
                    with trio.move_on_after(3) as cs:
                        async with trio.open_nursery() as sn:
                            sn.start_soon(
                                extend_results,
                                proxy.search_symbols(
                                    pattern=pattern,
                                    upto=5,
                                ),
                            )

                            # trigger async request
                            await trio.sleep(0)

                    if cs.cancelled_caught:
                        log.warning(
                            f'Search timeout? {proxy._aio_ns.ib.client}'
                        )
                        continue
                    elif stock_results:
                        break
                    # else:
                    #     await tractor.pause()

                # # match against our ad-hoc set immediately
                # adhoc_matches = fuzzy.extract(
                #     pattern,
                #     list(_adhoc_futes_set),
                #     score_cutoff=90,
                # )
                # log.info(f'fuzzy matched adhocs: {adhoc_matches}')
                # adhoc_match_results = {}
                # if adhoc_matches:
                #     # TODO: do we need to pull contract details?
                #     adhoc_match_results = {i[0]: {} for i in
                #     adhoc_matches}

                log.debug(f'fuzzy matching stocks {stock_results}')
                stock_matches = fuzzy.extract(
                    pattern,
                    stock_results,
                    score_cutoff=50,
                )

                # matches = adhoc_match_results | {
                matches = {
                    item[0]: {} for item in stock_matches
                }
                # TODO: we used to deliver contract details
                # {item[2]: item[0] for item in stock_matches}

                log.debug(f"sending matches: {matches.keys()}")
                await stream.send(matches)

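# ---- editor's sketch (illustrative, not piker API) --------------------
# The 1Hz throttle-plus-drain pattern used in `open_symbol_search()`
# generalizes; here's a standalone `trio` version of the same idea:
# sleep off the remainder of the interval, then drain anything newer
# so only the freshest queued pattern is serviced.
import time
import trio


async def throttled_consumer(
    recv: trio.MemoryReceiveChannel,
    min_period: float = 1.0,
) -> None:
    last = time.monotonic()
    async for item in recv:
        now = time.monotonic()
        if (diff := now - last) < min_period:
            await trio.sleep(min_period - diff)
            # drain anything newer that arrived while sleeping
            while True:
                try:
                    item = recv.receive_nowait()
                except trio.WouldBlock:
                    break
        last = time.monotonic()
        print(f'servicing: {item!r}')


async def _demo() -> None:
    send, recv = trio.open_memory_channel(16)
    async with trio.open_nursery() as n:
        n.start_soon(throttled_consumer, recv)
        for i in range(5):
            await send.send(f'pattern-{i}')
        await trio.sleep(2)
        n.cancel_scope.cancel()

# trio.run(_demo)
# -----------------------------------------------------------------------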
# re-mapping to piker asset type names
# https://github.com/erdewit/ib_insync/blob/master/ib_insync/contract.py#L113
_asset_type_map = {
    'STK': 'stock',
    'OPT': 'option',
    'FUT': 'future',
    'CONTFUT': 'continuous_future',
    'CASH': 'fiat',
    'IND': 'index',
    'CFD': 'cfd',
    'BOND': 'bond',
    'CMDTY': 'commodity',
    'FOP': 'futures_option',
    'FUND': 'mutual_fund',
    'WAR': 'warrant',
    'IOPT': 'warrant',
    'BAG': 'bag',
    'CRYPTO': 'crypto',  # bc it's diff than fiat?
    # 'NEWS': 'news',
}


def parse_patt2fqme(
    # client: Client,
    pattern: str,

) -> tuple[str, str, str, str]:

    # TODO: we can't use this currently because
    # ``wrapper.startTicker()`` currently caches ticker instances
    # which means getting a single quote will potentially look up
    # a quote for a ticker that is already streaming and thus run
    # into state clobbering (eg. list: Ticker.ticks). It probably
    # makes sense to try this once we get the pub-sub working on
    # individual symbols...

    # XXX UPDATE: we can probably do the tick/trades scraping
    # inside our eventkit handler instead to bypass this entirely?

    currency = ''

    # fqme parsing stage
    # ------------------
    if '.ib' in pattern:
        _, symbol, venue, expiry = unpack_fqme(pattern)

    else:
        symbol = pattern
        expiry = ''

    # # another hack for forex pairs lul.
    # if (
    #     '.idealpro' in symbol
    #     # or '/' in symbol
    # ):
    #     exch: str = 'IDEALPRO'
    #     symbol = symbol.removesuffix('.idealpro')
    #     if '/' in symbol:
    #         symbol, currency = symbol.split('/')

    # else:
    # TODO: yes, a cache..
    # try:
    #     # give the cache a go
    #     return client._contracts[symbol]
    # except KeyError:
    #     log.debug(f'Looking up contract for {symbol}')
    expiry: str = ''
    if symbol.count('.') > 1:
        symbol, _, expiry = symbol.rpartition('.')

    # use heuristics to figure out contract "type"
    symbol, venue = symbol.upper().rsplit('.', maxsplit=1)

    return symbol, currency, venue, expiry

def con2fqme(
    con: ibis.Contract,
    _cache: dict[int, tuple[str, bool]] = {},

) -> tuple[str, bool]:
    '''
    Convert contracts to fqme-style strings to be used both in
    symbol-search matching and as feed tokens passed to the front
    end data feed layer.

    Previously seen contracts are cached by id.

    '''
    # should be real volume for this contract by default
    calc_price: bool = False
    if con.conId:
        try:
            # TODO: LOL so apparently IB just changes the contract
            # ID (int) on a whim.. so we probably need to use an
            # FQME style key after all...
            return _cache[con.conId]
        except KeyError:
            pass

    suffix: str = con.primaryExchange or con.exchange
    symbol: str = con.symbol
    expiry: str = con.lastTradeDateOrContractMonth or ''

    match con:
        case ibis.Option():
            # TODO: option symbol parsing and sane display:
            symbol = con.localSymbol.replace(' ', '')

        case (
            ibis.Commodity()
            # search API endpoint returns std con box..
            | ibis.Contract(secType='CMDTY')
        ):
            # commodities and forex don't have an exchange name and
            # no real volume so we have to calculate the price
            suffix = con.secType

            # no real volume on this contract
            calc_price = True

        case ibis.Forex() | ibis.Contract(secType='CASH'):
            dst, src = con.localSymbol.split('.')
            symbol = ''.join([dst, src])
            suffix = con.exchange or 'idealpro'

            # no real volume on forex feeds..
            calc_price = True

    if not suffix:
        entry = _adhoc_symbol_map.get(
            con.symbol or con.localSymbol
        )
        if entry:
            meta, kwargs = entry
            cid = meta.get('conId')
            if cid:
                assert con.conId == meta['conId']
            suffix = meta['exchange']

    # append a `.<suffix>` to the returned symbol
    # key for derivatives, where normally it's the expiry
    # date key.
    if expiry:
        suffix += f'.{expiry}'

    fqme_key = symbol.lower()
    if suffix:
        fqme_key = '.'.join((fqme_key, suffix)).lower()

    _cache[con.conId] = fqme_key, calc_price
    return fqme_key, calc_price

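# ---- editor's sketch (hypothetical values) ----------------------------
# What `con2fqme()` yields for a couple of hand-built contracts
# (normally these come back fully qualified from IB; the conids here
# are purely illustrative):
_fx = ibis.Contract(
    secType='CASH',
    localSymbol='EUR.USD',  # IB fills this in on qualification
    exchange='IDEALPRO',
    conId=12087792,
)
# -> ('eurusd.idealpro', True): forex has no real volume feed
assert con2fqme(_fx) == ('eurusd.idealpro', True)

_stk = ibis.Contract(
    secType='STK',
    symbol='VTGN',
    primaryExchange='NASDAQ',
    conId=1,
)
# -> ('vtgn.nasdaq', False): stocks report real volume
assert con2fqme(_stk) == ('vtgn.nasdaq', False)
# -----------------------------------------------------------------------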
@async_lifo_cache()
async def get_mkt_info(
    fqme: str,

    proxy: MethodProxy | None = None,

) -> tuple[MktPair, ibis.ContractDetails]:

    if '.ib' not in fqme:
        fqme += '.ib'
    broker, pair, venue, expiry = unpack_fqme(fqme)

    proxy: MethodProxy
    if proxy is not None:
        client_ctx = nullcontext(proxy)
    else:
        from .feed import (
            open_data_client,
        )
        client_ctx = open_data_client

    async with client_ctx as proxy:
        try:
            (
                con,  # Contract
                details,  # ContractDetails
            ) = await proxy.get_sym_details(fqme=fqme)
        except ConnectionError:
            log.exception(f'Proxy is ded {proxy._aio_ns}')
            raise

        # TODO: more consistent field translation
        atype = _asset_type_map[con.secType]

        if atype == 'commodity':
            venue: str = 'cmdty'
        else:
            venue = con.primaryExchange or con.exchange

        price_tick: Decimal = Decimal(str(details.minTick))
        ib_min_tick_gt_2: Decimal = Decimal('0.01')
        if (
            price_tick < ib_min_tick_gt_2
        ):
            # TODO: we need to add some kinda dynamic rounding sys
            # to our MktPair i guess?
            # not sure where the logic should sit, but likely inside
            # the `.clearing._ems` i suppose...
            log.warning(
                'IB seems to disallow a min price tick < 0.01 '
                'when the price is > 2.0..?\n'
                f'Decreasing min tick precision for {fqme} to 0.01'
            )
            # price_tick = ib_min_tick
            # await tractor.pause()

        if atype == 'stock':
            # XXX: GRRRR they don't support fractional share sizes for
            # stocks from the API?!
            # if con.secType == 'STK':
            size_tick = Decimal('1')
        else:
            size_tick: Decimal = Decimal(
                str(details.minSize).rstrip('0')
            )
            # |-> TODO: there is also the Contract.sizeIncrement, but wtf is it?

        # NOTE: this is a duplicate of the .broker.norm_trade_records()
        # routine, we should factor all this parsing somewhere..
        expiry_str = str(con.lastTradeDateOrContractMonth)
        # if expiry:
        #     expiry_str: str = str(pendulum.parse(
        #         str(expiry).strip(' ')
        #     ))

        # TODO: currently we can't pass the fiat src asset because
        # then we'll get a `MNQUSD` request for history data..
        # we need to figure out how we're going to handle this (later?)
        # but likely we want all backends to eventually handle
        # ``dst/src.venue.`` style !?
        src = Asset(
            name=str(con.currency).lower(),
            atype='fiat',
            tx_tick=Decimal('0.01'),  # right?
        )
        dst = Asset(
            name=con.symbol.lower(),
            atype=atype,
            tx_tick=size_tick,
        )

        mkt = MktPair(
            src=src,
            dst=dst,

            price_tick=price_tick,
            size_tick=size_tick,

            bs_mktid=str(con.conId),
            venue=str(venue),
            expiry=expiry_str,
            broker='ib',

            # TODO: options contract info as str?
            # contract_info=<optionsdetails>
            _fqme_without_src=(atype != 'fiat'),
        )

        # just.. wow.
        if entry := _adhoc_mkt_infos.get(mkt.bs_fqme):
            log.warning(f'Frickin {mkt.fqme} has an adhoc {entry}..')
            new = mkt.to_dict()
            new['price_tick'] = entry['price_tick']
            new['src'] = src
            new['dst'] = dst
            mkt = MktPair(**new)

        # if possible register the bs_mktid to the just-built
        # mkt so that it can be retrieved by order mode tasks later.
        # TODO NOTE: this is going to be problematic if/when we split
        # out the datad vs. brokerd actors since the mktmap lookup
        # table will now be inaccessible..
        if proxy is not None:
            client: Client = proxy._aio_ns
            client._contracts[mkt.bs_fqme] = con
            client._cons2mkts[con] = mkt

        return mkt, details

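# editor's note: `str(details.minSize).rstrip('0')` above is safe for
# float values (the trailing '.' guards the zeros) but would mangle an
# integer min-size, eg:
assert str(0.25).rstrip('0') == '0.25'
assert str(10.0).rstrip('0') == '10.'  # Decimal('10.') == 10, ok
assert str(10).rstrip('0') == '1'      # int 10 -> '1' (!)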
@ -0,0 +1,583 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Kraken backend.

"""
from contextlib import asynccontextmanager
from dataclasses import asdict, field
from typing import List, Dict, Any, Tuple, Optional
import time

from trio_typing import TaskStatus
import trio
import arrow
import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor
from pydantic.dataclasses import dataclass
from pydantic import BaseModel
import wsproto

from .api import open_cached_client
from ._util import resproc, SymbolNotFound, BrokerError
from ..log import get_logger, get_console_log
from ..data import ShmArray
from ..data._web_bs import open_autorecon_ws

log = get_logger(__name__)


# <uri>/<version>/
_url = 'https://api.kraken.com/0'


# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('count', int),
    ('bar_wap', float),
]

# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)

_show_wap_in_history = True


_symbol_info_translation: Dict[str, str] = {
    'tick_decimals': 'pair_decimals',
}


# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(BaseModel):
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: List[int]
    # array of leverage amounts available when selling
    leverage_sell: List[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: List[Tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: List[Tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair


@dataclass
class OHLC:
    """Description of the flattened OHLC quote format.

    For schema details see:
    https://docs.kraken.com/websockets/#message-ohlc
    """
    chan_id: int  # internal kraken id
    chan_name: str  # eg. ohlc-1 (name-interval)
    pair: str  # fx pair
    time: float  # Begin time of interval, in seconds since epoch
    etime: float  # End time of interval, in seconds since epoch
    open: float  # Open price of interval
    high: float  # High price within interval
    low: float  # Low price within interval
    close: float  # Close price of interval
    vwap: float  # Volume weighted average price within interval
    volume: float  # Accumulated volume **within interval**
    count: int  # Number of trades within interval
    # (sampled) generated tick data
    ticks: List[Any] = field(default_factory=list)

class Client:

    def __init__(self) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._sesh.headers.update({
            'User-Agent':
                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
        })
        self._pairs: list[str] = []

    @property
    def pairs(self) -> Dict[str, Any]:
        if self._pairs is None:
            raise RuntimeError(
                "Make sure to run `cache_symbols()` on startup!"
            )
            # retrieve and cache all symbols

        return self._pairs

    async def _public(
        self,
        method: str,
        data: dict,
    ) -> Dict[str, Any]:
        resp = await self._sesh.post(
            path=f'/public/{method}',
            json=data,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def symbol_info(
        self,
        pair: Optional[str] = None,
    ):
        if pair is not None:
            pairs = {'pair': pair}
        else:
            pairs = None  # get all pairs

        resp = await self._public('AssetPairs', pairs)
        err = resp['error']
        if err:
            symbolname = pairs['pair'] if pair else None
            raise SymbolNotFound(f'{symbolname}.kraken')

        pairs = resp['result']

        if pair is not None:
            _, data = next(iter(pairs.items()))
            return data
        else:
            return pairs

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = None,
    ) -> Dict[str, Any]:
        if self._pairs is not None:
            data = self._pairs
        else:
            data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0]['altname']: item[0] for item in matches}

    async def bars(
        self,
        symbol: str = 'XBTUSD',
        # UTC 2017-07-02 12:53:20
        since: int = None,
        count: int = 720,  # <- max allowed per query
        as_np: bool = True,
    ) -> dict:
        if since is None:
            since = arrow.utcnow().floor('minute').shift(
                minutes=-count).timestamp()

        # UTC 2017-07-02 12:53:20 is oldest seconds value
        since = str(max(1499000000, since))
        json = await self._public(
            'OHLC',
            data={
                'pair': symbol,
                'since': since,
            },
        )
        try:
            res = json['result']
            res.pop('last')
            bars = next(iter(res.values()))

            new_bars = []

            first = bars[0]
            last_nz_vwap = first[-3]
            if last_nz_vwap == 0:
                # use close if vwap is zero
                last_nz_vwap = first[-4]

            # convert all fields to native types
            for i, bar in enumerate(bars):
                # normalize weird zero-ed vwap values..cmon kraken..
                # indicates vwap didn't change since last bar
                vwap = float(bar.pop(-3))
                if vwap != 0:
                    last_nz_vwap = vwap
                if vwap == 0:
                    vwap = last_nz_vwap

                # re-insert vwap as the last of the fields
                bar.append(vwap)

                new_bars.append(
                    (i,) + tuple(
                        ftype(bar[j]) for j, (name, ftype) in enumerate(
                            _ohlc_dtype[1:]
                        )
                    )
                )
            array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
            return array
        except KeyError:
            raise SymbolNotFound(json['error'][0] + f': {symbol}')

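# editor's sketch: the zero-vwap forward-fill in `Client.bars()` in
# isolation; a 0 vwap from kraken means "unchanged since prior bar".
_raw_vwaps = [101.0, 0, 0, 103.5, 0]
_filled, _last_nz = [], None
for _v in _raw_vwaps:
    if _v != 0:
        _last_nz = _v
    _filled.append(_v or _last_nz)
assert _filled == [101.0, 101.0, 101.0, 103.5, 103.5]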
@asynccontextmanager
async def get_client() -> Client:
    client = Client()

    # at startup, load all symbols locally for fast search
    await client.cache_symbols()

    yield client


async def stream_messages(ws):

    too_slow_count = last_hb = 0

    while True:

        with trio.move_on_after(5) as cs:
            msg = await ws.recv_msg()

        # trigger reconnection if heartbeat is laggy
        if cs.cancelled_caught:

            too_slow_count += 1

            if too_slow_count > 20:
                log.warning(
                    "Heartbeat is too slow, resetting ws connection")

                await ws._connect()
                too_slow_count = 0

            # no msg was received within the timeout so loop
            # around and wait for the next one
            continue

        if isinstance(msg, dict):
            if msg.get('event') == 'heartbeat':

                now = time.time()
                delay = now - last_hb
                last_hb = now

                # XXX: why tf is this not printing without --tl flag?
                log.debug(f"Heartbeat after {delay}")
                # print(f"Heartbeat after {delay}")

                continue

            err = msg.get('errorMessage')
            if err:
                raise BrokerError(err)
        else:
            chan_id, *payload_array, chan_name, pair = msg

            if 'ohlc' in chan_name:

                yield 'ohlc', OHLC(chan_id, chan_name, pair, *payload_array[0])

            elif 'spread' in chan_name:

                bid, ask, ts, bsize, asize = map(float, payload_array[0])

                # TODO: really makes you think IB has a horrible API...
                quote = {
                    'symbol': pair.replace('/', ''),
                    'ticks': [
                        {'type': 'bid', 'price': bid, 'size': bsize},
                        {'type': 'bsize', 'price': bid, 'size': bsize},

                        {'type': 'ask', 'price': ask, 'size': asize},
                        {'type': 'asize', 'price': ask, 'size': asize},
                    ],
                }
                yield 'l1', quote

            # elif 'book' in msg[-2]:
            #     chan_id, *payload_array, chan_name, pair = msg
            #     print(msg)

            else:
                print(f'UNHANDLED MSG: {msg}')


def normalize(
    ohlc: OHLC,
) -> dict:
    quote = asdict(ohlc)
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap

    # seriously eh? what's with this non-symmetry everywhere
    # in subscription systems...
    # XXX: piker style is always lowercase symbols.
    topic = quote['pair'].replace('/', '').lower()

    # print(quote)
    return topic, quote


def make_sub(pairs: List[str], data: Dict[str, Any]) -> Dict[str, str]:
    """Create a request subscription packet dict.

    https://docs.kraken.com/websockets/#message-subscribe

    """
    # eg. specific logic for this in kraken's sync client:
    # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
    return {
        'pair': pairs,
        'event': 'subscribe',
        'subscription': data,
    }

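# editor's note: the packet `make_sub()` builds for, eg, an OHLC
# stream over two (illustrative) pairs:
_sub = make_sub(
    ['XBT/USD', 'XMR/EUR'],
    {'name': 'ohlc', 'interval': 1},
)
assert _sub == {
    'pair': ['XBT/USD', 'XMR/EUR'],
    'event': 'subscribe',
    'subscription': {'name': 'ohlc', 'interval': 1},
}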
async def backfill_bars(

    sym: str,
    shm: ShmArray,  # type: ignore # noqa
    count: int = 10,  # NOTE: any more and we'll overrun the underlying buffer
    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

) -> None:
    """Fill historical bars into shared mem / storage afap.
    """
    with trio.CancelScope() as cs:
        async with open_cached_client('kraken') as client:
            bars = await client.bars(symbol=sym)
            shm.push(bars)
            task_status.started(cs)


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: List[str],
    shm: ShmArray,
    feed_is_live: trio.Event,
    loglevel: str = None,

    # backend specific
    sub_type: str = 'ohlc',

    # startup sync
    task_status: TaskStatus[Tuple[Dict, Dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    """Subscribe for ohlc stream of quotes for ``pairs``.

    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.
    """
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    ws_pairs = {}
    sym_infos = {}

    async with open_cached_client('kraken') as client, send_chan as send_chan:

        # keep client cached for real-time section
        for sym in symbols:

            # transform to upper since piker style is always lower
            sym = sym.upper()

            si = Pair(**await client.symbol_info(sym))  # validation
            syminfo = si.dict()
            syminfo['price_tick_size'] = 1 / 10**si.pair_decimals
            syminfo['lot_tick_size'] = 1 / 10**si.lot_decimals
            sym_infos[sym] = syminfo
            ws_pairs[sym] = si.wsname

        symbol = symbols[0].lower()

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
            },
        }

        @asynccontextmanager
        async def subscribe(ws: wsproto.WSConnection):
            # XXX: setup subs
            # https://docs.kraken.com/websockets/#message-subscribe
            # specific logic for this in kraken's shitty sync client:
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = make_sub(
                list(ws_pairs.values()),
                {'name': 'ohlc', 'interval': 1}
            )

            # TODO: we want to eventually allow unsubs which should
            # be completely fine to request from a separate task
            # since internally the ws methods appear to be FIFO
            # locked.
            await ws.send_msg(ohlc_sub)

            # trade data (aka L1)
            l1_sub = make_sub(
                list(ws_pairs.values()),
                {'name': 'spread'}  # 'depth': 10}
            )

            # pull a first quote and deliver
            await ws.send_msg(l1_sub)

            yield

            # unsub from all pairs on teardown
            await ws.send_msg({
                'pair': list(ws_pairs.values()),
                'event': 'unsubscribe',
                'subscription': ['ohlc', 'spread'],
            })

            # XXX: do we need to ack the unsub?
            # await ws.recv_msg()

        # see the tips on reconnection logic:
        # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
        async with open_autorecon_ws(
            'wss://ws.kraken.com/',
            fixture=subscribe,
        ) as ws:

            # pull a first quote and deliver
            msg_gen = stream_messages(ws)

            # TODO: use ``anext()`` when it lands in 3.10!
            typ, ohlc_last = await msg_gen.__anext__()

            topic, quote = normalize(ohlc_last)

            first_quote = {topic: quote}
            task_status.started((init_msgs, first_quote))

            # lol, only "closes" when they're margin squeezing clients ;P
            feed_is_live.set()

            # keep start of last interval for volume tracking
            last_interval_start = ohlc_last.etime

            # start streaming
            async for typ, ohlc in msg_gen:

                if typ == 'ohlc':

                    # TODO: can get rid of all this by using
                    # ``trades`` subscription...

                    # generate tick values to match time & sales pane:
                    # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
                    volume = ohlc.volume

                    # new OHLC sample interval
                    if ohlc.etime > last_interval_start:
                        last_interval_start = ohlc.etime
                        tick_volume = volume

                    else:
                        # this is the tick volume *within the interval*
                        tick_volume = volume - ohlc_last.volume

                    ohlc_last = ohlc
                    last = ohlc.close

                    if tick_volume:
                        ohlc.ticks.append({
                            'type': 'trade',
                            'price': last,
                            'size': tick_volume,
                        })

                    topic, quote = normalize(ohlc)

                elif typ == 'l1':
                    quote = ohlc
                    topic = quote['symbol'].lower()

                # XXX: format required by ``tractor.msg.pub``
                # requires a ``Dict[topic: str, quote: dict]``
                await send_chan.send({topic: quote})


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,
) -> Client:
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:

            async for pattern in stream:

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )
                # repack in dict form
                await stream.send(
                    {item[0]['altname']: item[0]
                     for item in matches}
                )

@ -1,64 +0,0 @@
``kraken`` backend
------------------
though they don't have the most liquidity of all the cexes, they sure are
accommodating to those of us who appreciate a little ``xmr``.

status
******
current support is *production grade* and both real-time data and order
management should be correct and fast. this backend is used by core devs
for live trading.


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [kraken]
    accounts.spot = 'spot'
    key_descr = "spot"
    api_key = "69696969696969696696969696969696969696969696969696969696"
    secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696"


If everything works correctly you should see any current positions
loaded in the pps pane on chart load and you should also be able to
check your trade records in the file::

    <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml


An example ledger file will have entries written verbatim from the
trade events schema:

.. code:: toml

    [TFJBKK-SMBZS-VJ4UWS]
    ordertxid = "SMBZSA-7CNQU-3HWLNJ"
    postxid = "SMBZSE-M7IF5-CFI7LT"
    pair = "XXMRZEUR"
    time = 1655691993.4133966
    type = "buy"
    ordertype = "limit"
    price = "103.97000000"
    cost = "499.99999977"
    fee = "0.80000000"
    vol = "4.80907954"
    margin = "0.00000000"
    misc = ""


your ``pps.toml`` file will have position entries like,

.. code:: toml

    [kraken.spot."xmreur.kraken"]
    size = 4.80907954
    ppu = 103.97000000
    bs_mktid = "XXMRZEUR"
    clears = [
        { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
    ]

@ -1,75 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Kraken backend.

Sub-modules within break into the core functionalities:

- .api: for the core API machinery which is generally
  an ``asks``/``trio-websocket`` implemented ``Client``.
- .broker: for orders / trading endpoints.
- .feed: for real-time and historical data query endpoints.
- .ledger: for transaction processing as it pertains to accounting.
- .symbols: for market (name) search and symbology meta-defs.

'''
from .symbols import (
    Pair,  # for symcache
    open_symbol_search,
    # required by `.accounting`, `.data`
    get_mkt_info,
)
# required by `.brokers`
from .api import (
    get_client,
)
from .feed import (
    # required by `.data`
    stream_quotes,
    open_history_client,
)
from .broker import (
    # required by `.clearing`
    open_trade_dialog,
)
from .ledger import (
    # required by `.accounting`
    norm_trade,
    norm_trade_records,
)


__all__ = [
    'get_client',
    'get_mkt_info',
    'Pair',
    'open_trade_dialog',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    'norm_trade_records',
    'norm_trade',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'broker',
    'feed',
    'symbols',
]

@ -1,703 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Core (web) API client

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
import itertools
from typing import (
    Any,
    Union,
)
import time

import httpx
import pendulum
import numpy as np
import urllib.parse
import hashlib
import hmac
import base64
import trio

from piker import config
from piker.data import (
    def_iohlcv_fields,
    match_from_pairs,
)
from piker.accounting._mktinfo import (
    Asset,
    digits_to_dec,
    dec_digits,
)
from piker.brokers._util import (
    resproc,
    SymbolNotFound,
    BrokerError,
    DataThrottle,
)
from piker.accounting import Transaction
from piker.log import get_logger
from .symbols import Pair

log = get_logger('piker.brokers.kraken')

# <uri>/<version>/
_url = 'https://api.kraken.com/0'

_headers: dict[str, str] = {
    'User-Agent': 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
}

# TODO: this is the only backend providing this right?
# in which case we should drop it from the defaults and
# instead make a custom fields descr in this module!
_show_wap_in_history = True
_symbol_info_translation: dict[str, str] = {
    'tick_decimals': 'pair_decimals',
}


def get_config() -> dict[str, Any]:
    '''
    Load our section from `piker/brokers.toml`.

    '''
    conf, path = config.load(
        conf_name='brokers',
        touch_if_dne=True,
    )
    if (section := conf.get('kraken')) is None:
        log.warning(
            f'No config section found for kraken in {path}'
        )
        return {}

    return section


def get_kraken_signature(
    urlpath: str,
    data: dict[str, Any],
    secret: str
) -> str:
    postdata = urllib.parse.urlencode(data)
    encoded = (str(data['nonce']) + postdata).encode()
    message = urlpath.encode() + hashlib.sha256(encoded).digest()

    mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
    sigdigest = base64.b64encode(mac.digest())
    return sigdigest.decode()

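# editor's sketch: the API-Sign header computed above is
# HMAC-SHA512(uri_path + SHA256(nonce + urlencoded_postdata)) keyed
# with the base64-decoded secret; a demo with dummy (non-secret) values:
_demo_secret = base64.b64encode(b'not a real kraken secret').decode()
_sig = get_kraken_signature(
    '/0/private/Balance',
    {'nonce': '1616492376594'},
    _demo_secret,
)
print(_sig)  # a base64-encoded HMAC-SHA512 digest string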
class InvalidKey(ValueError):
    '''
    EAPI:Invalid key
    This error is returned when the API key used for the call is
    either expired or disabled, please review the API key in your
    Settings -> API tab of account management or generate a new one
    and update your application.

    '''


class Client:

    # assets and mkt pairs are key-ed by kraken's ReST response
    # symbol-bs_mktids (we call them "X-keys" like fricking
    # "XXMRZEUR"). these keys are used directly since ledger endpoints
    # return transaction sets keyed with the same set!
    _Assets: dict[str, Asset] = {}
    _AssetPairs: dict[str, Pair] = {}

    # offer lookup tables for all .altname and .wsname
    # to the equivalent .xname so that various symbol-schemas
    # can be mapped to `Pair`s in the tables above.
    _altnames: dict[str, str] = {}
    _wsnames: dict[str, str] = {}

    # key-ed by `Pair.bs_fqme: str`, and thus used for search
    # allowing for lookup using piker's own FQME symbology sys.
    _pairs: dict[str, Pair] = {}
    _assets: dict[str, Asset] = {}

    def __init__(
        self,
        config: dict[str, str],
        httpx_client: httpx.AsyncClient,

        name: str = '',
        api_key: str = '',
        secret: str = ''
    ) -> None:

        self._sesh: httpx.AsyncClient = httpx_client

        self._name = name
        self._api_key = api_key
        self._secret = secret

        self.conf: dict[str, str] = config

    @property
    def pairs(self) -> dict[str, Pair]:

        if self._pairs is None:
            raise RuntimeError(
                "Client didn't run `.get_mkt_pairs()` on startup?!"
            )

        return self._pairs

    async def _public(
        self,
        method: str,
        data: dict,
    ) -> dict[str, Any]:
        resp: httpx.Response = await self._sesh.post(
            url=f'/public/{method}',
            json=data,
        )
        return resproc(resp, log)

    async def _private(
        self,
        method: str,
        data: dict,
        uri_path: str
    ) -> dict[str, Any]:
        headers = {
            'Content-Type': 'application/x-www-form-urlencoded',
            'API-Key': self._api_key,
            'API-Sign': get_kraken_signature(
                uri_path,
                data,
                self._secret,
            ),
        }
        resp: httpx.Response = await self._sesh.post(
            url=f'/private/{method}',
            data=data,
            headers=headers,
        )
        return resproc(resp, log)

    async def endpoint(
        self,
        method: str,
        data: dict[str, Any]

    ) -> dict[str, Any]:
        uri_path = f'/0/private/{method}'
        data['nonce'] = str(int(1000*time.time()))
        return await self._private(method, data, uri_path)

    async def get_balances(
        self,
    ) -> dict[str, float]:
        '''
        Return the set of asset balances for this account
        by symbol.

        '''
        resp = await self.endpoint(
            'Balance',
            {},
        )
        by_bsmktid: dict[str, dict] = resp['result']

        balances: dict = {}
        for xname, bal in by_bsmktid.items():
            asset: Asset = self._Assets[xname]

            # TODO: which KEY should we use? it's used to index
            # the `Account.pps: dict` ..
            key: str = asset.name.lower()
            # TODO: should we just return a `Decimal` here
            # or is the rounded version ok?
            balances[key] = round(
                float(bal),
                ndigits=dec_digits(asset.tx_tick)
            )

        return balances

    async def get_assets(
        self,
        reload: bool = False,

    ) -> dict[str, Asset]:
        '''
        Load and cache all asset infos and pack into
        our native ``Asset`` struct.

        https://docs.kraken.com/rest/#tag/Market-Data/operation/getAssetInfo

        return msg:
            "asset1": {
                "aclass": "string",
                "altname": "string",
                "decimals": 0,
                "display_decimals": 0,
                "collateral_value": 0,
                "status": "string"
            }

        '''
        if (
            not self._assets
            or reload
        ):
            resp = await self._public('Assets', {})
            assets: dict[str, dict] = resp['result']

            for bs_mktid, info in assets.items():

                altname: str = info['altname']
                aclass: str = info['aclass']
                asset = Asset(
                    name=altname,
                    atype=f'crypto_{aclass}',
                    tx_tick=digits_to_dec(info['decimals']),
                    info=info,
                )
                # NOTE: yes we keep 2 sets since kraken insists on
                # keeping 3 frickin sets bc apparently they have
                # no sane data engineers, who all like different
                # keys for their fricking symbology sets..
                self._Assets[bs_mktid] = asset
                self._assets[altname.lower()] = asset
                self._assets[altname] = asset

        # we return the "most native" set merged with our preferred
        # naming (which i guess is the "altname" one) since that's
        # what the symcache loader will be storing, and we need the
        # keys that are easiest to match against in any trade
        # records.
        return self._Assets | self._assets

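    # editor's note: presumed behavior of `digits_to_dec()` as used
    # above, ie kraken's integer 'decimals' field becomes the smallest
    # transaction unit (a sketch, not the actual piker helper):
    #
    #   def _digits_to_dec_sketch(ndigits: int) -> Decimal:
    #       return Decimal(1).scaleb(-ndigits)  # 8 -> Decimal('1E-8')
    #
    #   assert _digits_to_dec_sketch(8) == Decimal('0.00000001')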
    async def get_trades(
        self,
        fetch_limit: int | None = None,

    ) -> dict[str, Any]:
        '''
        Get the trades (aka cleared orders) history from the rest endpoint:
        https://docs.kraken.com/rest/#operation/getTradeHistory

        '''
        ofs = 0
        trades_by_id: dict[str, Any] = {}

        for i in itertools.count():
            if (
                fetch_limit
                and i >= fetch_limit
            ):
                break

            # increment 'ofs' pagination offset
            ofs = i*50

            resp = await self.endpoint(
                'TradesHistory',
                {'ofs': ofs},
            )
            by_id = resp['result']['trades']
            trades_by_id.update(by_id)

            # can get up to 50 results per query, see:
            # https://docs.kraken.com/rest/#tag/User-Data/operation/getTradeHistory
            if (
                len(by_id) < 50
            ):
                err = resp.get('error')
                if err:
                    raise BrokerError(err)

                # we received less than the max amount of trade
                # results so there is no more history; catch the
                # end of the trades.
                count = resp['result']['count']
                break

        # sanity check on update
        assert count == len(trades_by_id.values())
        return trades_by_id

    async def get_xfers(
        self,
        asset: str,
        src_asset: str = '',

    ) -> dict[str, Transaction]:
        '''
        Get asset balance transfer transactions.

        Currently only withdrawals are supported.

        '''
        resp = await self.endpoint(
            'WithdrawStatus',
            {'asset': asset},
        )
        try:
            xfers: list[dict] = resp['result']
        except KeyError:
            log.exception(f'Kraken suxxx: {resp}')
            return []

        # eg. resp schema:
        # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
        #     'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid':
        #     'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44',
        #     'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z',
        #     'amount': '0.00300726', 'fee': '0.00001000', 'time':
        #     1658347714, 'status': 'Success'}]}

        if xfers:
            import tractor
            await tractor.pp()

        trans: dict[str, Transaction] = {}
        for entry in xfers:
            # look up the normalized name and asset info
            asset_key: str = entry['asset']
            asset: Asset = self._Assets[asset_key]
            asset_key: str = asset.name.lower()

            # XXX: this is in the asset units (likely) so it isn't
            # quite the same as a commissions cost necessarily..)
            # TODO: also round this based on `Pair` cost precision info?
            cost = float(entry['fee'])
            # fqme: str = asset_key + '.kraken'

            tx = Transaction(
                fqme=asset_key,  # this must map to an entry in .assets!
                tid=entry['txid'],
                dt=pendulum.from_timestamp(entry['time']),
                bs_mktid=f'{asset_key}{src_asset}',
                size=-1*(
                    float(entry['amount'])
                    +
                    cost
                ),
                # since this will be treated as a "sell" it
                # shouldn't be needed to compute the breakeven price.
                price='NaN',

                # XXX: see note above
                cost=cost,

                # not a trade but a withdrawal or deposit on the
                # asset (chain) system.
                etype='transfer',

            )
            trans[tx.tid] = tx

        return trans

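    # editor's sketch: the 50-per-page 'ofs' pagination loop in
    # `get_trades()` in stripped-down form against a fake paged
    # endpoint (names hypothetical):
    #
    #   def _fetch_page(ofs: int, total: int = 120) -> list[int]:
    #       # stand-in for 'TradesHistory': at most 50 items per page
    #       return list(range(ofs, min(ofs + 50, total)))
    #
    #   _acc: list[int] = []
    #   for _i in itertools.count():
    #       _page = _fetch_page(ofs=_i * 50)
    #       _acc.extend(_page)
    #       if len(_page) < 50:  # a short page means we hit the end
    #           break
    #   assert len(_acc) == 120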
    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float,
        reqid: str = None,
        validate: bool = False  # set True to test the call without a real submission

    ) -> dict:
        '''
        Place an order and return the integer request id provided by the client.

        '''
        # Build common data dict for common keys from both endpoints
        data = {
            "pair": symbol,
            "price": str(price),
            "validate": validate
        }
        if reqid is None:
            # Build order data for kraken api
            data |= {
                "ordertype": "limit",
                "type": action,
                "volume": str(size),
            }
            return await self.endpoint('AddOrder', data)

        else:
            # Edit order data for kraken api
            data["txid"] = reqid
            return await self.endpoint('EditOrder', data)

    async def submit_cancel(
        self,
        reqid: str,
    ) -> dict:
        '''
        Send cancel request for order id ``reqid``.

        '''
        # txid is a transaction id given by kraken
        return await self.endpoint('CancelOrder', {"txid": reqid})

    async def asset_pairs(
        self,
        pair_patt: str | None = None,

    ) -> dict[str, Pair] | Pair:
        '''
        Query for a tradeable asset pair (info), or all if no input
        pattern is provided.

        https://docs.kraken.com/rest/#tag/Market-Data/operation/getTradableAssetPairs

        '''
        if not self._AssetPairs:
            # get all pairs by default, or filter
            # to whatever pattern is provided as input.
            req_pairs: dict[str, str] | None = None
            if pair_patt is not None:
                req_pairs = {'pair': pair_patt}

            resp = await self._public(
                'AssetPairs',
                req_pairs,
            )
            err = resp['error']
            if err:
                raise SymbolNotFound(pair_patt)

            # NOTE: we try to key pairs by our custom defined
            # `.bs_fqme` field since we want to offer search over
            # this pattern set, callers should fill out lookup
            # tables for kraken's bs_mktid keys to map to these
            # keys!
            # XXX: FURTHER kraken's data eng team decided to offer
            # 3 frickin market-pair-symbol key sets depending on
            # which frickin API is being used.
            # Example for the trading pair 'LTC/EUR':
            # - the "X-key" from rest eps is 'XLTCZEUR'
            # - the "websocket key" from ws msgs is 'LTC/EUR'
            # - the "altname key" also delivered in pair info is 'LTCEUR'
            for xkey, data in resp['result'].items():

                # NOTE: always cache in pairs tables for faster lookup
                pair = Pair(xname=xkey, **data)

                # register the above `Pair` structs for all
                # key-sets/monikers: a set of 4 (frickin) tables
                # acting as a combined surjection of all possible
                # (and stupid) kraken names to their `Pair` obj.
                self._AssetPairs[xkey] = pair
                self._pairs[pair.bs_fqme] = pair
                self._altnames[pair.altname] = pair
                self._wsnames[pair.wsname] = pair

        if pair_patt is not None:
            return next(iter(self._pairs.items()))[1]

        return self._AssetPairs

    async def get_mkt_pairs(
        self,
        reload: bool = False,
    ) -> dict:
        '''
        Load all market pair info, build and cache it for downstream
        use.

        Multiple pair info lookup tables (like ``._altnames:
        dict[str, str]``) are created for looking up the
        piker-native `Pair`-struct from any input of the three
        (yes, it's that idiotic..) available symbol/pair-key-sets
        that kraken frickin offers depending on the API, including
        the .altname, .wsname and the weird ass default set they
        return in ReST responses, .xname..

        '''
        if (
            not self._pairs
            or reload
        ):
            await self.asset_pairs()

        return self._AssetPairs

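    # editor's sketch: toy model of the 4 lookup tables built in
    # `asset_pairs()` for one hypothetical 'LTC/EUR' pair record (the
    # bs_fqme key form below is illustrative):
    #
    #   _pair_obj = object()  # stands in for the shared `Pair` struct
    #   _tables = {
    #       '_AssetPairs': {'XLTCZEUR': _pair_obj},       # rest "X-key"
    #       '_pairs':      {'ltceur.kraken': _pair_obj},  # piker bs_fqme
    #       '_altnames':   {'LTCEUR': _pair_obj},         # altname key
    #       '_wsnames':    {'LTC/EUR': _pair_obj},        # websocket key
    #   }
    #   # every moniker resolves to the *same* underlying struct:
    #   assert len({id(v) for t in _tables.values() for v in t.values()}) == 1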
    async def search_symbols(
        self,
        pattern: str,

    ) -> dict[str, Any]:
        '''
        Search for a symbol by "alt name"..

        It is expected that the ``Client._pairs`` table
        gets populated before conducting the underlying fuzzy-search
        over the pair-key set.

        '''
        if not len(self._pairs):
            await self.get_mkt_pairs()
            assert self._pairs, '`Client.get_mkt_pairs()` was never called!?'

        matches: dict[str, Pair] = match_from_pairs(
            pairs=self._pairs,
            query=pattern.upper(),
            score_cutoff=50,
        )

        # repack in .altname-keyed output table
        return {
            pair.altname: pair
            for pair in matches.values()
        }

    async def bars(
        self,
        symbol: str = 'XBTUSD',

        # UTC 2017-07-02 12:53:20
        since: Union[int, datetime] | None = None,
        count: int = 720,  # <- max allowed per query
        as_np: bool = True,

    ) -> dict:

        if since is None:
            since = pendulum.now('UTC').start_of('minute').subtract(
                minutes=count).timestamp()

        elif isinstance(since, int):
            since = pendulum.from_timestamp(since).timestamp()

        else:  # presumably a pendulum datetime
            since = since.timestamp()

        # UTC 2017-07-02 12:53:20 is oldest seconds value
        since = str(max(1499000000, int(since)))
        json = await self._public(
            'OHLC',
            data={
                'pair': symbol,
                'since': since,
            },
        )
        try:
            res = json['result']
            res.pop('last')
            bars = next(iter(res.values()))

            new_bars = []

            first = bars[0]
            last_nz_vwap = first[-3]
            if last_nz_vwap == 0:
                # use close if vwap is zero
                last_nz_vwap = first[-4]

            # convert all fields to native types
            for i, bar in enumerate(bars):
                # normalize weird zero-ed vwap values..cmon kraken..
                # indicates vwap didn't change since last bar
                vwap = float(bar.pop(-3))
                if vwap != 0:
                    last_nz_vwap = vwap
                if vwap == 0:
                    vwap = last_nz_vwap

                # re-insert vwap as the last of the fields
                bar.append(vwap)

                new_bars.append(
                    (i,) + tuple(
                        ftype(bar[j]) for j, (name, ftype) in enumerate(
                            def_iohlcv_fields[1:]
                        )
                    )
                )
            array = np.array(new_bars, dtype=def_iohlcv_fields) if as_np else bars
            return array
        except KeyError:
            errmsg = json['error'][0]

            if 'not found' in errmsg:
                raise SymbolNotFound(errmsg + f': {symbol}')

            elif 'Too many requests' in errmsg:
                raise DataThrottle(f'{symbol}')

            else:
                raise BrokerError(errmsg)

    @classmethod
    def to_bs_fqme(
        cls,
        pair_str: str
    ) -> str:
        '''
        Normalize symbol names to a 3x3 pair from the global
        definition map which we build out from the data retrieved from
        the 'AssetPairs' endpoint, see methods above.

        '''
        try:
            return cls._altnames[pair_str.upper()].bs_fqme
        except KeyError as ke:
            raise SymbolNotFound(f'kraken has no {ke.args[0]}')


@acm
|
||||
async def get_client() -> Client:
|
||||
|
||||
conf: dict[str, Any] = get_config()
|
||||
async with httpx.AsyncClient(
|
||||
base_url=_url,
|
||||
headers=_headers,
|
||||
|
||||
# TODO: is there a way to numerate this?
|
||||
# https://www.python-httpx.org/advanced/clients/#why-use-a-client
|
||||
# connections=4
|
||||
) as trio_client:
|
||||
if conf:
|
||||
client = Client(
|
||||
conf,
|
||||
httpx_client=trio_client,
|
||||
|
||||
# TODO: don't break these up and just do internal
|
||||
# conf lookups instead..
|
||||
name=conf['key_descr'],
|
||||
api_key=conf['api_key'],
|
||||
secret=conf['secret']
|
||||
)
|
||||
else:
|
||||
client = Client(
|
||||
conf={},
|
||||
httpx_client=trio_client,
|
||||
)
|
||||
|
||||
# at startup, load all symbols, and asset info in
|
||||
# batch requests.
|
||||
async with trio.open_nursery() as nurse:
|
||||
nurse.start_soon(client.get_assets)
|
||||
await client.get_mkt_pairs()
|
||||
|
||||
yield client
|
||||
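.. note:: a minimal usage sketch of the ``get_client()`` factory above,
   assuming it's importable as ``piker.brokers.kraken.api`` (the exact
   module path is an assumption)::

       import trio
       from piker.brokers.kraken.api import get_client

       async def main():
           # loads assets + all pair lookup tables on entry
           async with get_client() as client:
               # most recent (up to) 720 1m OHLC bars for the default pair
               bars = await client.bars('XBTUSD')
               print(bars[:3])

       trio.run(main)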
File diff suppressed because it is too large
@@ -1,415 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Real-time and historical data feed endpoints.

'''
from contextlib import (
    asynccontextmanager as acm,
    aclosing,
)
from datetime import datetime
from typing import (
    AsyncGenerator,
    Callable,
    Optional,
)
import time

import numpy as np
import pendulum
from trio_typing import TaskStatus
import trio

from piker.accounting._mktinfo import (
    MktPair,
)
from piker.brokers import (
    open_cached_client,
)
from piker.brokers._util import (
    BrokerError,
    DataThrottle,
    DataUnavailable,
)
from piker.types import Struct
from piker.data.validate import FeedInit
from piker.data._web_bs import open_autorecon_ws, NoBsWs
from .api import (
    log,
)
from .symbols import get_mkt_info


class OHLC(Struct, frozen=True):
    '''
    Description of the flattened OHLC quote format.

    For schema details see:
    https://docs.kraken.com/websockets/#message-ohlc

    '''
    chan_id: int  # internal kraken id
    chan_name: str  # eg. ohlc-1 (name-interval)
    pair: str  # fx pair

    # unpacked from array
    time: float  # begin time of interval, in seconds since epoch
    etime: float  # end time of interval, in seconds since epoch
    open: float  # open price of interval
    high: float  # high price within interval
    low: float  # low price within interval
    close: float  # close price of interval
    vwap: float  # volume weighted average price within interval
    volume: float  # accumulated volume **within interval**
    count: int  # number of trades within interval


async def stream_messages(
    ws: NoBsWs,
):
    '''
    Message stream parser and heartbeat handler.

    Deliver ws subscription messages as well as handle heartbeat logic
    through a single async generator.

    '''
    last_hb: float = 0

    async for msg in ws:
        match msg:
            case {'event': 'heartbeat'}:
                now = time.time()
                delay = now - last_hb
                last_hb = now

                # XXX: why tf is this not printing without --tl flag?
                log.debug(f"Heartbeat after {delay}")
                # print(f"Heartbeat after {delay}")

                continue

            case _:
                # passthrough sub msgs
                yield msg


async def process_data_feed_msgs(
    ws: NoBsWs,
):
    '''
    Parse and pack data feed messages.

    '''
    async with aclosing(stream_messages(ws)) as ws_stream:
        async for msg in ws_stream:
            match msg:
                case {
                    'errorMessage': errmsg
                }:
                    raise BrokerError(errmsg)

                case {
                    'event': 'subscriptionStatus',
                } as sub:
                    log.info(
                        'WS subscription is active:\n'
                        f'{sub}'
                    )
                    continue

                case [
                    chan_id,
                    *payload_array,
                    chan_name,
                    pair
                ]:
                    if 'ohlc' in chan_name:
                        array: list = payload_array[0]
                        ohlc = OHLC(
                            chan_id,
                            chan_name,
                            pair,
                            *map(float, array[:-1]),
                            count=array[-1],
                        )
                        yield 'ohlc', ohlc.copy()

                    elif 'spread' in chan_name:

                        bid, ask, ts, bsize, asize = map(
                            float, payload_array[0])

                        # TODO: really makes you think IB has a horrible API...
                        quote = {
                            'symbol': pair.replace('/', ''),
                            'ticks': [
                                {'type': 'bid', 'price': bid, 'size': bsize},
                                {'type': 'bsize', 'price': bid, 'size': bsize},

                                {'type': 'ask', 'price': ask, 'size': asize},
                                {'type': 'asize', 'price': ask, 'size': asize},
                            ],
                        }
                        yield 'l1', quote

                    # elif 'book' in msg[-2]:
                    #     chan_id, *payload_array, chan_name, pair = msg
                    #     print(msg)

                case {
                    'connectionID': conid,
                    'event': 'systemStatus',
                    'status': 'online',
                    'version': ver,
                }:
                    log.info(
                        f'Established {ver} ws connection with id: {conid}'
                    )
                    continue

                case _:
                    print(f'UNHANDLED MSG: {msg}')
                    # yield msg


def normalize(ohlc: OHLC) -> dict:
    '''
    Norm an `OHLC` msg to piker's minimal (live-)quote schema.

    '''
    quote = ohlc.to_dict()
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap
    return quote


@acm
async def open_history_client(
    mkt: MktPair,

) -> AsyncGenerator[Callable, None]:

    symbol: str = mkt.bs_mktid

    # TODO: implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:

        # lol, kraken won't send any more than the "last"
        # 720 1m bars.. so we have to just ignore further
        # requests of this type..
        queries: int = 0

        async def get_ohlc(
            timeframe: float,
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            nonlocal queries
            if (
                queries > 0
                or timeframe != 60
            ):
                raise DataUnavailable(
                    'Only a single query for 1m bars supported')

            count = 0
            while count <= 3:
                try:
                    array = await client.bars(
                        symbol,
                        since=end_dt,
                    )
                    count += 1
                    queries += 1
                    break
                except DataThrottle:
                    log.warning(f'kraken OHLC throttle for {symbol}')
                    await trio.sleep(1)

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 1, 'rate': 1}


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # backend specific
    sub_type: str = 'ohlc',

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Subscribe for ohlc stream of quotes for ``pairs``.

    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    '''

    ws_pairs: list[str] = []
    init_msgs: list[FeedInit] = []

    async with (
        send_chan as send_chan,
    ):
        for sym_str in symbols:
            mkt, pair = await get_mkt_info(sym_str)
            init_msgs.append(
                FeedInit(mkt_info=mkt)
            )

            ws_pairs.append(pair.wsname)

        @acm
        async def subscribe(ws: NoBsWs):

            # XXX: setup subs
            # https://docs.kraken.com/websockets/#message-subscribe
            # specific logic for this in kraken's sync client:
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = {
                'event': 'subscribe',
                'pair': ws_pairs,
                'subscription': {
                    'name': 'ohlc',
                    'interval': 1,
                },
            }

            # TODO: we want to eventually allow unsubs which should
            # be completely fine to request from a separate task
            # since internally the ws methods appear to be FIFO
            # locked.
            await ws.send_msg(ohlc_sub)

            # trade data (aka L1)
            l1_sub = {
                'event': 'subscribe',
                'pair': ws_pairs,
                'subscription': {
                    'name': 'spread',
                    # 'depth': 10}
                },
            }

            # pull a first quote and deliver
            await ws.send_msg(l1_sub)

            yield

            # unsub from all pairs on teardown
            if ws.connected():
                await ws.send_msg({
                    'pair': ws_pairs,
                    'event': 'unsubscribe',
                    'subscription': ['ohlc', 'spread'],
                })

                # XXX: do we need to ack the unsub?
                # await ws.recv_msg()

        # see the tips on reconnection logic:
        # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
        ws: NoBsWs
        async with (
            open_autorecon_ws(
                'wss://ws.kraken.com/',
                fixture=subscribe,
                reset_after=20,
            ) as ws,

            # avoid stream-gen closure from breaking trio..
            # NOTE: not sure this actually works XD particularly
            # if we call `ws._connect()` manually in the streaming
            # async gen..
            aclosing(process_data_feed_msgs(ws)) as msg_gen,
        ):
            # pull a first quote and deliver
            typ, ohlc_last = await anext(msg_gen)
            quote = normalize(ohlc_last)

            task_status.started((init_msgs, quote))
            feed_is_live.set()

            # keep start of last interval for volume tracking
            last_interval_start: float = ohlc_last.etime

            # start streaming
            topic: str = mkt.bs_fqme
            async for typ, quote in msg_gen:
                match typ:

                    # TODO: can we get rid of all this by using
                    # a ``trades`` subscription..? Not sure why this
                    # wasn't used originally? (music queues) zoltannn..
                    # https://docs.kraken.com/websockets/#message-trade
                    case 'ohlc':
                        # generate tick values to match time & sales pane:
                        # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
                        volume = quote.volume

                        # new OHLC sample interval
                        if quote.etime > last_interval_start:
                            last_interval_start: float = quote.etime
                            tick_volume: float = volume

                        else:
                            # this is the tick volume *within the interval*
                            tick_volume: float = volume - ohlc_last.volume

                        ohlc_last = quote
                        last = quote.close

                        quote = normalize(quote)
                        ticks = quote.setdefault(
                            'ticks',
                            [],
                        )
                        if tick_volume:
                            ticks.append({
                                'type': 'trade',
                                'price': last,
                                'size': tick_volume,
                            })

                    case 'l1':
                        # passthrough quote msg
                        pass

                    case _:
                        log.warning(f'Unknown WSS message: {typ}, {quote}')

                await send_chan.send({topic: quote})
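.. note:: the intra-interval "tick volume" trick above (new cumulative
   volume minus the previous msg's cumulative volume, reset on each new
   bar) can be sanity-checked standalone; a minimal sketch with
   hand-rolled sample values (not real kraken data)::

       # (etime, cumulative volume) snapshots across a 1m boundary
       msgs = [
           (1688000060.0, 1.0),
           (1688000060.0, 1.5),
           (1688000120.0, 0.25),  # next interval begins
       ]

       last_etime, last_vol = msgs[0]
       for etime, vol in msgs[1:]:
           if etime > last_etime:
               tick_vol = vol  # fresh interval: all volume is new
               last_etime = etime
           else:
               tick_vol = vol - last_vol  # delta within the same interval
           last_vol = vol
           print(tick_vol)  # -> 0.5 then 0.25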
@@ -1,269 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Trade transaction accounting and normalization.

'''
import math
from pprint import pformat
from typing import (
    Any,
)

import pendulum

from piker.accounting import (
    Transaction,
    Position,
    Account,
    get_likely_pair,
    TransactionLedger,
    # MktPair,
)
from piker.types import Struct
from piker.data import (
    SymbologyCache,
)
from .api import (
    log,
    Client,
    Pair,
)
# from .feed import get_mkt_info


def norm_trade(
    tid: str,
    record: dict[str, Any],

    # this is the dict that was returned from
    # `Client.get_mkt_pairs()` and when running offline ledger
    # processing from `.accounting`, this will be the table loaded
    # into `SymbologyCache.pairs`.
    pairs: dict[str, Struct],
    symcache: SymbologyCache | None = None,

) -> Transaction:

    size: float = float(record.get('vol')) * {
        'buy': 1,
        'sell': -1,
    }[record['type']]

    # NOTE: this value may be either the websocket OR the rest schema
    # so we need to detect the key format and then choose the
    # correct symbol lookup table to eventually get a ``Pair``..
    # See internals of `Client.asset_pairs()` for deats!
    src_pair_key: str = record['pair']

    # XXX: kraken's data engineering is soo bad they require THREE
    # different pair schemas (more or less seemingly tied to
    # transport-APIs)..LITERALLY they return different market id
    # pairs in the ledger endpoints vs. the websocket event subs..
    # lookup the pair using the appropriately provided table depending
    # on API-key-schema..
    pair: Pair = pairs[src_pair_key]
    fqme: str = pair.bs_fqme.lower() + '.kraken'

    return Transaction(
        fqme=fqme,
        tid=tid,
        size=size,
        price=float(record['price']),
        cost=float(record['fee']),
        dt=pendulum.from_timestamp(float(record['time'])),
        bs_mktid=pair.bs_mktid,
    )


async def norm_trade_records(
    ledger: dict[str, Any],
    client: Client,
    api_name_set: str = 'xname',

) -> dict[str, Transaction]:
    '''
    Loop through an input ``dict`` of trade records
    and convert them to ``Transactions``.

    '''
    records: dict[str, Transaction] = {}
    for tid, record in ledger.items():

        # manual_fqme: str = f'{bs_mktid.lower()}.kraken'
        # mkt: MktPair = (await get_mkt_info(manual_fqme))[0]
        # fqme: str = mkt.fqme
        # assert fqme == manual_fqme
        pairs: dict[str, Pair] = {
            'xname': client._AssetPairs,
            'wsname': client._wsnames,
            'altname': client._altnames,
        }[api_name_set]

        records[tid] = norm_trade(
            tid,
            record,
            pairs=pairs,
        )

    return records


def has_pp(
    acnt: Account,
    src_fiat: str,
    dst: str,
    size: float,

) -> Position | None:

    src2dst: dict[str, str] = {}
    for bs_mktid in acnt.pps:
        likely_pair = get_likely_pair(
            src_fiat,
            dst,
            bs_mktid,
        )
        if likely_pair:
            src2dst[src_fiat] = dst

    for src, dst in src2dst.items():
        pair: str = f'{dst}{src_fiat}'
        pos: Position = acnt.pps.get(pair)
        if (
            pos
            and math.isclose(pos.size, size)
        ):
            return pos

        elif (
            size == 0
            and pos.size
        ):
            log.warning(
                f'`kraken` account says you have a ZERO '
                f'balance for {bs_mktid}:{pair}\n'
                f'but piker seems to think `{pos.size}`\n'
                'This is likely a discrepancy in piker '
                'accounting if the above number is '
                "large, though it's likely due to a lack "
                'of tracking xfer fees..'
            )
            return pos

    return None  # indicate no entry found


# TODO: factor most of this "account updating from txns" into the
# `Account` impl so as to provide for hiding the mostly
# cross-provider updates from txn sets
async def verify_balances(
    acnt: Account,
    src_fiat: str,
    balances: dict[str, float],
    client: Client,
    ledger: TransactionLedger,
    ledger_trans: dict[str, Transaction],  # from toml
    api_trans: dict[str, Transaction],  # from API

    simulate_pp_update: bool = False,

) -> None:
    for dst, size in balances.items():

        # we don't care about tracking positions
        # in the user's source fiat currency.
        if (
            dst == src_fiat
            or not any(
                dst in bs_mktid for bs_mktid in acnt.pps
            )
        ):
            log.warning(
                f'Skipping balance `{dst}`:{size} for position calcs!'
            )
            continue

        # we have a balance for which there is no pos entry
        # - we likely have to update from the ledger?
        if not has_pp(acnt, src_fiat, dst, size):
            updated = acnt.update_from_ledger(
                ledger_trans,
                symcache=ledger.symcache,
            )
            log.info(f'Updated pps from ledger:\n{pformat(updated)}')

        # FIRST try reloading from API records
        if (
            not has_pp(acnt, src_fiat, dst, size)
            and not simulate_pp_update
        ):
            acnt.update_from_ledger(
                api_trans,
                symcache=ledger.symcache,
            )

            # get transfers to make sense of abs
            # balances.
            # NOTE: we do this after ledger and API
            # loading since we might not have an
            # entry in the
            # ``account.kraken.spot.toml`` for the
            # necessary pair yet and thus this
            # likely pair grabber will likely fail.
            if not has_pp(acnt, src_fiat, dst, size):
                for bs_mktid in acnt.pps:
                    likely_pair: str | None = get_likely_pair(
                        src_fiat,
                        dst,
                        bs_mktid,
                    )
                    if likely_pair:
                        break
                else:
                    raise ValueError(
                        'Could not find a position pair in '
                        'ledger for likely withdrawal '
                        f'candidate: {dst}'
                    )

                # this was likely a pos that had a withdrawal
                # of the dst asset out of the account.
                if likely_pair:
                    xfer_trans = await client.get_xfers(
                        dst,

                        # TODO: not all src assets are
                        # 3 chars long...
                        src_asset=likely_pair[3:],
                    )
                    if xfer_trans:
                        updated = acnt.update_from_ledger(
                            xfer_trans,
                            cost_scalar=1,
                            symcache=ledger.symcache,
                        )
                        log.info(
                            f'Updated {dst} from transfers:\n'
                            f'{pformat(updated)}'
                        )

            if not has_pp(acnt, src_fiat, dst, size):
                raise ValueError(
                    'Could not reproduce balance:\n'
                    f'dst: {dst}, {size}\n'
                )
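.. note:: the buy/sell sign-flip at the top of ``norm_trade()`` is just
   a mapping from kraken's ``type`` field to a signed size; a tiny
   standalone check with a made-up trade record (field values are
   illustrative only)::

       record = {'type': 'sell', 'vol': '0.1'}
       size = float(record['vol']) * {'buy': 1, 'sell': -1}[record['type']]
       assert size == -0.1  # sells carry negative size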
@@ -1,206 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Symbology defs and search.

'''
from decimal import Decimal

import tractor
from rapidfuzz import process as fuzzy

from piker._cacheables import (
    async_lifo_cache,
)
from piker.accounting._mktinfo import (
    digits_to_dec,
)
from piker.brokers import (
    open_cached_client,
    SymbolNotFound,
)
from piker.types import Struct
from piker.accounting._mktinfo import (
    Asset,
    MktPair,
    unpack_fqme,
)


# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
    xname: str  # idiotic bs_mktid equiv i guess?
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    cost_decimals: int
    costmin: float
    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair
    tick_size: float  # min price step size
    status: str

    short_position_limit: float = 0
    long_position_limit: float = float('inf')

    # TODO: should we make this a literal NamespacePath ref?
    ns_path: str = 'piker.brokers.kraken:Pair'

    @property
    def bs_mktid(self) -> str:
        '''
        Kraken seems to index its market symbol sets in
        transaction ledgers using the key returned from rest
        queries.. so use that since apparently they can't
        make up their minds on a better key set XD

        '''
        return self.xname

    @property
    def price_tick(self) -> Decimal:
        return digits_to_dec(self.pair_decimals)

    @property
    def size_tick(self) -> Decimal:
        return digits_to_dec(self.lot_decimals)

    @property
    def bs_dst_asset(self) -> str:
        dst, _ = self.wsname.split('/')
        return dst

    @property
    def bs_src_asset(self) -> str:
        _, src = self.wsname.split('/')
        return src

    @property
    def bs_fqme(self) -> str:
        '''
        Basically the `.altname` but with special '.' handling and
        `.SPOT` suffix appending (for future multi-venue support).

        '''
        dst, src = self.wsname.split('/')
        # XXX: omg for stupid shite like ETH2.S/ETH..
        dst = dst.replace('.', '-')
        return f'{dst}{src}.SPOT'


@tractor.context
async def open_symbol_search(ctx: tractor.Context) -> None:
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.get_mkt_pairs()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:
            async for pattern in stream:
                await stream.send(
                    await client.search_symbols(pattern)
                )


@async_lifo_cache()
async def get_mkt_info(
    fqme: str,

) -> tuple[MktPair, Pair]:
    '''
    Query for and return a `MktPair` and backend-native `Pair` (or
    wtv else) info.

    If more than one fqme is provided return a ``dict`` of native
    key-strs to `MktPair`s.

    '''
    venue: str = 'spot'
    expiry: str = ''
    if '.kraken' not in fqme:
        fqme += '.kraken'

    broker, pair, venue, expiry = unpack_fqme(fqme)
    venue: str = venue or 'spot'

    if venue.lower() != 'spot':
        raise SymbolNotFound(
            'kraken only supports spot markets right now!\n'
            f'{fqme}\n'
        )

    async with open_cached_client('kraken') as client:

        # uppercase since kraken bs_mktid is always upper
        # bs_fqme, _, broker = fqme.partition('.')
        # pair_str: str = bs_fqme.upper()
        pair_str: str = f'{pair}.{venue}'

        pair: Pair | None = client._pairs.get(pair_str.upper())
        if not pair:
            bs_fqme: str = client.to_bs_fqme(pair_str)
            pair: Pair = client._pairs[bs_fqme]

        if not (assets := client._assets):
            assets: dict[str, Asset] = await client.get_assets()

        dst_asset: Asset = assets[pair.bs_dst_asset]
        src_asset: Asset = assets[pair.bs_src_asset]

        mkt = MktPair(
            dst=dst_asset,
            src=src_asset,

            price_tick=pair.price_tick,
            size_tick=pair.size_tick,
            bs_mktid=pair.bs_mktid,

            expiry=expiry,
            venue=venue or 'spot',

            # TODO: futes
            # _atype=_atype,

            broker='kraken',
        )
        return mkt, pair
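.. note:: the ``Pair.bs_fqme`` dst/src split-and-suffix logic above is
   easy to check in isolation; a sketch using a staking-style wsname
   (values illustrative)::

       wsname = 'ETH2.S/ETH'
       dst, src = wsname.split('/')
       dst = dst.replace('.', '-')  # '.' is reserved in fqme keys
       assert f'{dst}{src}.SPOT' == 'ETH2-SETH.SPOT'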
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

@@ -19,6 +19,7 @@ Questrade API backend.
"""
from __future__ import annotations
import inspect
import contextlib
import time
from datetime import datetime
from functools import partial

@@ -31,26 +32,25 @@ from typing import (
    Callable,
)

import pendulum
import arrow
import trio
import tractor
from async_generator import asynccontextmanager
import pandas as pd
import numpy as np
import wrapt
import asks

from ..calc import humanize, percent_change
from . import open_cached_client
from piker._cacheables import async_lifo_cache
from .. import config
from . import config
from ._util import resproc, BrokerError, SymbolNotFound
from ..log import (
    colorize_json,
)
from ._util import (
    log,
    get_console_log,
)
from ..log import get_logger, colorize_json, get_console_log
from .._async_utils import async_lifo_cache
from . import get_brokermod
from . import api


log = get_logger(__name__)

_use_practice_account = False
_refresh_token_ep = 'https://{}login.questrade.com/oauth2/'

@@ -602,16 +602,12 @@ class Client:
        sid = sids[symbol]

        # get last market open end time
        est_end = now = pendulum.now('UTC').in_timezone(
            'America/New_York').start_of('minute')

        est_end = now = arrow.utcnow().to('US/Eastern').floor('minute')
        # on non-paid feeds we can't retrieve the first 15 mins
        wd = now.isoweekday()
        if wd > 5:
            quotes = await self.quote([symbol])
            est_end = pendulum.parse(
                quotes[0]['lastTradeTime']
            )
            est_end = arrow.get(quotes[0]['lastTradeTime'])
            if est_end.hour == 0:
                # XXX don't bother figuring out extended hours for now
                est_end = est_end.replace(hour=17)

@@ -672,7 +668,7 @@ def get_OHLCV(
    """
    del bar['end']
    del bar['VWAP']
    bar['start'] = pendulum.from_timestamp(bar['start']) / 10**9
    bar['start'] = pd.Timestamp(bar['start']).value/10**9
    return tuple(bar.values())


@@ -1201,7 +1197,7 @@ async def stream_quotes(
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel)

    async with open_cached_client('questrade') as client:
    async with api.open_cached_client('questrade') as client:
        if feed_type == 'stock':
            formatter = format_stock_quote
            get_quotes = await stock_quoter(client, symbols)
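.. note:: the pendulum-for-arrow swap in the hunk above is a
   like-for-like timezone floor; a side-by-side sketch (both
   expressions yield the current minute in NY time, and
   'US/Eastern' is an alias for 'America/New_York')::

       import pendulum

       # new-style (pendulum) expression:
       est_now = pendulum.now('UTC').in_timezone(
           'America/New_York').start_of('minute')

       # old-style (arrow) equivalent it replaces:
       #   arrow.utcnow().to('US/Eastern').floor('minute')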
@@ -27,13 +27,12 @@ from typing import List
from async_generator import asynccontextmanager
import asks

from ._util import (
    resproc,
    BrokerError,
    log,
)
from ..log import get_logger
from ._util import resproc, BrokerError
from ..calc import percent_change

log = get_logger(__name__)

_service_ep = 'https://api.robinhood.com'


@@ -66,10 +65,8 @@ class Client:
        self.api = _API(self._sess)

    def _zip_in_order(self, symbols: [str], quotes: List[dict]):
        return {
            quote.get('symbol', sym) if quote else sym: quote
            for sym, quote in zip(symbols, quotes)
        }
        return {quote.get('symbol', sym) if quote else sym: quote
                for sym, quote in zip(symbols, results_dict)}

    async def quote(self, symbols: [str]):
        """Retrieve quotes for a list of ``symbols``.
@@ -20,84 +20,30 @@ Handy financial calculations.
import math
import itertools

from bidict import bidict


_mag2suffix = bidict({3: 'k', 6: 'M', 9: 'B'})


def humanize(
    number: float,
    digits: int = 1

) -> str:
    '''
    Convert large numbers to something with at most ``digits`` and
def humanize(number, digits=1):
    """Convert large numbers to something with at most 3 digits and
    a letter suffix (eg. k: thousand, M: million, B: billion).

    '''
    """
    try:
        float(number)
    except ValueError:
        return '0'

        return 0
    if not number or number <= 0:
        return str(round(number, ndigits=digits))

    mag = round(math.log(number, 10))
        return number
    mag2suffix = {3: 'k', 6: 'M', 9: 'B'}
    mag = math.floor(math.log(number, 10))
    if mag < 3:
        return str(round(number, ndigits=digits))

    maxmag = max(
        itertools.takewhile(
            lambda key: mag >= key, _mag2suffix
        )
    )

    return "{value}{suffix}".format(
        value=round(number/10**maxmag, ndigits=digits),
        suffix=_mag2suffix[maxmag],
    )
        return number
    maxmag = max(itertools.takewhile(lambda key: mag >= key, mag2suffix))
    return "{:.{digits}f}{}".format(
        number/10**maxmag, mag2suffix[maxmag], digits=digits)


def puterize(

    text: str,
    digits: int = 1,

) -> float:
    '''Inverse of ``humanize()`` above.

    '''
    try:
        suffix = str(text)[-1]
        mult = _mag2suffix.inverse[suffix]
        value = text.rstrip(suffix)
        return round(float(value) * 10**mult, ndigits=digits)

    except KeyError:
        # no matching suffix, try just the value
        return float(text)


def pnl(

    init: float,
    new: float,

) -> float:
    '''Calculate the percentage change of some ``new`` value
def percent_change(init, new):
    """Calculate the percentage change of some ``new`` value
    from some initial value, ``init``.

    '''
    """
    if not (init and new):
        return 0

    return (new - init) / init


def percent_change(
    init: float,
    new: float,
) -> float:
    return pnl(init, new) * 100.
    return (new - init) / init * 100.
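.. note:: the new ``humanize()``/``puterize()`` pair above is
   (approximately) inverse; a quick sketch of the expected round-trip
   behaviour, reusing the same suffix table::

       from bidict import bidict

       _mag2suffix = bidict({3: 'k', 6: 'M', 9: 'B'})

       # humanize-style: 1_234_000 has magnitude 6 -> '1.2M'
       assert round(1_234_000 / 10**6, ndigits=1) == 1.2
       assert _mag2suffix[6] == 'M'

       # puterize-style inverse: '1.2M' -> 1_200_000.0
       mult = _mag2suffix.inverse['M']
       assert round(float('1.2') * 10**mult, ndigits=1) == 1_200_000.0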
@@ -1,49 +0,0 @@
piker.clearing
______________
trade execution-n-control subsys for both live and paper trading as
well as algo-trading manual override/interaction across any backend
broker and data provider.

avail UIs
*********

order ctl
---------
the `piker.clearing` subsys is exposed mainly through
the `piker chart` GUI as a "chart trader" style UX and
is automatically enabled whenever a chart is opened.

.. ^TODO, more prose here!

the "manual" order control features are exposed via the
`piker.ui.order_mode` API and can pretty much always be
used (at least) in simulated-trading mode, aka "paper"-mode, and
the micro-manual is as follows:

``order_mode`` (
    edge triggered activation by any of the following keys,
    ``mouse-click`` on y-level to submit at that price
):

    - ``f``/ ``ctl-f`` to stage buy
    - ``d``/ ``ctl-d`` to stage sell
    - ``a`` to stage alert


``search_mode`` (
    ``ctl-l`` or ``ctl-space`` to open,
    ``ctl-c`` or ``ctl-space`` to close
) :

    - begin typing to have symbol search automatically lookup
      symbols from all loaded backend (broker) providers
    - arrow keys and mouse click to navigate selection
    - vi-like ``ctl-[hjkl]`` for navigation


position (pp) mgmt
------------------
you can also configure your position allocation limits from the
sidepane.

.. ^TODO, explain and provide tut once more refined!
@@ -18,38 +18,3 @@
Market machinery for order executions, book, management.

"""
from ..log import get_logger
from ._client import (
    open_ems,
    OrderClient,
)
from ._ems import (
    open_brokerd_dialog,
)
from ._util import OrderDialogs
from ._messages import (
    Order,
    Status,
    Cancel,

    # TODO: deprecate these and replace end-2-end with
    # client-side-dialog set above B)
    # https://github.com/pikers/piker/issues/514
    BrokerdPosition
)


__all__ = [
    'FeeModel',
    'open_ems',
    'OrderClient',
    'open_brokerd_dialog',
    'OrderDialogs',
    'Order',
    'Status',
    'Cancel',
    'BrokerdPosition'
]

log = get_logger(__name__)
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -18,284 +18,228 @@
Orders and execution client API.

"""
from __future__ import annotations
from contextlib import asynccontextmanager as acm
from contextlib import asynccontextmanager
from typing import Dict
from pprint import pformat
from typing import TYPE_CHECKING
from dataclasses import dataclass, field

import trio
import tractor
from tractor.trionics import broadcast_receiver

from ._util import (
    log,  # sub-sys logger
)
from piker.types import Struct
from ..service import maybe_open_emsd
from ._messages import (
    Order,
    Cancel,
    BrokerdPosition,
)

if TYPE_CHECKING:
    from ._messages import (
        Status,
    )
from ..data._source import Symbol
from ..log import get_logger
from ._ems import _emsd_main
from .._daemon import maybe_open_emsd
from ._messages import Order, Cancel


class OrderClient(Struct):
    '''
    EMS-client-side order book ctl and tracking.
log = get_logger(__name__)

    (A)sync API for submitting orders and alerts to the `emsd` service;
    this is the main control for execution management from client code.

    '''
    # IPC stream to `emsd` actor
    _ems_stream: tractor.MsgStream
@dataclass
class OrderBook:
    """Buy-side (client-side ?) order book ctl and tracking.

    A style similar to "model-view" is used here where this api is
    provided as a supervised control for an EMS actor which does all the
    hard/fast work of talking to brokers/exchanges to conduct
    executions.

    Currently, this is mostly for keeping local state to match the EMS
    and use received events to trigger graphics updates.

    """
    # mem channels used to relay order requests to the EMS daemon
    _to_relay_task: trio.abc.SendChannel
    _from_sync_order_client: trio.abc.ReceiveChannel
    _to_ems: trio.abc.SendChannel
    _from_order_book: trio.abc.ReceiveChannel

    # history table
    _sent_orders: dict[str, Order] = {}
    _sent_orders: Dict[str, Order] = field(default_factory=dict)
    _ready_to_receive: trio.Event = trio.Event()

    def send_nowait(
        self,
        msg: Order | dict,
    def send(

    ) -> dict | Order:
        '''
        Sync version of ``.send()``.

        '''
        self._sent_orders[msg.oid] = msg
        self._to_relay_task.send_nowait(msg)
        return msg

    async def send(
        self,
        msg: Order | dict,

    ) -> dict | Order:
        '''
        Send a new order msg async to the `emsd` service.

        '''
        self._sent_orders[msg.oid] = msg
        await self._ems_stream.send(msg)
        return msg

    def update_nowait(
        self,
        uuid: str,
        **data: dict,
        symbol: str,
        brokers: list[str],
        price: float,
        size: float,
        action: str,
        exec_mode: str,

    ) -> dict:
        '''
        Sync version of ``.update()``.

        '''
        cmd = self._sent_orders[uuid]
        msg = cmd.copy(update=data)
        self._sent_orders[uuid] = msg
        self._to_relay_task.send_nowait(msg)
        return msg

    async def update(
        self,
        uuid: str,
        **data: dict,
    ) -> dict:
        '''
        Update an existing order dialog with a msg updated from
        ``update`` kwargs.

        '''
        cmd = self._sent_orders[uuid]
        msg = cmd.copy(update=data)
        self._sent_orders[uuid] = msg
        await self._ems_stream.send(msg)
        return msg

    def _mk_cancel_msg(
        self,
        uuid: str,
    ) -> Cancel:
        cmd = self._sent_orders.get(uuid)
        if not cmd:
            log.error(
                f'Unknown order {uuid}!?\n'
                f'Maybe there is a stale entry or line?\n'
                f'You should report this as a bug!'
            )
            return

        fqme = str(cmd.symbol)
        return Cancel(
        msg = Order(
            action=action,
            price=price,
            size=size,
            symbol=symbol,
            brokers=brokers,
            oid=uuid,
            symbol=fqme,
            exec_mode=exec_mode,  # dark or live
        )

    def cancel_nowait(
        self._sent_orders[uuid] = msg
        self._to_ems.send_nowait(msg.dict())
        return msg

    def update(
        self,
        uuid: str,
        **data: dict,
    ) -> dict:
        cmd = self._sent_orders[uuid]
        msg = cmd.dict()
        msg.update(data)
        self._sent_orders[uuid] = Order(**msg)
        self._to_ems.send_nowait(msg)
        return cmd

    ) -> None:
        '''
        Sync version of ``.cancel()``.
    def cancel(self, uuid: str) -> bool:
        """Cancel an order (or alert) in the EMS.

        '''
        self._to_relay_task.send_nowait(
            self._mk_cancel_msg(uuid)
        """
        cmd = self._sent_orders[uuid]
        msg = Cancel(
            oid=uuid,
            symbol=cmd.symbol,
        )
        self._to_ems.send_nowait(msg.dict())


_orders: OrderBook = None


def get_orders(
    emsd_uid: tuple[str, str] = None
) -> OrderBook:
    """
    OrderBook singleton factory per actor.

    """
    if emsd_uid is not None:
        # TODO: read in target emsd's active book on startup
        pass

    global _orders

    if _orders is None:
        # setup local ui event streaming channels for request/resp
        # streaming with EMS daemon
        _orders = OrderBook(
            *trio.open_memory_channel(100),
        )

    async def cancel(
        self,
        uuid: str,

    ) -> bool:
        '''
        Cancel an already existing order (or alert) dialog.

        '''
        await self._ems_stream.send(
            self._mk_cancel_msg(uuid)
        )
    return _orders


# TODO: we can get rid of this relay loop once we move
# order_mode inputs to async code!
async def relay_order_cmds_from_sync_code(

async def relay_orders_from_sync_code(

    client: OrderClient,
    symbol_key: str,
    to_ems_stream: tractor.MsgStream,

) -> None:
    '''
    Order submission relay task: deliver orders sent from synchronous (UI)
    code to the EMS via ``OrderClient._from_sync_order_client``.
    """
    Order streaming task: deliver orders transmitted from UI
    to downstream consumers.

    This is run in the UI actor (usually the one running Qt but could be
    any other client service code). This process simply delivers order
    messages to the above ``_to_relay_task`` send channel (from sync code using
    messages to the above ``_to_ems`` send channel (from sync code using
    ``.send_nowait()``), these values are pulled from the channel here
    and relayed to any consumer(s) that called this function using
    a ``tractor`` portal.

    This effectively makes order messages look like they're being
    "pushed" from the parent to the EMS where local sync code is likely
    doing the pushing from some non-async UI handler.
    doing the pushing from some UI.

    '''
    async with (
        client._from_sync_order_client.subscribe() as sync_order_cmds
    ):
        async for cmd in sync_order_cmds:
            sym = cmd.symbol
            msg = pformat(cmd.to_dict())
    """
    book = get_orders()
    orders_stream = book._from_order_book

            if sym == symbol_key:
                log.info(f'Send order cmd:\n{msg}')
                # send msg over IPC / wire
                await to_ems_stream.send(cmd)
    async for cmd in orders_stream:

            else:
                log.warning(
                    f'Ignoring unmatched order cmd for {sym} != {symbol_key}:'
                    f'\n{msg}'
                )
        print(cmd)
        if cmd['symbol'] == symbol_key:

            # send msg over IPC / wire
            log.info(f'Send order cmd:\n{pformat(cmd)}')
            await to_ems_stream.send(cmd)

        else:
            # XXX BRUTAL HACKZORZES !!!
            # re-insert for another consumer
            # we need broadcast channelz...asap
            # https://github.com/goodboy/tractor/issues/204
            book._to_ems.send_nowait(cmd)


@acm
@asynccontextmanager
async def open_ems(
    fqme: str,
    mode: str = 'live',
    loglevel: str = 'error',
    broker: str,
    symbol: Symbol,

) -> tuple[
    OrderClient,  # client
    tractor.MsgStream,  # order ctl stream
    dict[
        # brokername, acctid
        tuple[str, str],
        dict[str, BrokerdPosition],
    ],
    list[str],
    dict[str, Status],
]:
    '''
    (Maybe) spawn an EMS-daemon (emsd), deliver an `OrderClient` for
    requesting orders/alerts and a `trades_stream` which delivers all
    response-msgs.
) -> (OrderBook, tractor.MsgStream, dict):
    """Spawn an EMS daemon and begin sending orders and receiving
    alerts.

    This is a "client side" entrypoint which may spawn the `emsd` service
    if it can't be discovered and generally speaking is the lowest level
    broker control client-API.

    '''
    # TODO: prolly hand in the `MktPair` instance directly here as well!
    from piker.accounting import unpack_fqme
    broker, mktep, venue, suffix = unpack_fqme(fqme)
    This EMS tries to reduce most broker's terrible order entry apis to
    a very simple protocol built on a few easy to grok and/or
    "rantsy" premises:

    async with maybe_open_emsd(
        broker,
        loglevel=loglevel,
    ) as portal:
    - most users will prefer "dark mode" where orders are not submitted
      to a broker until an execution condition is triggered
      (aka client-side "hidden orders")

    - brokers over-complicate their apis and generally speaking hire
      poor designers to create them. We're better off creating a super
      minimal, schema-simple, request-event-stream protocol to unify all the
      existing piles of shit (and shocker, it'll probably just end up
      looking like a decent crypto exchange's api)

    - all order types can be implemented with client-side limit orders

    - we aren't reinventing a wheel in this case since none of these
      brokers are exposing FIX protocol; it is they doing the re-invention.


    TODO: make some fancy diagrams using mermaid.io

    the possible set of responses from the stream is currently:
    - 'dark_submitted', 'broker_submitted'
    - 'dark_cancelled', 'broker_cancelled'
    - 'dark_executed', 'broker_executed'
    - 'broker_filled'

    """
    # wait for service to connect back to us signalling
    # ready for order commands
    book = get_orders()

    async with maybe_open_emsd(broker) as portal:

        from ._ems import _emsd_main
        async with (

            # connect to emsd
            portal.open_context(
                _emsd_main,
                fqme=fqme,
                exec_mode=mode,
                loglevel=loglevel,

            ) as (
                ctx,
                (
                    positions,
                    accounts,
                    dialogs,
                )
            ),
                _emsd_main,
                broker=broker,
                symbol=symbol.key,

            ) as (ctx, positions),

            # open 2-way trade command stream
            ctx.open_stream() as trades_stream,
        ):
            size: int = 100  # what should this be?
            tx, rx = trio.open_memory_channel(size)
            brx = broadcast_receiver(rx, size)

            # setup local ui event streaming channels for request/resp
            # streaming with EMS daemon
            client = OrderClient(
                _ems_stream=trades_stream,
                _to_relay_task=tx,
                _from_sync_order_client=brx,
            )

            client._ems_stream = trades_stream

            # start sync code order msg delivery task
            async with trio.open_nursery() as n:
                n.start_soon(
                    relay_orders_from_sync_code,
                    client,
                    fqme,
                    relay_order_cmds_from_sync_code,
                    symbol.key,
                    trades_stream
                )

                yield (
                    client,
                    trades_stream,
                    positions,
                    accounts,
                    dialogs,
                )

                # stop the sync-msg-relay task on exit.
                n.cancel_scope.cancel()
            yield book, trades_stream, positions
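.. note:: a minimal client-side sketch of driving the (new-style)
   ``open_ems()`` API above; the fqme string, price and the alert
   order values are placeholders::

       import uuid
       from piker.clearing import open_ems, Order

       async def submit_alert(fqme: str = 'xbtusdt.spot.kraken'):
           async with open_ems(fqme) as (
               client,
               trades_stream,
               positions,
               accounts,
               dialogs,
           ):
               # stage a price alert via the async send API
               await client.send(Order(
                   oid=str(uuid.uuid4()),
                   action='alert',
                   exec_mode='live',
                   symbol=fqme,
                   account='',  # no broker account needed for alerts
                   price=30_000.0,
                   size=0,
               ))
               # response (Status) msgs arrive on the stream
               async for msg in trades_stream:
                   print(msg)
                   break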
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -15,292 +15,224 @@
|
|||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Clearing sub-system message and protocols.
|
||||
Clearing system messagingn types and protocols.
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from typing import (
|
||||
Literal,
|
||||
)
|
||||
from typing import Optional, Union
|
||||
|
||||
from msgspec import field
|
||||
# TODO: try out just encoding/send direction for now?
|
||||
# import msgspec
|
||||
from pydantic import BaseModel
|
||||
|
||||
from piker.types import Struct
|
||||
|
||||
|
||||
# TODO: ``msgspec`` stuff worth paying attention to:
|
||||
# - schema evolution:
|
||||
# https://jcristharif.com/msgspec/usage.html#schema-evolution
|
||||
# - for eg. ``BrokerdStatus``, instead just have separate messages?
|
||||
# - use literals for a common msg determined by diff keys?
|
||||
# - https://jcristharif.com/msgspec/usage.html#literal
|
||||
|
||||
# --------------
|
||||
# Client -> emsd
|
||||
# --------------
|
||||
|
||||
class Order(Struct):
|
||||
|
||||
# TODO: ideally we can combine these 2 fields into
|
||||
# 1 and just use the size polarity to determine a buy/sell.
|
||||
# i would like to see this become more like
|
||||
# https://jcristharif.com/msgspec/usage.html#literal
|
||||
# action: Literal[
|
||||
# 'live',
|
||||
# 'dark',
|
||||
# 'alert',
|
||||
# ]
|
||||
|
||||
action: Literal[
|
||||
'buy',
|
||||
'sell',
|
||||
'alert',
|
||||
]
|
||||
# determines whether the create execution
|
||||
# will be submitted to the ems or directly to
|
||||
# the backend broker
|
||||
exec_mode: Literal[
|
||||
'dark',
|
||||
'live',
|
||||
# 'paper', no right?
|
||||
]
|
||||
|
||||
# internal ``emdsd`` unique "order id"
|
||||
oid: str # uuid4
|
||||
# TODO: figure out how to optionally typecast this to `MktPair`?
|
||||
symbol: str # | MktPair
|
||||
account: str # should we set a default as '' ?
|
||||
|
||||
price: float
|
||||
size: float # -ve is "sell", +ve is "buy"
|
||||
|
||||
brokers: list[str] = []
|
||||
|
||||
|
||||
class Cancel(Struct):
|
||||
'''
|
||||
Cancel msg for removing a dark (ems triggered) or
|
||||
class Cancel(BaseModel):
|
||||
'''Cancel msg for removing a dark (ems triggered) or
|
||||
broker-submitted (live) trigger/order.
|
||||
|
||||
'''
|
||||
action: str = 'cancel'
|
||||
oid: str # uuid4
|
||||
symbol: str
|
||||
action: str = 'cancel'
|
||||
|
||||
|
||||
# --------------
|
||||
class Order(BaseModel):
|
||||
|
||||
action: str # {'buy', 'sell', 'alert'}
|
||||
# internal ``emdsd`` unique "order id"
|
||||
oid: str # uuid4
|
||||
symbol: str
|
||||
|
||||
price: float
|
||||
size: float
|
||||
brokers: list[str]
|
||||
|
||||
# Assigned once initial ack is received
|
||||
# ack_time_ns: Optional[int] = None
|
||||
|
||||
# determines whether the create execution
|
||||
# will be submitted to the ems or directly to
|
||||
# the backend broker
|
||||
exec_mode: str # {'dark', 'live', 'paper'}
|
||||
|
||||
|
||||
# Client <- emsd
|
||||
# --------------
|
||||
# update msgs from ems which relay state change info
|
||||
# from the active clearing engine.
|
||||
|
||||
class Status(Struct):
|
||||
|
||||
time_ns: int
|
||||
oid: str # uuid4 ems-order dialog id
|
||||
|
||||
resp: Literal[
|
||||
'pending', # acked by broker but not yet open
|
||||
'open',
|
||||
'dark_open', # dark/algo triggered order is open in ems clearing loop
|
||||
'triggered', # above triggered order sent to brokerd, or an alert closed
|
||||
'closed', # fully cleared all size/units
|
||||
'fill', # partial execution
|
||||
'canceled',
|
||||
'error',
|
||||
]
|
||||
class Status(BaseModel):
|
||||
|
||||
name: str = 'status'
|
||||
oid: str # uuid4
|
||||
time_ns: int
|
||||
|
||||
# {
|
||||
# 'dark_submitted',
|
||||
# 'dark_cancelled',
|
||||
# 'dark_triggered',
|
||||
|
||||
# 'broker_submitted',
|
||||
# 'broker_cancelled',
|
||||
# 'broker_executed',
|
||||
# 'broker_filled',
|
||||
|
||||
# 'alert_submitted',
|
||||
# 'alert_triggered',
|
||||
|
||||
# 'position',
|
||||
|
||||
# }
|
||||
resp: str # "response", see above
|
||||
|
||||
# symbol: str
|
||||
|
||||
# trigger info
|
||||
trigger_price: Optional[float] = None
|
||||
# price: float
|
||||
|
||||
# broker: Optional[str] = None
|
||||
|
||||
# this maps normally to the ``BrokerdOrder.reqid`` below, an id
|
||||
# normally allocated internally by the backend broker routing system
|
||||
reqid: int | str | None = None
|
||||
broker_reqid: Optional[Union[int, str]] = None
|
||||
|
||||
# the (last) source order/request msg if provided
|
||||
# (eg. the Order/Cancel which causes this msg) and
|
||||
# acts as a back-reference to the corresponding
|
||||
# request message which was the source of this msg.
|
||||
req: Order | None = None
|
||||
# for relaying backend msg data "through" the ems layer
|
||||
brokerd_msg: dict = {}
|
||||
|
||||
|
||||
# emsd -> brokerd
|
||||
# requests *sent* from ems to respective backend broker daemon
|
||||
|
||||
class BrokerdCancel(BaseModel):
|
||||
|
||||
action: str = 'cancel'
|
||||
oid: str # piker emsd order id
|
||||
time_ns: int
|
||||
|
||||
# "broker request id": broker specific/internal order id if this is
|
||||
# None, creates a new order otherwise if the id is valid the backend
|
||||
# api must modify the existing matching order. If the broker allows
|
||||
# for setting a unique order id then this value will be relayed back
|
||||
# on the emsd order request stream as the ``BrokerdOrderAck.reqid``
|
||||
# field
|
||||
reqid: Optional[Union[int, str]] = None
|
||||
|
||||
|
||||
class BrokerdOrder(BaseModel):
|
||||
|
||||
action: str # {buy, sell}
|
||||
oid: str
|
||||
time_ns: int
|
||||
|
||||
# "broker request id": broker specific/internal order id if this is
|
||||
# None, creates a new order otherwise if the id is valid the backend
|
||||
# api must modify the existing matching order. If the broker allows
|
||||
# for setting a unique order id then this value will be relayed back
|
||||
# on the emsd order request stream as the ``BrokerdOrderAck.reqid``
|
||||
# field
|
||||
reqid: Optional[Union[int, str]] = None
|
||||
|
||||
symbol: str # symbol.<providername> ?
|
||||
price: float
|
||||
size: float


# emsd <- brokerd
# requests *received* to ems from broker backend


class BrokerdOrderAck(BaseModel):
    '''Immediate response to a brokerd order request providing
    the broker specific unique order id.

    '''
    name: str = 'ack'

    # defined and provided by backend
    reqid: Union[int, str]

    # emsd id originally sent in matching request msg
    oid: str


class BrokerdStatus(BaseModel):

    name: str = 'status'
    reqid: Union[int, str]
    time_ns: int

    # {
    # 'submitted',
    # 'cancelled',
    # 'executed',
    # }
    status: str

    filled: float = 0.0
    reason: str = ''
    remaining: float = 0.0

    # XXX: better design/name here?
    # flag that can be set to indicate a message for an order
    # event that wasn't originated by piker's emsd (eg. some external
    # trading system which does its own order control but that you
    # might want to "track" using piker UIs/systems).
    src: str | None = None

    # set when a cancel request msg was sent for this order flow dialog
    # but the brokerd dialog isn't yet in a cancelled state.
    cancel_called: bool = False

    # for relaying boxed brokerd-dialog-side msg data "through" the
    # ems layer to clients.
    brokerd_msg: dict = {}


class Error(Status):
    resp: str = 'error'

    # TODO: allow re-wrapping from existing (last) status?
    @classmethod
    def from_status(
        cls,
        msg: Status,
    ) -> Error:
        ...


# ---------------
# emsd -> brokerd
# ---------------
# requests *sent* from ems to respective backend broker daemon

class BrokerdCancel(Struct):

    oid: str  # piker emsd order id
    time_ns: int

    account: str
    # "broker request id": broker specific/internal order id if this is
    # None, creates a new order otherwise if the id is valid the backend
    # api must modify the existing matching order. If the broker allows
    # for setting a unique order id then this value will be relayed back
    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
    # field
    reqid: int | str | None = None
    action: str = 'cancel'


class BrokerdOrder(Struct):

    oid: str
    account: str
    time_ns: int

    symbol: str  # fqme
    price: float
    size: float

    # TODO: if we instead rely on a +ve/-ve size to determine
    # the action we more or less don't need this field right?
    action: str = ''  # {buy, sell}

    # "broker request id": broker specific/internal order id if this is
    # None, creates a new order otherwise if the id is valid the backend
    # api must modify the existing matching order. If the broker allows
    # for setting a unique order id then this value will be relayed back
    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
    # field
    reqid: int | str | None = None
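
To illustrate the submit/ack round-trip these msg types encode, a
minimal sketch (the fqme, account and `reqid` values here are
hypothetical)::

    import time

    # ems -> brokerd: request a new order; `reqid=None` => create
    order = BrokerdOrder(
        oid='8c212a28-4a63-4bb3-9b28-7f4e2d8e5d0b',
        account='paper',
        time_ns=time.time_ns(),
        symbol='btcusdt.binance',  # fqme
        price=26000.0,
        size=0.1,
        action='buy',
    )

    # brokerd -> ems: immediately ack with the backend-allocated id so
    # the ems can map it back to the dialog's `.oid` (see
    # `BrokerdOrderAck` just below).
    ack = BrokerdOrderAck(
        reqid=123456,  # hypothetical broker-internal id
        oid=order.oid,
        account=order.account,
    )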


# ---------------
# emsd <- brokerd
# ---------------
# requests *received* to ems from broker backend

class BrokerdOrderAck(Struct):
    '''
    Immediate response to a brokerd order request providing the broker
    specific unique order id so that the EMS can associate this
    (presumably differently formatted broker side ID) with our own
    ``.oid`` (which is a uuid4).

    '''
    # defined and provided by backend
    reqid: int | str

    # emsd id originally sent in matching request msg
    oid: str
    # TODO: do we need this?
    account: str = ''
    name: str = 'ack'


class BrokerdStatus(Struct):

    time_ns: int
    reqid: int | str
    status: Literal[
        'open',
        'canceled',
        'pending',
        # 'error',  # NOTE: use `BrokerdError`
        'closed',
    ]
    name: str = 'status'

    oid: str = ''
    # TODO: do we need this?
    account: str | None = None
    filled: float = 0.0
    reason: str = ''
    remaining: float = 0.0

    # external: bool = False
    external: bool = False

    # XXX: not required schema as of yet
    broker_details: dict = field(default_factory=lambda: {
    broker_details: dict = {
        'name': '',
    })
    }


class BrokerdFill(Struct):
    '''
    A single message indicating a "fill-details" event from the
    broker if available.
class BrokerdFill(BaseModel):
    '''A single message indicating a "fill-details" event from the broker
    if available.

    '''
    name: str = 'fill'
    reqid: Union[int, str]
    time_ns: int

    # order execution related
    action: str
    size: float
    price: float

    broker_details: dict = {}  # meta-data (eg. commissions etc.)

    # brokerd timestamp required for order mode arrow placement on x-axis

    # TODO: maybe int if we force ns?
    # we need to normalize this somehow since backends will use their
    # own format and likely across many disparate epoch clocks...
    time_ns: int
    broker_time: float
    reqid: int | str

    # order execution related
    size: float
    price: float

    name: str = 'fill'
    action: str | None = None
    broker_details: dict = {}  # meta-data (eg. commissions etc.)


class BrokerdError(Struct):
    '''
    Optional error type that can be relayed to emsd for error handling.
class BrokerdError(BaseModel):
    '''Optional error type that can be relayed to emsd for error handling.

    This is still a TODO thing since we're not sure how to employ it yet.

    '''
    reason: str

    # TODO: drop this right?
    symbol: str | None = None

    oid: str | None = None
    # if no brokerd order request was actually submitted (eg. we errored
    # at the ``pikerd`` layer) then there will be no ``reqid`` allocated.
    reqid: str | None = None

    name: str = 'error'
    reqid: Union[int, str]

    symbol: str
    reason: str
    broker_details: dict = {}


# TODO: yeah, so we REALLY need to completely deprecate
# this and use the `.accounting.Position` msg-type instead..
class BrokerdPosition(Struct):
    '''
    Position update event from brokerd.
class BrokerdPosition(BaseModel):
    '''Position update event from brokerd.

    '''
    name: str = 'position'

    broker: str
    account: str
    symbol: str
    currency: str
    size: float
    avg_price: float
    currency: str = ''
    name: str = 'position'

File diff suppressed because it is too large

@ -1,93 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
Sub-sys module commons.

"""
from collections import ChainMap
from functools import partial
from typing import Any

from ..log import (
    get_logger,
    get_console_log,
)
from piker.types import Struct

subsys: str = 'piker.clearing'

log = get_logger(subsys)

get_console_log = partial(
    get_console_log,
    name=subsys,
)


class OrderDialogs(Struct):
    '''
    Order control dialog (and thus transaction) tracking via
    message recording.

    Allows easily recording messages associated with a given set of
    order control transactions and looking up the latest field
    state using the entire (reverse chronological) msg flow.

    '''
    _flows: dict[str, ChainMap] = {}

    def add_msg(
        self,
        oid: str,
        msg: dict,
    ) -> None:

        # NOTE: manually enter a new map on the first msg add to
        # avoid creating one with an empty dict first entry in
        # `ChainMap.maps` which is the default if none passed at
        # init.
        cm: ChainMap = self._flows.get(oid)
        if cm:
            cm.maps.insert(0, msg)
        else:
            cm = ChainMap(msg)
            self._flows[oid] = cm

    # TODO: wrap all this in the `collections.abc.Mapping` interface?
    def get(
        self,
        oid: str,

    ) -> ChainMap[str, Any]:
        '''
        Return the dialog `ChainMap` for provided id.

        '''
        return self._flows.get(oid, None)

    def pop(
        self,
        oid: str,

    ) -> ChainMap[str, Any]:
        '''
        Pop and thus remove the `ChainMap` containing the msg flow
        for the given order id.

        '''
        if (flow := self._flows.pop(oid, None)) is None:
            log.warning(f'No flow found for oid: {oid}')

        return flow
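
A quick usage sketch (msg contents are hypothetical): each `.add_msg()`
layers the newest msg on top of the dialog's `ChainMap` so the most
recent value for any field wins on lookup while older fields remain
resolvable::

    dialogs = OrderDialogs()
    oid = '8c212a28-4a63-4bb3-9b28-7f4e2d8e5d0b'

    dialogs.add_msg(oid, {'resp': 'pending', 'price': 100.0})
    dialogs.add_msg(oid, {'resp': 'open'})

    flow = dialogs.get(oid)
    assert flow['resp'] == 'open'   # newest msg shadows the older one
    assert flow['price'] == 100.0   # older field still resolvable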


@ -1,295 +1,84 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet
# (in stewardship for pikers, everywhere.)

# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public
# License along with this program.  If not, see
# <https://www.gnu.org/licenses/>.

'''
"""
CLI commons.

'''
"""
import os
# from contextlib import AsyncExitStack
from types import ModuleType

import click
import trio
import tractor
from tractor._multiaddr import parse_maddr

from ..log import (
    get_console_log,
    get_logger,
    colorize_json,
)
from ..log import get_console_log, get_logger, colorize_json
from ..brokers import get_brokermod, config
from .._daemon import _tractor_kwargs


log = get_logger('cli')
DEFAULT_BROKER = 'questrade'

_config_dir = click.get_app_dir('piker')
_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
_context_defaults = dict(
    default_map={
        # Questrade specific quote poll rates
        'monitor': {
            'rate': 3,
        },
        'optschain': {
            'rate': 1,
        },
    }
)
from ..brokers import get_brokermod
from ..service import (
    _default_registry_host,
    _default_registry_port,
)
from .. import config


log = get_logger('piker.cli')


def load_trans_eps(
    network: dict | None = None,
    maddrs: list[tuple] | None = None,

) -> dict[str, dict[str, dict]]:

    # transport-oriented endpoint multi-addresses
    eps: dict[
        str,  # service name, eg. `pikerd`, `emsd`..

        # libp2p style multi-addresses parsed into prot layers
        list[dict[str, str | int]]
    ] = {}

    if (
        network
        and not maddrs
    ):
        # load network section and (attempt to) connect all endpoints
        # which are reachable B)
        for key, maddrs in network.items():
            match key:

                # TODO: resolve table across multiple discov
                # prots Bo
                case 'resolv':
                    pass

                case 'pikerd':
                    dname: str = key
                    for maddr in maddrs:
                        layers: dict = parse_maddr(maddr)
                        eps.setdefault(
                            dname,
                            [],
                        ).append(layers)

    elif maddrs:
        # presume user is manually specifying the root actor ep(s).
        eps['pikerd'] = [parse_maddr(m) for m in maddrs]

    return eps
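
By way of example, a `[network]` config section mapping `pikerd` to a
single TCP multiaddr might yield a table like the following (the exact
per-layer keys come from `tractor._multiaddr.parse_maddr()` and are
assumed here)::

    network = {'pikerd': ['/ipv4/127.0.0.1/tcp/6116']}
    eps = load_trans_eps(network=network)
    # eps => {
    #     'pikerd': [
    #         {'ipv4': {'addr': '127.0.0.1'}, 'tcp': {'port': 6116}},
    #     ],
    # }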


@click.command()
@click.option(
    '--loglevel',
    '-l',
    default='warning',
    help='Logging level',
)
@click.option(
    '--tl',
    is_flag=True,
    help='Enable tractor-runtime logs',
)
@click.option(
    '--pdb',
    is_flag=True,
    help='Enable tractor debug mode',
)
@click.option(
    '--maddr',
    '-m',
    default=None,
    help='Multiaddrs to bind or contact',
)
# @click.option(
#     '--tsdb',
#     is_flag=True,
#     help='Enable local ``marketstore`` instance'
# )
# @click.option(
#     '--es',
#     is_flag=True,
#     help='Enable local ``elasticsearch`` instance'
# )
def pikerd(
    maddr: list[str] | None,
    loglevel: str,
    tl: bool,
    pdb: bool,
    # tsdb: bool,
    # es: bool,
):
    '''
    Spawn the piker broker-daemon.
@click.option('--loglevel', '-l', default='warning', help='Logging level')
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
@click.option('--host', '-h', default='127.0.0.1', help='Host address to bind')
def pikerd(loglevel, host, tl, pdb):
    """Spawn the piker broker-daemon.
    """
    from .._daemon import open_pikerd
    log = get_console_log(loglevel)

    '''
    from tractor.devx import maybe_open_crash_handler
    with maybe_open_crash_handler(pdb=pdb):
        log = get_console_log(loglevel, name='cli')
        if pdb:
            log.warning((
                "\n"
                "!!! You have enabled daemon DEBUG mode !!!\n"
                "If a daemon crashes it will likely block"
                " the service until resumed from console!\n"
                "\n"
            ))

    if pdb:
        log.warning((
            "\n"
            "!!! YOU HAVE ENABLED DAEMON DEBUG MODE !!!\n"
            "When a `piker` daemon crashes it will block the "
            "task-thread until resumed from console!\n"
            "\n"
        ))
    async def main():
        async with open_pikerd(loglevel=loglevel, debug_mode=pdb):
            await trio.sleep_forever()

        # service-actor registry endpoint socket-address set
        regaddrs: list[tuple[str, int]] = []

        conf, _ = config.load(
            conf_name='conf',
        )
        network: dict = conf.get('network')
        if (
            network is None
            and not maddr
        ):
            regaddrs = [(
                _default_registry_host,
                _default_registry_port,
            )]

        else:
            eps: dict = load_trans_eps(
                network,
                maddr,
            )
            for layers in eps['pikerd']:
                regaddrs.append((
                    layers['ipv4']['addr'],
                    layers['tcp']['port'],
                ))

        from .. import service

        async def main():
            service_mngr: service.Services

            async with (
                service.open_pikerd(
                    registry_addrs=regaddrs,
                    loglevel=loglevel,
                    debug_mode=pdb,

                ) as service_mngr,  # normally delivers a ``Services`` handle

                # AsyncExitStack() as stack,
            ):
                # TODO: spawn all other sub-actor daemons according to
                # multiaddress endpoint spec defined by user config
                assert service_mngr

                # if tsdb:
                #     dname, conf = await stack.enter_async_context(
                #         service.marketstore.start_ahab_daemon(
                #             service_mngr,
                #             loglevel=loglevel,
                #         )
                #     )
                #     log.info(f'TSDB `{dname}` up with conf:\n{conf}')

                # if es:
                #     dname, conf = await stack.enter_async_context(
                #         service.elastic.start_ahab_daemon(
                #             service_mngr,
                #             loglevel=loglevel,
                #         )
                #     )
                #     log.info(f'DB `{dname}` up with conf:\n{conf}')

                await trio.sleep_forever()

        trio.run(main)
    trio.run(main)


@click.group(context_settings=config._context_defaults)
@click.group(context_settings=_context_defaults)
@click.option(
    '--brokers', '-b',
    default=None,
    default=[DEFAULT_BROKER],
    multiple=True,
    help='Broker backend to use'
)
@click.option('--loglevel', '-l', default='warning', help='Logging level')
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.option('--configdir', '-c', help='Configuration directory')
@click.option(
    '--pdb',
    is_flag=True,
    help='Enable runtime debug mode',
)
@click.option(
    '--maddr',
    '-m',
    default=None,
    multiple=True,
    help='Multiaddr to bind',
)
@click.option(
    '--regaddr',
    '-r',
    default=None,
    help='Registrar addr to contact',
)
@click.pass_context
def cli(
    ctx: click.Context,
    brokers: list[str],
    loglevel: str,
    tl: bool,
    configdir: str,
    pdb: bool,

    # TODO: make these list[str] with multiple -m maddr0 -m maddr1
    maddr: list[str],
    regaddr: str,

) -> None:
def cli(ctx, brokers, loglevel, tl, configdir):
    if configdir is not None:
        assert os.path.isdir(configdir), f"`{configdir}` is not a valid path"
        config._override_config_dir(configdir)

    # TODO: for typer see
    # https://typer.tiangolo.com/tutorial/commands/context/
    ctx.ensure_object(dict)

    if not brokers:
        # (try to) load all (supposedly) supported data/broker backends
        from piker.brokers import __brokers__
        brokers = __brokers__

    brokermods: dict[str, ModuleType] = {
        broker: get_brokermod(broker) for broker in brokers
    }
    assert brokermods

    # TODO: load endpoints from `conf::[network].pikerd`
    # - pikerd vs. regd, separate registry daemon?
    # - expose datad vs. brokerd?
    # - bind emsd with certain perms on public iface?
    regaddrs: list[tuple[str, int]] = regaddr or [(
        _default_registry_host,
        _default_registry_port,
    )]

    # TODO: factor [network] section parsing out from pikerd
    # above and call it here as well.
    # if maddr:
    #     for addr in maddr:
    #         layers: dict = parse_maddr(addr)
    if len(brokers) == 1:
        brokermods = [get_brokermod(brokers[0])]
    else:
        brokermods = [get_brokermod(broker) for broker in brokers]

    ctx.obj.update({
        'brokers': brokers,

@ -297,14 +86,8 @@ def cli(
        'loglevel': loglevel,
        'tractorloglevel': None,
        'log': get_console_log(loglevel),
        'confdir': config._config_dir,
        'wl_path': config._watchlists_data_path,
        'registry_addrs': regaddrs,
        'pdb': pdb,  # debug mode flag

        # TODO: endpoint parsing, pinging and binding
        # on no existing server.
        # 'maddrs': maddr,
        'confdir': _config_dir,
        'wl_path': _watchlists_data_path,
    })

    # allow enabling same loglevel in ``tractor`` machinery

@ -314,52 +97,40 @@ def cli(

@cli.command()
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.argument('ports', nargs=-1, required=False)
@click.argument('names', nargs=-1, required=False)
@click.pass_obj
def services(config, tl, ports):

    from ..service import (
        open_piker_runtime,
        _default_registry_port,
        _default_registry_host,
    )

    host = _default_registry_host
    if not ports:
        ports = [_default_registry_port]
def services(config, tl, names):

    async def list_services():
        nonlocal host
        async with (
            open_piker_runtime(
                name='service_query',
                loglevel=config['loglevel'] if tl else None,
            ),
            tractor.get_arbiter(
                host=host,
                port=ports[0]
            ) as portal
        ):
            registry = await portal.run_from_ns('self', 'get_registry')
            json_d = {}
            for key, socket in registry.items():
                host, port = socket
                json_d[key] = f'{host}:{port}'
            click.echo(f"{colorize_json(json_d)}")

    trio.run(list_services)
        async with tractor.get_arbiter(
            *_tractor_kwargs['arbiter_addr']
        ) as portal:
            registry = await portal.run('self', 'get_registry')
            json_d = {}
            for uid, socket in registry.items():
                name, uuid = uid
                host, port = socket
                json_d[f'{name}.{uuid}'] = f'{host}:{port}'
            click.echo(
                f"Available `piker` services:\n{colorize_json(json_d)}"
            )

    tractor.run(
        list_services,
        name='service_query',
        loglevel=config['loglevel'] if tl else None,
        arbiter_addr=_tractor_kwargs['arbiter_addr'],
    )


def _load_clis() -> None:
    # from ..service import elastic  # noqa
    from ..data import marketstore  # noqa
    from ..data import cli  # noqa
    from ..brokers import cli  # noqa
    from ..ui import cli  # noqa
    from ..watchlists import cli  # noqa

    # typer implemented
    from ..storage import cli  # noqa
    from ..accounting import cli  # noqa


# load downstream cli modules
_load_clis()

389
piker/config.py
389
piker/config.py

@ -1,389 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Platform configuration (files) mgmt.

"""
import platform
import sys
import os
import shutil
from typing import (
    Callable,
    MutableMapping,
)
from pathlib import Path

from bidict import bidict
import tomlkit
try:
    import tomllib
except ModuleNotFoundError:
    import tomli as tomllib


from .log import get_logger

log = get_logger('broker-config')


# XXX NOTE: taken from ``click`` since apparently they have some
# super weirdness with sigint and sudo..no clue
# we're probably going to slowly just modify it to our own version over
# time..
def get_app_dir(
    app_name: str,
    roaming: bool = True,
    force_posix: bool = False,

) -> str:
    r"""Returns the config folder for the application.  The default behavior
    is to return whatever is most appropriate for the operating system.

    To give you an idea, for an app called ``"Foo Bar"``, something like
    the following folders could be returned:

    Mac OS X:
      ``~/Library/Application Support/Foo Bar``
    Mac OS X (POSIX):
      ``~/.foo-bar``
    Unix:
      ``~/.config/foo-bar``
    Unix (POSIX):
      ``~/.foo-bar``
    Win XP (roaming):
      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo``
    Win XP (not roaming):
      ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
    Win 7 (roaming):
      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
    Win 7 (not roaming):
      ``C:\Users\<user>\AppData\Local\Foo Bar``

    .. versionadded:: 2.0

    :param app_name: the application name.  This should be properly capitalized
        and can contain whitespace.
    :param roaming: controls if the folder should be roaming or not on Windows.
        Has no effect otherwise.
    :param force_posix: if this is set to `True` then on any POSIX system the
        folder will be stored in the home folder with a leading
        dot instead of the XDG config home or darwin's
        application support folder.
    """

    def _posixify(name):
        return "-".join(name.split()).lower()

    # NOTE: for testing with `pytest` we leverage the `tmp_dir`
    # fixture to generate (and clean up) a test-request-specific
    # directory for isolated configuration files such that,
    # - multiple tests can run (possibly in parallel) without data races
    #   on the config state,
    # - we don't need to ever worry about leaking configs into the
    #   system thus avoiding needing to manage config cleanup fixtures or
    #   other bothers (since obviously `tmp_dir` cleans up after itself).
    #
    # In order to "pass down" the test dir path to all (sub-)actors in
    # the actor tree we preload the root actor's runtime vars state (an
    # internal mechanism for inheriting state down an actor tree in
    # `tractor`) with the testing dir and check for it whenever we
    # detect `pytest` is being used (which it isn't under normal
    # operation).
    # if "pytest" in sys.modules:
    #     import tractor
    #     actor = tractor.current_actor(err_on_no_runtime=False)
    #     if actor:  # runtime is up
    #         rvs = tractor._state._runtime_vars
    #         import pdbp; pdbp.set_trace()
    #         testdirpath = Path(rvs['piker_vars']['piker_test_dir'])
    #         assert testdirpath.exists(), 'piker test harness might be borked!?'
    #         app_name = str(testdirpath)

    if platform.system() == 'Windows':
        key = "APPDATA" if roaming else "LOCALAPPDATA"
        folder = os.environ.get(key)
        if folder is None:
            folder = os.path.expanduser("~")
        return os.path.join(folder, app_name)
    if force_posix:
        return os.path.join(
            os.path.expanduser("~/.{}".format(_posixify(app_name))))
    if sys.platform == "darwin":
        return os.path.join(
            os.path.expanduser("~/Library/Application Support"), app_name
        )
    return os.path.join(
        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
        _posixify(app_name),
    )


_click_config_dir: Path = Path(get_app_dir('piker'))
_config_dir: Path = _click_config_dir

# NOTE: when using `sudo` we attempt to determine the non-root user
# and still use their normal config dir.
if (
    (_parent_user := os.environ.get('SUDO_USER'))
    and
    _parent_user != 'root'
):
    non_root_user_dir = Path(
        os.path.expanduser(f'~{_parent_user}')
    )
    root: str = 'root'
    _ccds: str = str(_click_config_dir)  # click config dir as string
    i_tail: int = int(_ccds.rfind(root) + len(root))
    _config_dir = (
        non_root_user_dir
        /
        Path(_ccds[i_tail+1:])  # +1 to capture trailing '/'
    )


_conf_names: set[str] = {
    'conf',  # god config
    'brokers',  # sec backend deatz
    'watchlists',  # (user defined) market lists
}

# TODO: probably drop all this super legacy, questrade specific,
# config stuff XD ?
_watchlists_data_path: Path = _config_dir / Path('watchlists.json')
_context_defaults = dict(
    default_map={
        # Questrade specific quote poll rates
        'monitor': {
            'rate': 3,
        },
        'optschain': {
            'rate': 1,
        },
    }
)


class ConfigurationError(Exception):
    'Misconfigured settings, likely in a TOML file.'


class NoSignature(ConfigurationError):
    'No credentials setup for broker backend!'


def _override_config_dir(
    path: str
) -> None:
    global _config_dir
    _config_dir = path


def _conf_fn_w_ext(
    name: str,
) -> str:
    # change this if we ever change the config file format.
    return f'{name}.toml'


def get_conf_dir() -> Path:
    '''
    Return the user configuration directory ``Path``
    on the local filesystem.

    '''
    return _config_dir


def get_conf_path(
    conf_name: str = 'brokers',

) -> Path:
    '''
    Return the top-level default config path normally under
    ``~/.config/piker`` on linux for a given ``conf_name``, the config
    name.

    Contains files such as:
    - brokers.toml
    - watchlists.toml

    # maybe coming soon ;)
    - signals.toml
    - strats.toml

    '''
    if 'account.' not in conf_name:
        assert str(conf_name) in _conf_names

    fn = _conf_fn_w_ext(conf_name)
    return _config_dir / Path(fn)


def repodir() -> Path:
    '''
    Return the abspath as ``Path`` to the git repo's root dir.

    '''
    repodir: Path = Path(__file__).absolute().parent.parent
    confdir: Path = repodir / 'config'

    if not confdir.is_dir():
        # prolly inside stupid GH actions CI..
        repodir: Path = Path(os.environ.get('GITHUB_WORKSPACE'))
        confdir: Path = repodir / 'config'

    assert confdir.is_dir(), f'{confdir} DNE, {repodir} is likely incorrect!'
    return repodir


def load(
    # NOTE: always appended with .toml suffix
    conf_name: str = 'conf',
    path: Path | None = None,

    decode: Callable[
        [str | bytes,],
        MutableMapping,
    ] = tomllib.loads,

    touch_if_dne: bool = False,

    **tomlkws,

) -> tuple[dict, Path]:
    '''
    Load config file by name.

    If the desired config is not in the top level piker-user config
    path then pass the ``path: Path`` explicitly.

    '''
    # create the $HOME/.config/piker dir if dne
    if not _config_dir.is_dir():
        _config_dir.mkdir(
            parents=True,
            exist_ok=True,
        )

    path_provided: bool = path is not None
    path: Path = path or get_conf_path(conf_name)

    if (
        not path.is_file()
        and touch_if_dne
    ):
        # do not copy in a template if an explicit path was provided,
        # just touch an empty file with the same name.
        if path_provided:
            with path.open(mode='x'):
                pass

        # try to copy in a template config to the user's dir if one
        # exists.
        else:
            fn: str = _conf_fn_w_ext(conf_name)
            template: Path = repodir() / 'config' / fn
            if template.is_file():
                shutil.copyfile(template, path)

            elif fn and template:
                assert template.is_file(), f'{template} is not a file!?'

        assert path.is_file(), f'Config file {path} not created!?'

    with path.open(mode='r') as fp:
        config: dict = decode(
            fp.read(),
            **tomlkws,
        )

    log.debug(f"Read config file {path}")
    return config, path
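
For instance, loading (and lazily creating from the repo template) the
broker settings file is simply (a sketch; section contents depend on
your actual `brokers.toml`)::

    conf, path = load(
        conf_name='brokers',
        touch_if_dne=True,  # copy in the repo template if missing
    )
    print(f'loaded {path}:\n{conf}')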


def write(
    config: dict,  # toml config as dict

    name: str | None = None,
    path: Path | None = None,
    fail_empty: bool = True,

    **toml_kwargs,

) -> None:
    '''
    Write broker config to disk.

    Create a ``brokers.toml`` file if one does not exist.

    '''
    if name:
        path: Path = path or get_conf_path(name)
        dirname: Path = path.parent
        if not dirname.is_dir():
            log.debug(f"Creating config dir {_config_dir}")
            dirname.mkdir()

    if (
        not config
        and fail_empty
    ):
        raise ValueError(
            "Watch out you're trying to write a blank config!"
        )

    log.debug(
        f"Writing config `{name}` file to:\n"
        f"{path}"
    )
    with path.open(mode='w') as fp:
        return tomlkit.dump(  # preserve style on write B)
            config,
            fp,
            **toml_kwargs,
        )


def load_accounts(
    providers: list[str] | None = None

) -> bidict[str, str | None]:

    conf, path = load(
        conf_name='brokers',
    )
    accounts = bidict()
    for provider_name, section in conf.items():
        accounts_section = section.get('accounts')
        if (
            providers is None or
            providers and provider_name in providers
        ):
            if accounts_section is None:
                log.warning(f'No accounts named for {provider_name}?')
                continue
            else:
                for label, value in accounts_section.items():
                    accounts[
                        f'{provider_name}.{label}'
                    ] = value

    # our default paper engine entry
    accounts['paper'] = None

    return accounts
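
So given a `brokers.toml` containing a (hypothetical) account table
such as::

    [ib.accounts]
    margin = "DU1234567"

the returned `bidict` maps both ways::

    accounts = load_accounts(providers=['ib'])
    assert accounts['ib.margin'] == 'DU1234567'
    assert accounts.inverse['DU1234567'] == 'ib.margin'
    assert accounts['paper'] is None  # default paper engine entry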

@ -22,7 +22,7 @@ and storing data from your brokers as well as
sharing live streams over a network.

"""
from .ticktools import iterticks
from ._normalize import iterticks
from ._sharedmem import (
    maybe_open_shm_array,
    attach_shm_array,

@ -30,42 +30,19 @@ from ._sharedmem import (
    get_shm_token,
    ShmArray,
)
from ._source import (
    def_iohlcv_fields,
    def_ohlcv_fields,
)
from .feed import (
    Feed,
    open_feed,
    _setup_persistent_brokerd,
)
from .flows import Flume
from ._symcache import (
    SymbologyCache,
    open_symcache,
    get_symcache,
    match_from_pairs,
)
from ._sampling import open_sample_stream
from ..types import Struct


__all__: list[str] = [
    'Flume',
    'Feed',
__all__ = [
    'open_feed',
    'ShmArray',
    'iterticks',
    'maybe_open_shm_array',
    'match_from_pairs',
    'attach_shm_array',
    'open_shm_array',
    'get_shm_token',
    'def_iohlcv_fields',
    'def_ohlcv_fields',
    'open_symcache',
    'open_sample_stream',
    'get_symcache',
    'Struct',
    'SymbologyCache',
    'types',
    '_setup_persistent_brokerd',
]


@ -1,838 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
'''
Pre-(path)-graphics formatted x/y nd/1d rendering subsystem.

'''
from __future__ import annotations
from typing import (
    Optional,
    TYPE_CHECKING,
)

import msgspec
from msgspec import field
import numpy as np
from numpy.lib import recfunctions as rfn

from ._sharedmem import (
    ShmArray,
)
from ._pathops import (
    path_arrays_from_ohlc,
)

if TYPE_CHECKING:
    from ._dataviz import (
        Viz,
    )
    from piker.toolz import Profiler

# default gap between bars: "bar gap multiplier"
# - 0.5 is no overlap between OC arms,
# - 1.0 is full overlap on each neighbor sample
BGM: float = 0.16


class IncrementalFormatter(msgspec.Struct):
    '''
    Incrementally updating, pre-path-graphics tracking, formatter.

    Allows tracking source data state in an updateable pre-graphics
    ``np.ndarray`` format (in local process memory) as well as
    incrementally rendering from that format **to** 1d x/y for path
    generation using ``pg.functions.arrayToQPath()``.

    '''
    shm: ShmArray
    viz: Viz

    # the ratio by which to scale any index into the x/y_1d arrays
    # given an input index based on the original source data array.
    flat_index_ratio: float = 1

    @property
    def index_field(self) -> 'str':
        '''
        Value (``str``) used to look up the "index series" from the
        underlying source ``numpy`` struct-array; delegate directly to
        the managing ``Viz``.

        '''
        return self.viz.index_field

    # Incrementally updated xy ndarray formatted data, a pre-1d
    # format which is updated and cached independently of the final
    # pre-graphics-path 1d format.
    x_nd: Optional[np.ndarray] = None
    y_nd: Optional[np.ndarray] = None

    @property
    def xy_nd(self) -> tuple[np.ndarray, np.ndarray]:
        return (
            self.x_nd[self.xy_slice],
            self.y_nd[self.xy_slice],
        )

    @property
    def xy_slice(self) -> slice:
        return slice(
            self.xy_nd_start,
            self.xy_nd_stop,
        )

    # indexes which slice into the above arrays (which are allocated
    # based on source data shm input size) and allow retrieving
    # incrementally updated data.
    xy_nd_start: int | None = None
    xy_nd_stop: int | None = None

    # TODO: eventually incrementally update 1d-pre-graphics path data?
    x_1d: np.ndarray | None = None
    y_1d: np.ndarray | None = None

    # incremental view-change state(s) tracking
    _last_vr: tuple[float, float] | None = None
    _last_ivdr: tuple[float, float] | None = None

    @property
    def index_step_size(self) -> float:
        '''
        Readonly value computed on first ``.diff()`` call.

        '''
        return self.viz.index_step()

    def diff(
        self,
        new_read: tuple[np.ndarray],

    ) -> tuple[
        np.ndarray,
        np.ndarray,
    ]:
        # TODO:
        # - can the renderer just call ``Viz.read()`` directly? unpack
        #   latest source data read
        # - eventually maybe we can implement some kind of
        #   transform on the ``QPainterPath`` that will more or less
        #   detect the diff in "elements" terms? update diff state since
        #   we've now rendered paths.
        (
            xfirst,
            xlast,
            array,
            ivl,
            ivr,
            in_view,
        ) = new_read

        index = array['index']

        # if the first index in the read array is 0 then
        # it means the source buffer has been completely backfilled to
        # available space.
        src_start = index[0]
        src_stop = index[-1] + 1

        # these are the "formatted output data" indices
        # for the pre-graphics arrays.
        nd_start = self.xy_nd_start
        nd_stop = self.xy_nd_stop

        if (
            nd_start is None
        ):
            assert nd_stop is None

            # setup to do a prepend of all existing src history
            nd_start = self.xy_nd_start = src_stop
            # set us in a zero-to-append state
            nd_stop = self.xy_nd_stop = src_stop

        # compute the length diffs between the first/last index entry in
        # the input data and the last indexes we have on record from the
        # last time we updated the curve index.
        prepend_length = int(nd_start - src_start)
        append_length = int(src_stop - nd_stop)

        # do diffing for prepend, append and last entry
        return (
            slice(src_start, nd_start),
            prepend_length,
            append_length,
            slice(nd_stop, src_stop),
        )
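
    # NOTE: a worked numeric sketch of the `.diff()` bookkeeping: if the
    # formatter last tracked nd indices [100, 200) and a new read reports
    # source indices [90, 210) then,
    #   prepend_length = nd_start - src_start = 100 - 90  = 10
    #   append_length  = src_stop - nd_stop   = 210 - 200 = 10
    #   pre_slice  = slice(90, 100)   # newly back-filled history
    #   post_slice = slice(200, 210)  # freshly appended samples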

    def _track_inview_range(
        self,
        view_range: tuple[int, int],

    ) -> bool:
        # if a view range is passed, plan to draw the
        # source output that's "in view" of the chart.
        vl, vr = view_range
        zoom_or_append = False
        last_vr = self._last_vr

        # incremental in-view data update.
        if last_vr:
            lvl, lvr = last_vr  # relative slice indices

            # TODO: detecting more specifically the interaction changes
            # last_ivr = self._last_ivdr or (vl, vr)
            # al, ar = last_ivr  # abs slice indices
            # left_change = abs(x_iv[0] - al) >= 1
            # right_change = abs(x_iv[-1] - ar) >= 1

            # likely a zoom/pan view change or data append update
            if (
                (vr - lvr) > 2
                or vl < lvl

                # append / prepend update
                # we had an append update where the view range
                # didn't change but the data-viewed (shifted)
                # underneath, so we need to redraw.
                # or left_change and right_change and last_vr == view_range

                # not (left_change and right_change) and ivr
                # (
                # or abs(x_iv[ivr] - livr) > 1
            ):
                zoom_or_append = True

        self._last_vr = view_range

        return zoom_or_append

    def format_to_1d(
        self,
        new_read: tuple,
        array_key: str,
        profiler: Profiler,

        slice_to_inview: bool = True,
        force_full_realloc: bool = False,

    ) -> tuple[
        np.ndarray,
        np.ndarray,
    ]:
        shm = self.shm

        (
            _,
            _,
            array,
            ivl,
            ivr,
            in_view,

        ) = new_read

        (
            pre_slice,
            prepend_len,
            append_len,
            post_slice,
        ) = self.diff(new_read)

        # we first need to allocate xy data arrays
        # from the source data.
        if (
            self.y_nd is None
            or force_full_realloc
        ):
            self.xy_nd_start = shm._first.value
            self.xy_nd_stop = shm._last.value
            self.x_nd, self.y_nd = self.allocate_xy_nd(
                shm,
                array_key,
            )
            profiler('allocated xy history')

        # once allocated we do incremental pre/append
        # updates from the diff with the source buffer.
        else:
            if prepend_len:

                self.incr_update_xy_nd(
                    shm,
                    array_key,

                    # this is the pre-sliced, "normally expected"
                    # new data that an updater would normally be
                    # expected to process, however in some cases (like
                    # step curves) the updater routine may want to do
                    # the source history-data reading itself, so we pass
                    # both here.
                    shm._array[pre_slice],
                    pre_slice,
                    prepend_len,

                    self.xy_nd_start,
                    self.xy_nd_stop,
                    is_append=False,
                )

                self.xy_nd_start -= prepend_len
                profiler(f'prepended xy history: {prepend_len}')

            if append_len:
                self.incr_update_xy_nd(
                    shm,
                    array_key,

                    shm._array[post_slice],
                    post_slice,
                    append_len,

                    self.xy_nd_start,
                    self.xy_nd_stop,
                    is_append=True,
                )
                self.xy_nd_stop += append_len
                profiler(f'appended xy history: {append_len}')
                # sanity
                # slice_ln = post_slice.stop - post_slice.start
                # assert append_len == slice_ln

        view_changed: bool = False
        view_range: tuple[int, int] = (ivl, ivr)
        if slice_to_inview:
            view_changed = self._track_inview_range(view_range)
            array = in_view
            profiler(f'{self.viz.name} view range slice {view_range}')

        # TODO: we need to check if the last-datum-in-view is true and
        # if so only slice to the 2nd last datum.
        # hist = array[:slice_to_head]

        # XXX: WOA WTF TRACTOR DEBUGGING BUGGG
        # assert 0

        # xy-path data transform: convert source data to a format
        # able to be passed to a `QPainterPath` rendering routine.
        if not len(array):
            # XXX: this might be why the profiler only has exits?
            return

        # TODO: hist here should be the pre-sliced
        # x/y_data in the case where allocate_xy is
        # defined?
        x_1d, y_1d, connect = self.format_xy_nd_to_1d(
            array,
            array_key,
            view_range,
        )
        # cache/save last 1d outputs for use by other
        # readers (eg. `Viz.draw_last_datum()` in the
        # only-draw-last-uppx case).
        self.x_1d = x_1d
        self.y_1d = y_1d

        # app_tres = None
        # if append_len:
        #     appended = array[-append_len-1:slice_to_head]
        #     app_tres = self.format_xy_nd_to_1d(
        #         appended,
        #         array_key,
        #         (
        #             view_range[1] - append_len + slice_to_head,
        #             view_range[1]
        #         ),
        #     )
        #     # assert (len(appended) - 1) == append_len
        #     # assert len(appended) == append_len
        #     print(
        #         f'{self.viz.name} APPEND LEN: {append_len}\n'
        #         f'{self.viz.name} APPENDED: {appended}\n'
        #         f'{self.viz.name} app_tres: {app_tres}\n'
        #     )

        # update the last "in view data range"
        if len(x_1d):
            self._last_ivdr = x_1d[0], x_1d[-1]

        profiler('.format_to_1d()')

        return (
            x_1d,
            y_1d,
            connect,
            prepend_len,
            append_len,
            view_changed,
            # app_tres,
        )

    ###############################
    # Sub-type override interface #
    ###############################

    x_offset: np.ndarray = np.array([0])

    # optional pre-graphics xy formatted data which
    # is incrementally updated in sync with the source data.
    # XXX: was ``.allocate_xy()``
    def allocate_xy_nd(
        self,
        src_shm: ShmArray,
        data_field: str,

    ) -> tuple[
        np.ndarray,  # x
        np.ndarray,  # y
    ]:
        '''
        Convert the structured-array ``src_shm`` format to
        an equivalently shaped (and field-less) ``np.ndarray``.

        Eg. a 4 field x N struct-array => (N, 4)

        '''
        y_nd = src_shm._array[data_field].copy()
        x_nd = (
            src_shm._array[self.index_field].copy()
            +
            self.x_offset
        )
        return x_nd, y_nd

    # XXX: was ``.update_xy()``
    def incr_update_xy_nd(
        self,

        src_shm: ShmArray,
        data_field: str,

        new_from_src: np.ndarray,  # portion of source that was updated

        read_slc: slice,
        ln: int,  # len of updated

        nd_start: int,
        nd_stop: int,

        is_append: bool,

    ) -> None:
        # write pushed data to flattened copy
        y_nd_new = new_from_src[data_field]
        self.y_nd[read_slc] = y_nd_new

        x_nd_new = self.x_nd[read_slc]
        x_nd_new[:] = (
            new_from_src[self.index_field]
            +
            self.x_offset
        )

        # x_nd = self.x_nd[self.xy_slice]
        # y_nd = self.y_nd[self.xy_slice]
        # name = self.viz.name
        # if 'trade_rate' == name:
        #     s = 4
        #     print(
        #         f'{name.upper()}:\n'
        #         'NEW_FROM_SRC:\n'
        #         f'new_from_src: {new_from_src}\n\n'

        #         f'PRE self.x_nd:'
        #         f'\n{list(x_nd[-s:])}\n'

        #         f'PRE self.y_nd:\n'
        #         f'{list(y_nd[-s:])}\n\n'

        #         f'TO WRITE:\n'

        #         f'x_nd_new:\n'
        #         f'{x_nd_new[0]}\n'

        #         f'y_nd_new:\n'
        #         f'{y_nd_new}\n'
        #     )

    # XXX: was ``.format_xy()``
    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

    ) -> tuple[
        np.ndarray,  # 1d x
        np.ndarray,  # 1d y
        np.ndarray | str,  # connection array/style
    ]:
        '''
        Default xy-nd array to 1d pre-graphics-path render routine.

        Return single field column data verbatim.

        '''
        # NOTE: we don't include the very last datum which is filled in
        # normally by another graphics object.
        x_1d = array[self.index_field][:-1]
        y_1d = array[array_key][:-1]

        # name = self.viz.name
        # if 'trade_rate' == name:
        #     s = 4
        #     x_nd = list(self.x_nd[self.xy_slice][-s:-1])
        #     y_nd = list(self.y_nd[self.xy_slice][-s:-1])
        #     print(
        #         f'{name}:\n'
        #         f'XY data:\n'
        #         f'x: {x_nd}\n'
        #         f'y: {y_nd}\n\n'
        #         f'x_1d: {list(x_1d[-s:])}\n'
        #         f'y_1d: {list(y_1d[-s:])}\n\n'
        #     )
        return (
            x_1d,
            y_1d,

            # 1d connection array or style-key to
            # ``pg.functions.arrayToQPath()``
            'all',
        )
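
For a plain curve the default routine above is thus an identity mapping
minus the very last datum; a rough sketch with a toy struct-array (the
field names here are assumptions)::

    import numpy as np

    arr = np.array(
        [(0, 1.0), (1, 2.0), (2, 3.0)],
        dtype=[('index', 'i8'), ('close', 'f8')],
    )
    x_1d = arr['index'][:-1]  # -> [0, 1]
    y_1d = arr['close'][:-1]  # -> [1.0, 2.0]
    # the trailing datum is drawn separately by the last-datum graphic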


class OHLCBarsFmtr(IncrementalFormatter):

    x_offset: np.ndarray = np.array([
        -0.5,
        0,
        0,
        0.5,
    ])

    fields: list[str] = field(
        default_factory=lambda: ['open', 'high', 'low', 'close']
    )
    flat_index_ratio: float = 4

    def allocate_xy_nd(
        self,

        ohlc_shm: ShmArray,
        data_field: str,

    ) -> tuple[
        np.ndarray,  # x
        np.ndarray,  # y
    ]:
        '''
        Convert an input struct-array holding OHLC samples into a pair of
        flattened x, y arrays with the same size (datums wise) as the source
        data.

        '''
        y_nd = ohlc_shm.ustruct(self.fields)

        # generate a flat-interpolated x-domain
        x_nd = (
            np.broadcast_to(
                ohlc_shm._array[self.index_field][:, None],
                (
                    ohlc_shm._array.size,
                    # 4,  # only ohlc
                    y_nd.shape[1],
                ),
            )
            +
            self.x_offset
        )
        assert y_nd.any()

        # write pushed data to flattened copy
        return (
            x_nd,
            y_nd,
        )

    def incr_update_xy_nd(
        self,

        src_shm: ShmArray,
        data_field: str,

        new_from_src: np.ndarray,  # portion of source that was updated

        read_slc: slice,
        ln: int,  # len of updated

        nd_start: int,
        nd_stop: int,

        is_append: bool,

    ) -> None:
        # write newly pushed data to flattened copy
        # a struct-arr is always passed in.
        new_y_nd = rfn.structured_to_unstructured(
            new_from_src[self.fields]
        )
        self.y_nd[read_slc] = new_y_nd

        # generate same-valued-per-row x support based on y shape
        x_nd_new = self.x_nd[read_slc]
        x_nd_new[:] = np.broadcast_to(
            new_from_src[self.index_field][:, None],
            new_y_nd.shape,
        ) + self.x_offset

    # TODO: can we drop this frame and just use the above?
    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

        start: int = 0,  # XXX: do we need this?

        # 0.5 is no overlap between arms, 1.0 is full overlap
        gap: float = BGM,

    ) -> tuple[
        np.ndarray,
        np.ndarray,
        np.ndarray,
    ]:
        '''
        More or less a direct proxy to the ``numba``-fied
        ``path_arrays_from_ohlc()`` (above) but with closed in kwargs
        for line spacing.

        '''
        x, y, c = path_arrays_from_ohlc(
            array[:-1],
            start,
            bar_w=self.index_step_size,
            bar_gap=gap * self.index_step_size,

            # XXX: don't ask, due to a ``numba`` bug..
            use_time_index=(self.index_field == 'time'),
        )
        return x, y, c


class OHLCBarsAsCurveFmtr(OHLCBarsFmtr):

    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

    ) -> tuple[
        np.ndarray,
        np.ndarray,
        str,
    ]:
        # TODO: in the case of an existing ``.update_xy()``
        # should we be passing in array as an xy arrays tuple?

        # 2 more datum-indexes to capture zero at end
        x_flat = self.x_nd[self.xy_nd_start:self.xy_nd_stop-1]
        y_flat = self.y_nd[self.xy_nd_start:self.xy_nd_stop-1]

        # slice to view
        ivl, ivr = vr
        x_iv_flat = x_flat[ivl:ivr]
        y_iv_flat = y_flat[ivl:ivr]

        # reshape to 1d for graphics rendering
        y_iv = y_iv_flat.reshape(-1)
        x_iv = x_iv_flat.reshape(-1)

        return x_iv, y_iv, 'all'


class StepCurveFmtr(IncrementalFormatter):

    x_offset: np.ndarray = np.array([
        0,
        1,
    ])

    def allocate_xy_nd(
        self,

        shm: ShmArray,
        data_field: str,

    ) -> tuple[
        np.ndarray,  # x
        np.ndarray,  # y
    ]:
        '''
        Convert an input 1d shm array to a "step array" format
        for use by path graphics generation.

        '''
        i = shm._array[self.index_field].copy()
        out = shm._array[data_field].copy()

        x_out = (
            np.broadcast_to(
                i[:, None],
                (i.size, 2),
            )
            +
            self.x_offset
        )

        # fill out Nx2 array to hold each step's left + right vertices.
        y_out = np.empty(
            x_out.shape,
            dtype=out.dtype,
        )
        # fill in (current) values from source shm buffer
        y_out[:] = out[:, np.newaxis]

        # TODO: pretty sure we can drop this?
        # start y at origin level
        # y_out[0, 0] = 0
        # y_out[self.xy_nd_start] = 0
        return x_out, y_out
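
    # NOTE: a tiny sketch of the step-array expansion above using toy
    # index/value columns,
    #
    #   i = np.array([0, 1, 2])
    #   v = np.array([5., 6., 7.])
    #
    #   x_out = np.broadcast_to(i[:, None], (i.size, 2)) + [0, 1]
    #   # -> [[0, 1], [1, 2], [2, 3]]: each step spans [i, i + 1)
    #
    #   y_out[:] = v[:, np.newaxis]
    #   # -> [[5., 5.], [6., 6.], [7., 7.]]: left/right vertex per level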
|
||||
|
||||
def incr_update_xy_nd(
|
||||
self,
|
||||
|
||||
src_shm: ShmArray,
|
||||
array_key: str,
|
||||
|
||||
new_from_src: np.ndarray, # portion of source that was updated
|
||||
read_slc: slice,
|
||||
ln: int, # len of updated
|
||||
|
||||
nd_start: int,
|
||||
nd_stop: int,
|
||||
|
||||
is_append: bool,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
slice,
|
||||
]:
|
||||
# NOTE: for a step curve we slice from one datum prior
|
||||
# to the current "update slice" to get the previous
|
||||
# "level".
|
||||
#
|
||||
# why this is needed,
|
||||
# - the current new append slice will often have a zero
|
||||
# value in the latest datum-step (at least for zero-on-new
|
||||
# cases like vlm in the) as per configuration of the FSP
|
||||
# engine.
|
||||
# - we need to look back a datum to get the last level which
|
||||
# will be used to terminate/complete the last step x-width
|
||||
# which will be set to pair with the last x-index THIS MEANS
|
||||
#
|
||||
# XXX: this means WE CAN'T USE the append slice since we need to
|
||||
# "look backward" one step to get the needed back-to-zero level
|
||||
# and the update data in ``new_from_src`` will only contain the
|
||||
# latest new data.
|
||||
back_1 = slice(
|
||||
read_slc.start - 1,
|
||||
read_slc.stop,
|
||||
)
|
||||
|
||||
to_write = src_shm._array[back_1]
|
||||
y_nd_new = self.y_nd[back_1]
|
||||
y_nd_new[:] = to_write[array_key][:, None]
|
||||
|
||||
x_nd_new = self.x_nd[read_slc]
|
||||
x_nd_new[:] = (
|
||||
new_from_src[self.index_field][:, None]
|
||||
+
|
||||
self.x_offset
|
||||
)
|
||||
|
||||
# XXX: uncomment for debugging
|
||||
# x_nd = self.x_nd[self.xy_slice]
|
||||
# y_nd = self.y_nd[self.xy_slice]
|
||||
# name = self.viz.name
|
||||
# if 'dolla_vlm' in name:
|
||||
# s = 4
|
||||
# print(
|
||||
# f'{name}:\n'
|
||||
# 'NEW_FROM_SRC:\n'
|
||||
# f'new_from_src: {new_from_src}\n\n'
|
||||
|
||||
# f'PRE self.x_nd:'
|
||||
# f'\n{x_nd[-s:]}\n'
|
||||
# f'PRE self.y_nd:\n'
|
||||
# f'{y_nd[-s:]}\n\n'
|
||||
|
||||
# f'TO WRITE:\n'
|
||||
# f'x_nd_new:\n'
|
||||
# f'{x_nd_new}\n'
|
||||
# f'y_nd_new:\n'
|
||||
# f'{y_nd_new}\n'
|
||||
# )
|
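The "look back one datum" slicing above can be illustrated in isolation; a minimal sketch with made-up indices:

    read_slc = slice(5, 8)   # hypothetical update slice into the buffer
    back_1 = slice(read_slc.start - 1, read_slc.stop)
    # back_1 == slice(4, 8): index 4 is the prior datum whose "level"
    # is needed to terminate the previous step before the new data.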
||||
|
||||
def format_xy_nd_to_1d(
|
||||
self,
|
||||
|
||||
array: np.ndarray,
|
||||
array_key: str,
|
||||
vr: tuple[int, int],
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
str,
|
||||
]:
|
||||
last_t, last = array[-1][[self.index_field, array_key]]
|
||||
|
||||
start = self.xy_nd_start
|
||||
stop = self.xy_nd_stop
|
||||
|
||||
x_step = self.x_nd[start:stop]
|
||||
y_step = self.y_nd[start:stop]
|
||||
|
||||
# slice out in-view data
|
||||
ivl, ivr = vr
|
||||
|
||||
# NOTE: add an extra step to get the vertical-line-down-to-zero
|
||||
# adjacent to the last-datum graphic (filled rect).
|
||||
x_step_iv = x_step[ivl:ivr+1]
|
||||
y_step_iv = y_step[ivl:ivr+1]
|
||||
|
||||
# flatten to 1d
|
||||
x_1d = x_step_iv.reshape(x_step_iv.size)
|
||||
y_1d = y_step_iv.reshape(y_step_iv.size)
|
||||
|
||||
# debugging
|
||||
# if y_1d.any():
|
||||
# s = 6
|
||||
# print(
|
||||
# f'x_step_iv:\n{x_step_iv[-s:]}\n'
|
||||
# f'y_step_iv:\n{y_step_iv[-s:]}\n\n'
|
||||
# f'x_1d:\n{x_1d[-s:]}\n'
|
||||
# f'y_1d:\n{y_1d[-s:]}\n'
|
||||
# )
|
||||
|
||||
return x_1d, y_1d, 'all'
|
||||
|
|
@@ -1,247 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Graphics downsampling using the infamous M4 algorithm.
|
||||
|
||||
This is one of ``piker``'s secret weapons allowing us to boss all other
|
||||
charting platforms B)
|
||||
|
||||
(AND DON'T YOU DARE TAKE THIS CODE WITHOUT CREDIT OR WE'LL SUE UR F#&@* ASS).
|
||||
|
||||
NOTES: this method is a so-called "visualization driven data
|
||||
aggregation" approach. It gives error-free line chart
|
||||
downsampling, see
|
||||
further scientific paper resources:
|
||||
- http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
|
||||
- http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf
|
||||
|
||||
Details on implementation of this algo are based in,
|
||||
https://github.com/pikers/piker/issues/109
|
||||
|
||||
'''
|
||||
import math
|
||||
from typing import Optional
|
||||
|
||||
import numpy as np
|
||||
from numba import (
|
||||
njit,
|
||||
# float64, optional, int64,
|
||||
)
|
||||
|
||||
from ._util import log
|
||||
|
||||
|
||||
def ds_m4(
|
||||
x: np.ndarray,
|
||||
y: np.ndarray,
|
||||
# units-per-pixel-x(dimension)
|
||||
uppx: float,
|
||||
|
||||
# XXX: troll zone / easter egg..
|
||||
# want to mess with ur pal, pass in the actual
|
||||
# pixel width here instead of uppx-proper (i.e. pass
|
||||
# in our ``pg.GraphicsObject`` derivative's ``.px_width()``
|
||||
# to mega-trip-out ur bud). Hint, it used to be implemented
|
||||
# (wrongly) using "pixel width", so check the git history ;)
|
||||
|
||||
xrange: Optional[float] = None,
|
||||
|
||||
) -> Optional[tuple[int, np.ndarray, np.ndarray, float, float]]:
|
||||
'''
|
||||
Downsample using the M4 algorithm.
|
||||
|
||||
This is more or less an OHLC style sampling of a line-style series.
|
||||
|
||||
'''
|
||||
# XXX: from infinite on downsampling viewable graphics:
|
||||
# "one thing i remembered about the binning - if you are
|
||||
# picking a range within your timeseries the start and end bin
|
||||
# should be one more bin size outside the visual range, then
|
||||
# you get better visual fidelity at the edges of the graph"
|
||||
# "i didn't show it in the sample code, but it's accounted for
|
||||
# in the start and end indices and number of bins"
|
||||
|
||||
# should never get called unless actually needed
|
||||
assert uppx > 1
|
||||
|
||||
# NOTE: if we didn't pre-slice the data to downsample
|
||||
# you could in theory pass these as the slicing params,
|
||||
# do we care though since we can always just pre-slice the
|
||||
# input?
|
||||
x_start = x[0] # x value start/lowest in domain
|
||||
|
||||
if xrange is None:
|
||||
x_end = x[-1] # x end value/highest in domain
|
||||
xrange = (x_end - x_start)
|
||||
|
||||
if xrange < 0:
|
||||
log.error(f'-VE M4 X-RANGE: {x_start} -> {x_end}')
|
||||
# XXX: broken x-range calc-case, likely the x-end points
|
||||
# are wrong and have some default value set (such as
|
||||
# x_end -> <some epoch float> while x_start -> 0.5).
|
||||
# breakpoint()
|
||||
return None
|
||||
|
||||
# XXX: always round up on the input pixels
|
||||
# lnx = len(x)
|
||||
# uppx *= max(4 / (1 + math.log(uppx, 2)), 1)
|
||||
|
||||
pxw = math.ceil(xrange / uppx)
|
||||
|
||||
# scale up the frame "width" directly with uppx
|
||||
w = uppx
|
||||
|
||||
# ensure we make more than enough
|
||||
# frames (windows) for the output pixel
|
||||
frames = pxw
|
||||
|
||||
# if we have more than an exact integer's
|
||||
# (uniform quotient output) worth of datum-domain-points
|
||||
# per window-frame, add one more window to ensure
|
||||
# we have room for all output down-samples.
|
||||
pts_per_pixel, r = divmod(xrange, frames)
|
||||
if r:
|
||||
# while r:
|
||||
frames += 1
|
||||
pts_per_pixel, r = divmod(xrange, frames)
|
||||
|
||||
# print(
|
||||
# f'uppx: {uppx}\n'
|
||||
# f'xrange: {xrange}\n'
|
||||
# f'pxw: {pxw}\n'
|
||||
# f'frames: {frames}\n'
|
||||
# )
|
||||
assert frames >= (xrange / uppx)
|
||||
|
||||
# call into ``numba``
|
||||
(
|
||||
nb,
|
||||
x_out,
|
||||
y_out,
|
||||
ymn,
|
||||
ymx,
|
||||
) = _m4(
|
||||
x,
|
||||
y,
|
||||
|
||||
frames,
|
||||
|
||||
# TODO: see func below..
|
||||
# x_out,
|
||||
# y_out,
|
||||
|
||||
# first index in x data to start at
|
||||
x_start,
|
||||
# window size for each "frame" of data to downsample (normally
|
||||
# scaled by the ratio of pixels on screen to data in x-range).
|
||||
w,
|
||||
)
|
||||
|
||||
# filter out any overshoot in the input allocation arrays by
|
||||
# removing zero-ed tail entries which should start at a certain
|
||||
# index.
|
||||
x_out = x_out[x_out != 0]
|
||||
y_out = y_out[:x_out.size]
|
||||
|
||||
# print(f'M4 output ymn, ymx: {ymn},{ymx}')
|
||||
return nb, x_out, y_out, ymn, ymx
|
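A minimal usage sketch (the series and ``uppx`` value are made up; per the note above the input should be pre-sliced to the viewable x-range):

    import numpy as np

    x = np.arange(1, 10_001, dtype=np.float64)
    y = np.sin(x / 50.)
    out = ds_m4(x, y, uppx=4.)   # ~4 datums per output pixel-frame
    if out is not None:
        nb, x_out, y_out, ymn, ymx = out
        # y_out rows hold (first, min, max, last) per frame, OHLC-style.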
||||
|
||||
|
||||
@njit(
|
||||
nogil=True,
|
||||
)
|
||||
def _m4(
|
||||
|
||||
xs: np.ndarray,
|
||||
ys: np.ndarray,
|
||||
|
||||
frames: int,
|
||||
|
||||
# TODO: using this approach, having the ``.zeros()`` alloc lines
|
||||
# below in pure python, there were segs faults and alloc crashes..
|
||||
# we might need to see how it behaves with shm arrays and consider
|
||||
# allocating them once at startup?
|
||||
|
||||
# pre-alloc array of x indices mapping to the start
|
||||
# of each window used for downsampling in y.
|
||||
# i_win: np.ndarray,
|
||||
# pre-alloc array of output downsampled y values
|
||||
# y_out: np.ndarray,
|
||||
|
||||
x_start: int,
|
||||
step: float,
|
||||
|
||||
) -> tuple[
|
||||
int,
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
float,
|
||||
float,
|
||||
]:
|
||||
'''
|
||||
Implementation of the m4 algorithm in ``numba``:
|
||||
http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
|
||||
|
||||
'''
|
||||
# these are pre-allocated and mutated by ``numba``
|
||||
# code in-place.
|
||||
y_out = np.zeros((frames, 4), ys.dtype)
|
||||
x_out = np.zeros(frames, xs.dtype)
|
||||
|
||||
bincount = 0
|
||||
x_left = x_start
|
||||
|
||||
# Find the first window's starting value which *includes* the
|
||||
# first value in the x-domain array, i.e. the first
|
||||
# "left-side-of-window" **plus** the downsampling step,
|
||||
# creates a window which includes the first x **value**.
|
||||
while xs[0] >= x_left + step:
|
||||
x_left += step
|
||||
|
||||
# set all bins in the left-most entry to the starting left-most x value
|
||||
# (aka a row broadcast).
|
||||
x_out[bincount] = x_left
|
||||
# set all y-values to the first value passed in.
|
||||
y_out[bincount] = ys[0]
|
||||
|
||||
# full input y-data mx and mn
|
||||
mx: float = -np.inf
|
||||
mn: float = np.inf
|
||||
|
||||
# compute OHLC style max / min values per window sized x-frame.
|
||||
for i in range(len(xs)):
|
||||
|
||||
x = xs[i]
|
||||
y = ys[i]
|
||||
|
||||
if x < x_left + step: # the current window "step" is [bin, bin+1)
|
||||
ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
|
||||
ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
|
||||
y_out[bincount, 3] = y
|
||||
mx = max(mx, ymx)
|
||||
mn = min(mn, ymn)
|
||||
|
||||
else:
|
||||
# Find the next bin
|
||||
while x >= x_left + step:
|
||||
x_left += step
|
||||
|
||||
bincount += 1
|
||||
x_out[bincount] = x_left
|
||||
y_out[bincount] = y
|
||||
|
||||
return bincount, x_out, y_out, mn, mx
|
||||
|
|
@@ -1,5 +1,5 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
|
|
@@ -14,16 +14,25 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
IB api client data feed reset hack for i3.
|
||||
"""
|
||||
Stream format enforcement.
|
||||
"""
|
||||
|
||||
Delegates to ``i3ipc`` python lib to detect the correct local
|
||||
window to click-activate and ``xdotool`` to send the mouse
|
||||
events to said window.
|
||||
from typing import AsyncIterator, Optional, Tuple
|
||||
|
||||
'''
|
||||
from piker.brokers.ib._util import i3ipc_xdotool_manual_click_hack
|
||||
import numpy as np
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
i3ipc_xdotool_manual_click_hack()
|
||||
def iterticks(
|
||||
quote: dict,
|
||||
types: Tuple[str] = ('trade', 'utrade'),
|
||||
) -> AsyncIterator:
|
||||
"""Iterate through ticks delivered per quote cycle.
|
||||
"""
|
||||
# print(f"{quote}\n\n")
|
||||
ticks = quote.get('ticks', ())
|
||||
if ticks:
|
||||
for tick in ticks:
|
||||
# print(f"{quote['symbol']}: {tick}")
|
||||
if tick.get('type') in types:
|
||||
yield tick
|
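Note that despite the ``AsyncIterator`` annotation the body above is a plain synchronous generator, so a per-quote-cycle usage sketch (payload shape is hypothetical) is just:

    quote = {
        'symbol': 'xyz',
        'ticks': [
            {'type': 'trade', 'price': 101.25, 'size': 3},
            {'type': 'bid', 'price': 101.20, 'size': 10},
        ],
    }
    for tick in iterticks(quote, types=('trade',)):
        print(tick['price'], tick['size'])   # only the 'trade' tick yields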
||||
|
|
@@ -1,281 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
"""
|
||||
Super fast ``QPainterPath`` generation related operator routines.
|
||||
|
||||
"""
|
||||
import numpy as np
|
||||
from numpy.lib import recfunctions as rfn
|
||||
from numba import (
|
||||
# types,
|
||||
njit,
|
||||
float64,
|
||||
int64,
|
||||
# optional,
|
||||
)
|
||||
|
||||
# TODO: for ``numba`` typing..
|
||||
# from ._source import numba_ohlc_dtype
|
||||
from ._m4 import ds_m4
|
||||
|
||||
|
||||
def xy_downsample(
|
||||
x,
|
||||
y,
|
||||
uppx,
|
||||
|
||||
x_spacer: float = 0.5,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
float,
|
||||
float,
|
||||
]:
|
||||
'''
|
||||
Downsample 1D (flat ``numpy.ndarray``) arrays using M4 given an input
|
||||
``uppx`` (units-per-pixel) and add space between discrete datums.
|
||||
|
||||
'''
|
||||
# downsample whenever more than 1 pixel per datum can be shown.
|
||||
# always refresh data bounds until we get diffing
|
||||
# working properly, see above..
|
||||
m4_out = ds_m4(
|
||||
x,
|
||||
y,
|
||||
uppx,
|
||||
)
|
||||
|
||||
if m4_out is not None:
|
||||
bins, x, y, ymn, ymx = m4_out
|
||||
# flatten output to 1d arrays suitable for path-graphics generation.
|
||||
x = np.broadcast_to(x[:, None], y.shape)
|
||||
x = (x + np.array(
|
||||
[-x_spacer, 0, 0, x_spacer]
|
||||
)).flatten()
|
||||
y = y.flatten()
|
||||
|
||||
return x, y, ymn, ymx
|
||||
|
||||
# XXX: we accept a None output for the case where the input range
|
||||
# to ``ds_m4()`` is bad (-ve) and we want to catch and debug
|
||||
# that (seemingly super rare) circumstance..
|
||||
return None
|
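The x-spacer flattening above can be seen with toy values; a sketch (numbers are illustrative):

    import numpy as np

    x = np.array([10., 11.])              # per-frame x positions from ds_m4
    y = np.array([[1.0, 0.5, 2.0, 1.5],   # (first, min, max, last) rows
                  [1.5, 1.2, 1.9, 1.4]])

    xs = np.broadcast_to(x[:, None], y.shape)
    xs = (xs + np.array([-0.5, 0, 0, 0.5])).flatten()
    # -> [9.5, 10., 10., 10.5, 10.5, 11., 11., 11.5]
    # each frame's 4 y-points get spread across its pixel x-width.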
||||
|
||||
|
||||
@njit(
|
||||
# NOTE: need to construct this manually for readonly
|
||||
# arrays, see https://github.com/numba/numba/issues/4511
|
||||
# (
|
||||
# types.Array(
|
||||
# numba_ohlc_dtype,
|
||||
# 1,
|
||||
# 'C',
|
||||
# readonly=True,
|
||||
# ),
|
||||
# int64,
|
||||
# types.unicode_type,
|
||||
# optional(float64),
|
||||
# ),
|
||||
nogil=True
|
||||
)
|
||||
def path_arrays_from_ohlc(
|
||||
data: np.ndarray,
|
||||
start: int64,
|
||||
bar_w: float64,
|
||||
bar_gap: float64 = 0.16,
|
||||
use_time_index: bool = True,
|
||||
|
||||
# XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622
|
||||
# index_field: str,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
]:
|
||||
'''
|
||||
Generate an array of lines objects from input ohlc data.
|
||||
|
||||
'''
|
||||
size = int(data.shape[0] * 6)
|
||||
|
||||
# XXX: see this for why the dtype might have to be defined outside
|
||||
# the routine.
|
||||
# https://github.com/numba/numba/issues/4098#issuecomment-493914533
|
||||
x = np.zeros(
|
||||
shape=size,
|
||||
dtype=float64,
|
||||
)
|
||||
y, c = x.copy(), x.copy()
|
||||
|
||||
half_w: float = bar_w/2
|
||||
|
||||
# TODO: report bug for assert @
|
||||
# ../piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
|
||||
for i, q in enumerate(data[start:], start):
|
||||
|
||||
open = q['open']
|
||||
high = q['high']
|
||||
low = q['low']
|
||||
close = q['close']
|
||||
|
||||
if use_time_index:
|
||||
index = float64(q['time'])
|
||||
else:
|
||||
index = float64(q['index'])
|
||||
|
||||
# XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622
|
||||
# index = float64(q[index_field])
|
||||
# AND this (probably)
|
||||
# open, high, low, close, index = q[
|
||||
# ['open', 'high', 'low', 'close', 'index']]
|
||||
|
||||
istart = i * 6
|
||||
istop = istart + 6
|
||||
|
||||
# x,y detail the 6 points which connect all vertices of an ohlc bar
|
||||
mid: float = index + half_w
|
||||
x[istart:istop] = (
|
||||
index + bar_gap,
|
||||
mid,
|
||||
mid,
|
||||
mid,
|
||||
mid,
|
||||
index + bar_w - bar_gap,
|
||||
)
|
||||
y[istart:istop] = (
|
||||
open,
|
||||
open,
|
||||
low,
|
||||
high,
|
||||
close,
|
||||
close,
|
||||
)
|
||||
|
||||
# specifies that the first edge is never connected to the
|
||||
# prior bar's last edge thus providing a small "gap"/"space"
|
||||
# between bars determined by ``bar_gap``.
|
||||
c[istart:istop] = (1, 1, 1, 1, 1, 0)
|
||||
|
||||
return x, y, c
|
||||
|
||||
|
||||
def hl2mxmn(
|
||||
ohlc: np.ndarray,
|
||||
index_field: str = 'index',
|
||||
|
||||
) -> np.ndarray:
|
||||
'''
|
||||
Convert a OHLC struct-array containing 'high'/'low' columns
|
||||
to a "joined" max/min 1-d array.
|
||||
|
||||
'''
|
||||
index = ohlc[index_field]
|
||||
hls = ohlc[[
|
||||
'low',
|
||||
'high',
|
||||
]]
|
||||
|
||||
mxmn = np.empty(2*hls.size, dtype=np.float64)
|
||||
x = np.empty(2*hls.size, dtype=np.float64)
|
||||
trace_hl(hls, mxmn, x, index[0])
|
||||
x = x + index[0]
|
||||
|
||||
return mxmn, x
|
||||
|
||||
|
||||
@njit(
|
||||
# TODO: the type annots..
|
||||
# float64[:](float64[:],),
|
||||
)
|
||||
def trace_hl(
|
||||
hl: 'np.ndarray',
|
||||
out: np.ndarray,
|
||||
x: np.ndarray,
|
||||
start: int,
|
||||
|
||||
# the "offset" values in the x-domain which
|
||||
# place the 2 output points around each ``int``
|
||||
# master index.
|
||||
margin: float = 0.43,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
"Trace" the outline of the high-low values of an ohlc sequence
|
||||
as a line such that the maximum deviation (aka dispersion) between
|
||||
bars is preserved.
|
||||
|
||||
This routine is expected to modify input arrays in-place.
|
||||
|
||||
'''
|
||||
last_l = hl['low'][0]
|
||||
last_h = hl['high'][0]
|
||||
|
||||
for i in range(hl.size):
|
||||
row = hl[i]
|
||||
lo, hi = row['low'], row['high']
|
||||
|
||||
up_diff = hi - last_l
|
||||
down_diff = last_h - lo
|
||||
|
||||
if up_diff > down_diff:
|
||||
out[2*i + 1] = hi
|
||||
out[2*i] = last_l
|
||||
else:
|
||||
out[2*i + 1] = lo
|
||||
out[2*i] = last_h
|
||||
|
||||
last_l = lo
|
||||
last_h = hi
|
||||
|
||||
x[2*i] = int(i) - margin
|
||||
x[2*i + 1] = int(i) + margin
|
||||
|
||||
return out
|
||||
|
||||
|
||||
def ohlc_flatten(
|
||||
ohlc: np.ndarray,
|
||||
use_mxmn: bool = True,
|
||||
index_field: str = 'index',
|
||||
|
||||
) -> tuple[np.ndarray, np.ndarray]:
|
||||
'''
|
||||
Convert an OHLCV struct-array into a flat ready-for-line-plotting
|
||||
1-d array that is 4 times the size with x-domain values distributed
|
||||
evenly (by 0.5 steps) over each index.
|
||||
|
||||
'''
|
||||
index = ohlc[index_field]
|
||||
|
||||
if use_mxmn:
|
||||
# traces a line optimally over highs to lows
|
||||
# using numba. NOTE: pretty sure this is faster
|
||||
# and looks about the same as the below output.
|
||||
flat, x = hl2mxmn(ohlc)
|
||||
|
||||
else:
|
||||
flat = rfn.structured_to_unstructured(
|
||||
ohlc[['open', 'high', 'low', 'close']]
|
||||
).flatten()
|
||||
|
||||
x = np.linspace(
|
||||
start=index[0] - 0.5,
|
||||
stop=index[-1] + 0.5,
|
||||
num=len(flat),
|
||||
)
|
||||
return x, flat
|
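A small sketch of the non-``numba`` path (hypothetical 2-bar struct-array; ``use_mxmn=False`` skips the jitted ``trace_hl`` route):

    import numpy as np

    ohlc = np.array(
        [(0, 1.0, 2.0, 0.5, 1.5),
         (1, 1.5, 1.9, 1.2, 1.4)],
        dtype=[('index', int), ('open', float), ('high', float),
               ('low', float), ('close', float)],
    )
    x, flat = ohlc_flatten(ohlc, use_mxmn=False)
    assert len(flat) == 4 * len(ohlc)      # o, h, l, c per bar
    assert x[0] == -0.5 and x[-1] == 1.5   # 0.5-step spread per index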
||||
File diff suppressed because it is too large
|
|
@@ -1,5 +1,5 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
|
|
@@ -15,57 +15,48 @@
|
|||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
NumPy compatible shared memory buffers for real-time IPC streaming.
|
||||
NumPy compatible shared memory buffers for real-time FSP.
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from dataclasses import dataclass, asdict
|
||||
from sys import byteorder
|
||||
import time
|
||||
from typing import Optional
|
||||
from typing import List, Tuple, Optional
|
||||
from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
|
||||
from multiprocessing import resource_tracker as mantracker
|
||||
|
||||
if _USE_POSIX:
|
||||
from _posixshmem import shm_unlink
|
||||
|
||||
# import msgspec
|
||||
import numpy as np
|
||||
from numpy.lib import recfunctions as rfn
|
||||
import tractor
|
||||
import numpy as np
|
||||
|
||||
from ._util import log
|
||||
from ._source import def_iohlcv_fields
|
||||
from piker.types import Struct
|
||||
from ..log import get_logger
|
||||
from ._source import base_ohlc_dtype, base_iohlc_dtype
|
||||
|
||||
|
||||
def cuckoff_mantracker():
|
||||
'''
|
||||
Disable all ``multiprocessing`` "resource tracking" machinery since
|
||||
it's an absolute multi-threaded mess of non-SC madness.
|
||||
|
||||
'''
|
||||
from multiprocessing import resource_tracker as mantracker
|
||||
|
||||
# Tell the "resource tracker" thing to fuck off.
|
||||
class ManTracker(mantracker.ResourceTracker):
|
||||
def register(self, name, rtype):
|
||||
pass
|
||||
|
||||
def unregister(self, name, rtype):
|
||||
pass
|
||||
|
||||
def ensure_running(self):
|
||||
pass
|
||||
|
||||
# "know your land and know your prey"
|
||||
# https://www.dailymotion.com/video/x6ozzco
|
||||
mantracker._resource_tracker = ManTracker()
|
||||
mantracker.register = mantracker._resource_tracker.register
|
||||
mantracker.ensure_running = mantracker._resource_tracker.ensure_running
|
||||
mantracker.unregister = mantracker._resource_tracker.unregister
|
||||
mantracker.getfd = mantracker._resource_tracker.getfd
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
cuckoff_mantracker()
|
||||
# Tell the "resource tracker" thing to fuck off.
|
||||
class ManTracker(mantracker.ResourceTracker):
|
||||
def register(self, name, rtype):
|
||||
pass
|
||||
|
||||
def unregister(self, name, rtype):
|
||||
pass
|
||||
|
||||
def ensure_running(self):
|
||||
pass
|
||||
|
||||
|
||||
# "know your land and know your prey"
|
||||
# https://www.dailymotion.com/video/x6ozzco
|
||||
mantracker._resource_tracker = ManTracker()
|
||||
mantracker.register = mantracker._resource_tracker.register
|
||||
mantracker.ensure_running = mantracker._resource_tracker.ensure_running
|
||||
ensure_running = mantracker._resource_tracker.ensure_running
|
||||
mantracker.unregister = mantracker._resource_tracker.unregister
|
||||
mantracker.getfd = mantracker._resource_tracker.getfd
|
||||
|
||||
|
||||
class SharedInt:
|
||||
|
|
@@ -91,46 +82,30 @@ class SharedInt:
|
|||
if _USE_POSIX:
|
||||
# We manually unlink to bypass all the "resource tracker"
|
||||
# nonsense meant for non-SC systems.
|
||||
name = self._shm.name
|
||||
try:
|
||||
shm_unlink(name)
|
||||
except FileNotFoundError:
|
||||
# might be a teardown race here?
|
||||
log.warning(f'Shm for {name} already unlinked?')
|
||||
shm_unlink(self._shm.name)
|
||||
|
||||
|
||||
class _Token(Struct, frozen=True):
|
||||
'''
|
||||
Internal representation of a shared memory "token"
|
||||
@dataclass
|
||||
class _Token:
|
||||
"""Internal represenation of a shared memory "token"
|
||||
which can be used to key a system wide post shm entry.
|
||||
|
||||
'''
|
||||
"""
|
||||
shm_name: str # this serves as a "key" value
|
||||
shm_first_index_name: str
|
||||
shm_last_index_name: str
|
||||
dtype_descr: tuple
|
||||
size: int # in struct-array index / row terms
|
||||
dtype_descr: List[Tuple[str]]
|
||||
|
||||
@property
|
||||
def dtype(self) -> np.dtype:
|
||||
return np.dtype(list(map(tuple, self.dtype_descr))).descr
|
||||
def __post_init__(self):
|
||||
# np.array requires a list for dtype
|
||||
self.dtype_descr = np.dtype(list(map(tuple, self.dtype_descr))).descr
|
||||
|
||||
def as_msg(self):
|
||||
return self.to_dict()
|
||||
return asdict(self)
|
||||
|
||||
@classmethod
|
||||
def from_msg(cls, msg: dict) -> _Token:
|
||||
if isinstance(msg, _Token):
|
||||
return msg
|
||||
def from_msg(self, msg: dict) -> '_Token':
|
||||
return msg if isinstance(msg, _Token) else _Token(**msg)
|
||||
|
||||
# TODO: native struct decoding
|
||||
# return _token_dec.decode(msg)
|
||||
|
||||
msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
|
||||
return _Token(**msg)
|
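A sketch of the token round-trip in its IPC-msg (``dict``) form, assuming the new frozen-``Struct`` variant above (the key name is made up):

    token = _make_token(key='quote.xyz', size=1000)
    msg = token.as_msg()                   # plain dict, IPC friendly
    assert _Token.from_msg(msg) == token   # dtype_descr re-tupled on decode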
||||
|
||||
|
||||
# _token_dec = msgspec.msgpack.Decoder(_Token)
|
||||
|
||||
# TODO: this api?
|
||||
# _known_tokens = tractor.ActorVar('_shm_tokens', {})
|
||||
|
|
@@ -150,27 +125,22 @@ def get_shm_token(key: str) -> _Token:
|
|||
|
||||
def _make_token(
|
||||
key: str,
|
||||
size: int,
|
||||
dtype: Optional[np.dtype] = None,
|
||||
) -> _Token:
|
||||
'''
|
||||
Create a serializable token that can be used
|
||||
"""Create a serializable token that can be used
|
||||
to access a shared array.
|
||||
|
||||
'''
|
||||
dtype = def_iohlcv_fields if dtype is None else dtype
|
||||
"""
|
||||
dtype = base_iohlc_dtype if dtype is None else dtype
|
||||
return _Token(
|
||||
shm_name=key,
|
||||
shm_first_index_name=key + "_first",
|
||||
shm_last_index_name=key + "_last",
|
||||
dtype_descr=tuple(np.dtype(dtype).descr),
|
||||
size=size,
|
||||
key,
|
||||
key + "_first",
|
||||
key + "_last",
|
||||
np.dtype(dtype).descr
|
||||
)
|
||||
|
||||
|
||||
class ShmArray:
|
||||
'''
|
||||
A shared memory ``numpy`` (compatible) array API.
|
||||
"""A shared memory ``numpy`` (compatible) array API.
|
||||
|
||||
An underlying shared memory buffer is allocated based on
|
||||
a user specified ``numpy.ndarray``. This fixed size array
|
||||
|
|
@@ -180,7 +150,7 @@ class ShmArray:
|
|||
``SharedInt`` interfaces) values such that multiple processes can
|
||||
interact with the same array using a synchronized-index.
|
||||
|
||||
'''
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
shmarr: np.ndarray,
|
||||
|
|
@@ -198,25 +168,19 @@ class ShmArray:
|
|||
|
||||
self._len = len(shmarr)
|
||||
self._shm = shm
|
||||
self._post_init: bool = False
|
||||
|
||||
# pushing data does not write the index (aka primary key)
|
||||
dtype = shmarr.dtype
|
||||
if dtype.fields:
|
||||
self._write_fields = list(shmarr.dtype.fields.keys())[1:]
|
||||
else:
|
||||
self._write_fields = None
|
||||
self._write_fields = list(shmarr.dtype.fields.keys())[1:]
|
||||
|
||||
# TODO: ringbuf api?
|
||||
|
||||
@property
|
||||
def _token(self) -> _Token:
|
||||
return _Token(
|
||||
shm_name=self._shm.name,
|
||||
shm_first_index_name=self._first._shm.name,
|
||||
shm_last_index_name=self._last._shm.name,
|
||||
dtype_descr=tuple(self._array.dtype.descr),
|
||||
size=self._len,
|
||||
self._shm.name,
|
||||
self._first._shm.name,
|
||||
self._last._shm.name,
|
||||
self._array.dtype.descr,
|
||||
)
|
||||
|
||||
@property
|
||||
|
|
@@ -232,154 +196,44 @@ class ShmArray:
|
|||
|
||||
@property
|
||||
def array(self) -> np.ndarray:
|
||||
'''
|
||||
Return an up-to-date ``np.ndarray`` view of the
|
||||
so-far-written data to the underlying shm buffer.
|
||||
|
||||
'''
|
||||
a = self._array[self._first.value:self._last.value]
|
||||
|
||||
# first, last = self._first.value, self._last.value
|
||||
# a = self._array[first:last]
|
||||
|
||||
# TODO: eventually comment this once we've not seen it in the
|
||||
# wild in a long time..
|
||||
# XXX: race where first/last indexes cause a reader
|
||||
# to load an empty array..
|
||||
if len(a) == 0 and self._post_init:
|
||||
raise RuntimeError('Empty array race condition hit!?')
|
||||
|
||||
return a
|
||||
|
||||
def ustruct(
|
||||
self,
|
||||
fields: Optional[list[str]] = None,
|
||||
|
||||
# type that all field values will be cast to
|
||||
# in the returned view.
|
||||
common_dtype: np.dtype = float,
|
||||
|
||||
) -> np.ndarray:
|
||||
|
||||
array = self._array
|
||||
|
||||
if fields:
|
||||
selection = array[fields]
|
||||
# fcount = len(fields)
|
||||
else:
|
||||
selection = array
|
||||
# fcount = len(array.dtype.fields)
|
||||
|
||||
# XXX: manual ``.view()`` attempt that also doesn't work.
|
||||
# uview = selection.view(
|
||||
# dtype='<f16',
|
||||
# ).reshape(-1, 4, order='A')
|
||||
|
||||
# assert len(selection) == len(uview)
|
||||
|
||||
u = rfn.structured_to_unstructured(
|
||||
selection,
|
||||
# dtype=float,
|
||||
copy=True,
|
||||
)
|
||||
|
||||
# unstruct = np.ndarray(u.shape, dtype=a.dtype, buffer=shm.buf)
|
||||
# array[:] = a[:]
|
||||
return u
|
||||
# return ShmArray(
|
||||
# shmarr=u,
|
||||
# first=self._first,
|
||||
# last=self._last,
|
||||
# shm=self._shm
|
||||
# )
|
||||
return self._array[self._first.value:self._last.value]
|
||||
|
||||
def last(
|
||||
self,
|
||||
length: int = 1,
|
||||
|
||||
) -> np.ndarray:
|
||||
'''
|
||||
Return the last ``length``'s worth of ("row") entries from the
|
||||
array.
|
||||
|
||||
'''
|
||||
return self.array[-length:]
|
||||
|
||||
def push(
|
||||
self,
|
||||
data: np.ndarray,
|
||||
|
||||
field_map: Optional[dict[str, str]] = None,
|
||||
prepend: bool = False,
|
||||
update_first: bool = True,
|
||||
start: int | None = None,
|
||||
|
||||
) -> int:
|
||||
'''
|
||||
Ring buffer like "push" to append data
|
||||
"""Ring buffer like "push" to append data
|
||||
into the buffer and return updated "last" index.
|
||||
|
||||
NB: no actual ring logic yet to give a "loop around" on overflow
|
||||
condition, lel.
|
||||
|
||||
'''
|
||||
"""
|
||||
length = len(data)
|
||||
|
||||
if prepend:
|
||||
index = (start or self._first.value) - length
|
||||
|
||||
if index < 0:
|
||||
raise ValueError(
|
||||
f'Array size of {self._len} was overrun during prepend.\n'
|
||||
f'You have passed {abs(index)} too many datums.'
|
||||
)
|
||||
|
||||
index = self._first.value - length
|
||||
else:
|
||||
index = start if start is not None else self._last.value
|
||||
index = self._last.value
|
||||
|
||||
end = index + length
|
||||
|
||||
if field_map:
|
||||
src_names, dst_names = zip(*field_map.items())
|
||||
else:
|
||||
dst_names = src_names = self._write_fields
|
||||
fields = self._write_fields
|
||||
|
||||
try:
|
||||
self._array[
|
||||
list(dst_names)
|
||||
][index:end] = data[list(src_names)][:]
|
||||
|
||||
# NOTE: there was a race here between updating
|
||||
# the first and last indices and when the next reader
|
||||
# tries to access ``.array`` (which due to the index
|
||||
# overlap will be empty). Pretty sure we've fixed it now
|
||||
# but leaving this here as a reminder.
|
||||
if (
|
||||
prepend
|
||||
and update_first
|
||||
and length
|
||||
):
|
||||
assert index < self._first.value
|
||||
|
||||
if (
|
||||
index < self._first.value
|
||||
and update_first
|
||||
):
|
||||
assert prepend, 'prepend=True not passed but index decreased?'
|
||||
self._array[fields][index:end] = data[fields][:]
|
||||
if prepend:
|
||||
self._first.value = index
|
||||
|
||||
elif not prepend:
|
||||
else:
|
||||
self._last.value = end
|
||||
|
||||
self._post_init = True
|
||||
return end
|
||||
|
||||
except ValueError as err:
|
||||
if field_map:
|
||||
raise
|
||||
|
||||
# should raise if diff detected
|
||||
# shoudl raise if diff detected
|
||||
self.diff_err_fields(data)
|
||||
|
||||
raise err
|
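Tying the above together, a writer-side sketch assuming the new-variant API (explicit ``size``, hypothetical key) and that this runs inside a ``tractor`` actor (the teardown hooks registered below need a current actor):

    shm = open_shm_array(
        size=1000,
        key='ohlcv.demo',
        dtype=np.dtype(def_iohlcv_fields),
    )
    new = shm._array[:3].copy()   # any same-dtype struct-array
    end = shm.push(new)           # append; returns updated last index
    shm.push(new, prepend=True)   # or backfill before ._first.value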
||||
|
||||
def diff_err_fields(
|
||||
|
|
@@ -404,7 +258,6 @@ class ShmArray:
|
|||
f"Input array has unknown field(s): {only_in_theirs}"
|
||||
)
|
||||
|
||||
# TODO: support "silent" prepends that don't update ._first.value?
|
||||
def prepend(
|
||||
self,
|
||||
data: np.ndarray,
|
||||
|
|
@@ -431,20 +284,21 @@ class ShmArray:
|
|||
...
|
||||
|
||||
|
||||
def open_shm_array(
|
||||
size: int,
|
||||
key: str | None = None,
|
||||
dtype: np.dtype | None = None,
|
||||
append_start_index: int | None = None,
|
||||
readonly: bool = False,
|
||||
# how much is probably dependent on lifestyle
|
||||
_secs_in_day = int(60 * 60 * 12)
|
||||
_default_size = 2 * _secs_in_day
|
||||
|
||||
def open_shm_array(
|
||||
key: Optional[str] = None,
|
||||
size: int = _default_size,
|
||||
dtype: Optional[np.dtype] = None,
|
||||
readonly: bool = False,
|
||||
) -> ShmArray:
|
||||
'''Open a shared memory ``numpy`` array using the standard library.
|
||||
"""Open a memory shared ``numpy`` using the standard library.
|
||||
|
||||
This call unlinks (aka permanently destroys) the buffer on teardown
|
||||
and thus should be used from the parent-most accessor (process).
|
||||
|
||||
'''
|
||||
"""
|
||||
# create new shared mem segment for which we
|
||||
# have write permission
|
||||
a = np.zeros(size, dtype=dtype)
|
||||
|
|
@@ -455,18 +309,13 @@ def open_shm_array(
|
|||
create=True,
|
||||
size=a.nbytes
|
||||
)
|
||||
array = np.ndarray(
|
||||
a.shape,
|
||||
dtype=a.dtype,
|
||||
buffer=shm.buf
|
||||
)
|
||||
array = np.ndarray(a.shape, dtype=a.dtype, buffer=shm.buf)
|
||||
array[:] = a[:]
|
||||
array.setflags(write=int(not readonly))
|
||||
|
||||
token = _make_token(
|
||||
key=key,
|
||||
size=size,
|
||||
dtype=dtype,
|
||||
dtype=dtype
|
||||
)
|
||||
|
||||
# create single entry arrays for storing the first and last indices
|
||||
|
|
@@ -486,27 +335,7 @@ def open_shm_array(
|
|||
)
|
||||
)
|
||||
|
||||
# start the "real-time" updated section after 3-days worth of 1s
|
||||
# sampled OHLC. this allows appending up to a day's worth from
|
||||
# tick/quote feeds before having to flush to a (tsdb) storage
|
||||
# backend, and looks something like,
|
||||
# -------------------------
|
||||
# | | i
|
||||
# _________________________
|
||||
# <-------------> <------->
|
||||
# history real-time
|
||||
#
|
||||
# Once fully "prepended", the history section will leave the
|
||||
# ``ShmArray._start.value: int = 0`` and the yet-to-be written
|
||||
# real-time section will start at ``ShmArray.index: int``.
|
||||
|
||||
# this sets the index to nearly 2/3rds into the length of
|
||||
# the buffer leaving at least a "days worth of second samples"
|
||||
# for the real-time section.
|
||||
if append_start_index is None:
|
||||
append_start_index = round(size * 0.616)
|
||||
|
||||
last.value = first.value = append_start_index
|
||||
last.value = first.value = int(_secs_in_day)
|
||||
|
||||
shmarr = ShmArray(
|
||||
array,
|
||||
|
|
@@ -520,55 +349,34 @@ def open_shm_array(
|
|||
|
||||
# "unlink" created shm on process teardown by
|
||||
# pushing teardown calls onto actor context stack
|
||||
stack = tractor.current_actor().lifetime_stack
|
||||
stack.callback(shmarr.close)
|
||||
stack.callback(shmarr.destroy)
|
||||
tractor._actor._lifetime_stack.callback(shmarr.close)
|
||||
tractor._actor._lifetime_stack.callback(shmarr.destroy)
|
||||
|
||||
return shmarr
|
||||
|
||||
|
||||
def attach_shm_array(
|
||||
token: tuple[str, str, tuple[str, str]],
|
||||
token: Tuple[str, str, Tuple[str, str]],
|
||||
size: int = _default_size,
|
||||
readonly: bool = True,
|
||||
|
||||
) -> ShmArray:
|
||||
'''
|
||||
Attach to an existing shared memory array previously
|
||||
"""Attach to an existing shared memory array previously
|
||||
created by another process using ``open_shared_array``.
|
||||
|
||||
No new shared mem is allocated but wrapper types for read/write
|
||||
access are constructed.
|
||||
|
||||
'''
|
||||
"""
|
||||
token = _Token.from_msg(token)
|
||||
key = token.shm_name
|
||||
|
||||
if key in _known_tokens:
|
||||
assert _Token.from_msg(_known_tokens[key]) == token, "WTF"
|
||||
|
||||
# XXX: ugh, looks like due to the ``shm_open()`` C api we can't
|
||||
# actually place files in a subdir, see discussion here:
|
||||
# https://stackoverflow.com/a/11103289
|
||||
|
||||
# attach to array buffer and view as per dtype
|
||||
_err: Optional[Exception] = None
|
||||
for _ in range(3):
|
||||
try:
|
||||
shm = SharedMemory(
|
||||
name=key,
|
||||
create=False,
|
||||
)
|
||||
break
|
||||
except OSError as oserr:
|
||||
_err = oserr
|
||||
time.sleep(0.1)
|
||||
else:
|
||||
if _err:
|
||||
raise _err
|
||||
|
||||
shm = SharedMemory(name=key)
|
||||
shmarr = np.ndarray(
|
||||
(token.size,),
|
||||
dtype=token.dtype,
|
||||
(size,),
|
||||
dtype=token.dtype_descr,
|
||||
buffer=shm.buf
|
||||
)
|
||||
shmarr.setflags(write=int(not readonly))
|
||||
|
|
@@ -606,23 +414,18 @@ def attach_shm_array(
|
|||
if key not in _known_tokens:
|
||||
_known_tokens[key] = token
|
||||
|
||||
# "close" attached shm on actor teardown
|
||||
tractor.current_actor().lifetime_stack.callback(sha.close)
|
||||
# "close" attached shm on process teardown
|
||||
tractor._actor._lifetime_stack.callback(sha.close)
|
||||
|
||||
return sha
|
||||
|
||||
|
||||
def maybe_open_shm_array(
|
||||
key: str,
|
||||
size: int,
|
||||
dtype: np.dtype | None = None,
|
||||
append_start_index: int | None = None,
|
||||
readonly: bool = False,
|
||||
dtype: Optional[np.dtype] = None,
|
||||
**kwargs,
|
||||
|
||||
) -> tuple[ShmArray, bool]:
|
||||
'''
|
||||
Attempt to attach to a shared memory block using a "key" lookup
|
||||
) -> Tuple[ShmArray, bool]:
|
||||
"""Attempt to attach to a shared memory block using a "key" lookup
|
||||
to registered blocks in the user's overall "system" registry
|
||||
(presumes you don't have the block's explicit token).
|
||||
|
||||
|
|
@@ -636,74 +439,23 @@ def maybe_open_shm_array(
|
|||
|
||||
If you know the explicit ``_Token`` for your memory segment instead
|
||||
use ``attach_shm_array``.
|
||||
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
# see if we already know this key
|
||||
token = _known_tokens[key]
|
||||
return (
|
||||
attach_shm_array(
|
||||
token=token,
|
||||
readonly=readonly,
|
||||
),
|
||||
False,
|
||||
)
|
||||
return attach_shm_array(token=token, **kwargs), False
|
||||
except KeyError:
|
||||
log.debug(f"Could not find {key} in shms cache")
|
||||
log.warning(f"Could not find {key} in shms cache")
|
||||
if dtype:
|
||||
token = _make_token(
|
||||
key,
|
||||
size=size,
|
||||
dtype=dtype,
|
||||
)
|
||||
token = _make_token(key, dtype)
|
||||
try:
|
||||
return attach_shm_array(token=token, **kwargs), False
|
||||
except FileNotFoundError:
|
||||
log.debug(f"Could not attach to shm with token {token}")
|
||||
log.warning(f"Could not attach to shm with token {token}")
|
||||
|
||||
# This actor does not know about memory
|
||||
# associated with the provided "key".
|
||||
# Attempt to open a block and expect
|
||||
# to fail if a block has been allocated
|
||||
# on the OS by someone else.
|
||||
return (
|
||||
open_shm_array(
|
||||
key=key,
|
||||
size=size,
|
||||
dtype=dtype,
|
||||
append_start_index=append_start_index,
|
||||
readonly=readonly,
|
||||
),
|
||||
True,
|
||||
)
|
||||
|
||||
def try_read(
|
||||
array: np.ndarray
|
||||
|
||||
) -> Optional[np.ndarray]:
|
||||
'''
|
||||
Try to read the last row from a shared mem array or ``None``
|
||||
if the array read returns a zero-length array result.
|
||||
|
||||
Can be used to check for backfilling race conditions where an array
|
||||
is currently being (re-)written by a writer actor but the reader is
|
||||
unaware and reads during the window where the first and last indexes
|
||||
are being updated.
|
||||
|
||||
'''
|
||||
try:
|
||||
return array[-1]
|
||||
except IndexError:
|
||||
# XXX: race condition with backfilling shm.
|
||||
#
|
||||
# the underlying issue is that a backfill (aka prepend) and subsequent
|
||||
# shm array first/last index update could result in an empty array
|
||||
# read here since the indices may be updated in such a way that
|
||||
# a read delivers an empty array (though it seems like we
|
||||
# *should* be able to prevent that?). also, as an alt and
|
||||
# something we need anyway, maybe there should be some kind of
|
||||
# signal that a prepend is taking place and this consumer can
|
||||
# respond (eg. redrawing graphics) accordingly.
|
||||
|
||||
# the array read was empty
|
||||
return None
|
||||
return open_shm_array(key=key, dtype=dtype, **kwargs), True
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,5 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) 2018-present Tyler Goodlet (in stewardship for pikers)
|
||||
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
|
|
@@ -17,60 +17,52 @@
|
|||
"""
|
||||
numpy data source conversion helpers.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from typing import Dict, Any, List
|
||||
import decimal
|
||||
|
||||
from bidict import bidict
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
from pydantic import BaseModel
|
||||
# from numba import from_dtype
|
||||
|
||||
|
||||
def_iohlcv_fields: list[tuple[str, type]] = [
|
||||
|
||||
# YES WE KNOW, this isn't needed in polars but we use it for doing
|
||||
# ring-buffer like pre/append ops on our `ShmArray` real-time
|
||||
# numpy-array buffering system such that there is a master index
|
||||
# that can be used for index-arithmetic when writing data to the
|
||||
# "middle" of the array. See the ``tractor.ipc.shm`` pkg for more
|
||||
# details.
|
||||
('index', int),
|
||||
|
||||
# presume int for epoch stamps since it's most common
|
||||
# and makes the most sense to avoid float rounding issues.
|
||||
# TODO: if we want higher reso we should use the new
|
||||
# ``time.time_ns()`` in python 3.7+
|
||||
('time', int),
|
||||
ohlc_fields = [
|
||||
('time', float),
|
||||
('open', float),
|
||||
('high', float),
|
||||
('low', float),
|
||||
('close', float),
|
||||
('volume', float),
|
||||
|
||||
# TODO: can we elim this from default field set to save on mem?
|
||||
# i think only kraken really uses this in terms of what we get from
|
||||
# their ohlc history API?
|
||||
# ('bar_wap', float), # shouldn't be default right?
|
||||
('volume', int),
|
||||
('bar_wap', float),
|
||||
]
|
||||
|
||||
# remove index field
|
||||
def_ohlcv_fields: list[tuple[str, type]] = def_iohlcv_fields.copy()
|
||||
def_ohlcv_fields.pop(0)
|
||||
assert (len(def_iohlcv_fields) - len(def_ohlcv_fields)) == 1
|
||||
ohlc_with_index = ohlc_fields.copy()
|
||||
ohlc_with_index.insert(0, ('index', int))
|
||||
|
||||
# our minimum structured array layout for ohlc data
|
||||
base_iohlc_dtype = np.dtype(ohlc_with_index)
|
||||
base_ohlc_dtype = np.dtype(ohlc_fields)
|
||||
|
||||
# TODO: for now need to construct this manually for readonly arrays, see
|
||||
# https://github.com/numba/numba/issues/4511
|
||||
# from numba import from_dtype
|
||||
# base_ohlc_dtype = np.dtype(def_ohlc_fields)
|
||||
# numba_ohlc_dtype = from_dtype(base_ohlc_dtype)
|
||||
|
||||
# map time frame "keys" to seconds values
|
||||
tf_in_1s = bidict({
|
||||
1: '1s',
|
||||
60: '1m',
|
||||
60*5: '5m',
|
||||
60*15: '15m',
|
||||
60*30: '30m',
|
||||
60*60: '1h',
|
||||
60*60*24: '1d',
|
||||
})
|
||||
# map time frame "keys" to minutes values
|
||||
tf_in_1m = {
|
||||
'1m': 1,
|
||||
'5m': 5,
|
||||
'15m': 15,
|
||||
'30m': 30,
|
||||
'1h': 60,
|
||||
'4h': 240,
|
||||
'1d': 1440,
|
||||
}
|
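Since ``tf_in_1s`` is a ``bidict`` the lookup works both ways, e.g.:

    assert tf_in_1s[60] == '1m'
    assert tf_in_1s.inverse['1m'] == 60   # reverse lookup via .inverse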
||||
|
||||
|
||||
def float_digits(
|
||||
value: float,
|
||||
) -> int:
|
||||
return int(-decimal.Decimal(str(value)).as_tuple().exponent)
|
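For example (values chosen to mirror common tick sizes):

    assert float_digits(0.01) == 2     # penny-tick instruments
    assert float_digits(0.0001) == 4   # e.g. a forex-style pip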
||||
|
||||
|
||||
def ohlc_zeros(length: int) -> np.ndarray:
|
||||
|
|
@@ -83,6 +75,96 @@ def ohlc_zeros(length: int) -> np.ndarray:
|
|||
return np.zeros(length, dtype=base_ohlc_dtype)
|
||||
|
||||
|
||||
class Symbol(BaseModel):
|
||||
"""I guess this is some kinda container thing for dealing with
|
||||
all the different meta-data formats from brokers?
|
||||
|
||||
Yah, i guess dats what it izz.
|
||||
"""
|
||||
key: str
|
||||
tick_size: float = 0.01
|
||||
lot_tick_size: float = 0.01 # "volume" precision as min step value
|
||||
broker_info: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
# specifies a "class" of financial instrument
|
||||
# ex. stock, future, option, bond etc.
|
||||
type_key: str
|
||||
|
||||
@property
|
||||
def brokers(self) -> List[str]:
|
||||
return list(self.broker_info.keys())
|
||||
|
||||
def digits(self) -> int:
|
||||
"""Return the trailing number of digits specified by the min
|
||||
tick size for the instrument.
|
||||
|
||||
"""
|
||||
return float_digits(self.tick_size)
|
||||
|
||||
def lot_digits(self) -> int:
|
||||
return float_digits(self.lot_tick_size)
|
||||
|
||||
def nearest_tick(self, value: float) -> float:
|
||||
"""Return the nearest tick value based on mininum increment.
|
||||
|
||||
"""
|
||||
mult = 1 / self.tick_size
|
||||
return round(value * mult) / mult
|
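Usage sketch (field values are hypothetical):

    sym = Symbol(key='xyz', tick_size=0.25, type_key='stock')
    assert sym.nearest_tick(100.13) == 100.25
    assert sym.digits() == 2   # from the 0.25 min tick size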
||||
|
||||
|
||||
def from_df(
|
||||
df: pd.DataFrame,
|
||||
source=None,
|
||||
default_tf=None
|
||||
) -> np.recarray:
|
||||
"""Convert OHLC formatted ``pandas.DataFrame`` to ``numpy.recarray``.
|
||||
|
||||
"""
|
||||
df.reset_index(inplace=True)
|
||||
|
||||
# hackery to convert field names
|
||||
date = 'Date'
|
||||
if 'date' in df.columns:
|
||||
date = 'date'
|
||||
|
||||
# convert to POSIX time
|
||||
df[date] = [d.timestamp() for d in df[date]]
|
||||
|
||||
# try to rename from some camel case
|
||||
columns = {
|
||||
'Date': 'time',
|
||||
'date': 'time',
|
||||
'Open': 'open',
|
||||
'High': 'high',
|
||||
'Low': 'low',
|
||||
'Close': 'close',
|
||||
'Volume': 'volume',
|
||||
|
||||
# most feeds are providing this over session anchored
|
||||
'vwap': 'bar_wap',
|
||||
|
||||
# XXX: ib_insync calls this the "wap of the bar"
|
||||
# but no clue what it actually is...
|
||||
# https://github.com/pikers/piker/issues/119#issuecomment-729120988
|
||||
'average': 'bar_wap',
|
||||
}
|
||||
|
||||
df = df.rename(columns=columns)
|
||||
|
||||
for name in df.columns:
|
||||
# if name not in base_ohlc_dtype.names[1:]:
|
||||
if name not in base_ohlc_dtype.names:
|
||||
del df[name]
|
||||
|
||||
# TODO: it turns out column access on recarrays is actually slower:
|
||||
# https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
|
||||
# it might make sense to make these structured arrays?
|
||||
array = df.to_records(index=False)
|
||||
_nan_to_closest_num(array)
|
||||
|
||||
return array
|
||||
|
||||
|
||||
def _nan_to_closest_num(array: np.ndarray):
|
||||
"""Return interpolated values instead of NaN.
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,510 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Mega-simple symbology cache via TOML files.
|
||||
|
||||
Allow backend data providers and/or brokers to stash their
|
||||
symbology sets (aka the meta data we normalize into our
|
||||
`.accounting.MktPair` type) to the filesystem for faster lookup and
|
||||
offline usage.
|
||||
|
||||
'''
|
||||
from __future__ import annotations
|
||||
from contextlib import (
|
||||
asynccontextmanager as acm,
|
||||
)
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
from typing import (
|
||||
Any,
|
||||
Sequence,
|
||||
Hashable,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
from types import ModuleType
|
||||
|
||||
from rapidfuzz import process as fuzzy
|
||||
import tomli_w # for fast symbol cache writing
|
||||
import tractor
|
||||
import trio
|
||||
try:
|
||||
import tomllib
|
||||
except ModuleNotFoundError:
|
||||
import tomli as tomllib
|
||||
from msgspec import field
|
||||
|
||||
from piker.log import get_logger
|
||||
from piker import config
|
||||
from piker.types import Struct
|
||||
from piker.brokers import (
|
||||
open_cached_client,
|
||||
get_brokermod,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..accounting import (
|
||||
Asset,
|
||||
MktPair,
|
||||
)
|
||||
|
||||
log = get_logger('data.cache')
|
||||
|
||||
|
||||
class SymbologyCache(Struct):
|
||||
'''
|
||||
Asset meta-data cache which holds lookup tables for 3 sets of
|
||||
market-symbology related struct-types required by the
|
||||
`.accounting` and `.data` subsystems.
|
||||
|
||||
'''
|
||||
mod: ModuleType
|
||||
fp: Path
|
||||
|
||||
# all asset-money-systems descriptions as minimally defined by
|
||||
# in `.accounting.Asset`
|
||||
assets: dict[str, Asset] = field(default_factory=dict)
|
||||
|
||||
# backend-system pairs loaded in provider (schema) specific
|
||||
# structs.
|
||||
pairs: dict[str, Struct] = field(default_factory=dict)
|
||||
# serialized namespace path to the backend's pair-info-`Struct`
|
||||
# defn B)
|
||||
pair_ns_path: tractor.msg.NamespacePath | None = None
|
||||
|
||||
# TODO: piker-normalized `.accounting.MktPair` table?
|
||||
# loaded from the `.pairs` and a normalizer
|
||||
# provided by the backend pkg.
|
||||
mktmaps: dict[str, MktPair] = field(default_factory=dict)
|
||||
|
||||
def write_config(self) -> None:
|
||||
|
||||
# put the backend's pair-struct type ref at the top
|
||||
# of file if possible.
|
||||
cachedict: dict[str, Any] = {
|
||||
'pair_ns_path': str(self.pair_ns_path) or '',
|
||||
}
|
||||
|
||||
# serialize all tables as dicts for TOML.
|
||||
for key, table in {
|
||||
'assets': self.assets,
|
||||
'pairs': self.pairs,
|
||||
'mktmaps': self.mktmaps,
|
||||
}.items():
|
||||
if not table:
|
||||
log.warning(
|
||||
f'Asset cache table for `{key}` is empty?'
|
||||
)
|
||||
continue
|
||||
|
||||
dct = cachedict[key] = {}
|
||||
for key, struct in table.items():
|
||||
dct[key] = struct.to_dict(include_non_members=False)
|
||||
|
||||
try:
|
||||
with self.fp.open(mode='wb') as fp:
|
||||
tomli_w.dump(cachedict, fp)
|
||||
except TypeError:
|
||||
self.fp.unlink()
|
||||
raise
|
||||
|
||||
async def load(self) -> None:
|
||||
'''
|
||||
Explicitly load the "symbology set" for this provider by using
|
||||
2 required `Client` methods:
|
||||
|
||||
- `.get_assets()`: returning a table of `Asset`s
|
||||
- `.get_mkt_pairs()`: returning a table of pair-`Struct`
|
||||
types, custom defined by the particular backend.
|
||||
|
||||
AND, the required `.get_mkt_info()` module-level endpoint
|
||||
which maps `fqme: str` -> `MktPair`s.
|
||||
|
||||
These tables are then used to fill out the `.assets`, `.pairs` and
|
||||
`.mktmaps` tables on this cache instance, respectively.
|
||||
|
||||
'''
|
||||
async with open_cached_client(self.mod.name) as client:
|
||||
|
||||
if get_assets := getattr(client, 'get_assets', None):
|
||||
assets: dict[str, Asset] = await get_assets()
|
||||
for bs_mktid, asset in assets.items():
|
||||
self.assets[bs_mktid] = asset
|
||||
else:
|
||||
log.warning(
|
||||
f'No symbology cache `Asset` support for `{self.mod.name}`..\n'
|
||||
'Implement `Client.get_assets()`!'
|
||||
)
|
||||
|
||||
if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None):
|
||||
|
||||
pairs: dict[str, Struct] = await get_mkt_pairs()
|
||||
for bs_fqme, pair in pairs.items():
|
||||
|
||||
# NOTE: every backend defined pair should
|
||||
# declare it's ns path for roundtrip
|
||||
# serialization lookup.
|
||||
if not getattr(pair, 'ns_path', None):
|
||||
raise TypeError(
|
||||
f'Pair-struct for {self.mod.name} MUST define a '
|
||||
'`.ns_path: str`!\n'
|
||||
f'{pair}'
|
||||
)
|
||||
|
||||
entry = await self.mod.get_mkt_info(pair.bs_fqme)
|
||||
if not entry:
|
||||
continue
|
||||
|
||||
mkt: MktPair
|
||||
pair: Struct
|
||||
mkt, _pair = entry
|
||||
assert _pair is pair, (
|
||||
f'`{self.mod.name}` backend probably has a '
|
||||
'keying-symmetry problem between the pair-`Struct` '
|
||||
'returned from `Client.get_mkt_pairs()` and the '
|
||||
'module level endpoint: `.get_mkt_info()`\n\n'
|
||||
"Here's the struct diff:\n"
|
||||
f'{_pair - pair}'
|
||||
)
|
||||
# NOTE XXX: this means backends MUST implement
|
||||
# a `Struct.bs_mktid: str` field to provide
|
||||
# a native-keyed map to their own symbol
|
||||
# set(s).
|
||||
self.pairs[pair.bs_mktid] = pair
|
||||
|
||||
# NOTE: `MktPair`s are keyed here using piker's
|
||||
# internal FQME schema so that search,
|
||||
# accounting and feed init can be accomplished
|
||||
# a sane, uniform, normalized basis.
|
||||
self.mktmaps[mkt.fqme] = mkt
|
||||
|
||||
self.pair_ns_path: str = tractor.msg.NamespacePath.from_ref(
|
||||
pair,
|
||||
)
|
||||
|
||||
else:
|
||||
log.warning(
|
||||
f'No symbology cache `Pair` support for `{self.mod.name}`..\n'
|
||||
'Implement `Client.get_mkt_pairs()`!'
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
@classmethod
|
||||
def from_dict(
|
||||
cls: type,
|
||||
data: dict,
|
||||
**kwargs,
|
||||
) -> SymbologyCache:
|
||||
|
||||
# normal init inputs
|
||||
cache = cls(**kwargs)
|
||||
|
||||
# XXX WARNING: this may break if backend namespacing
|
||||
# changes (eg. `Pair` class def is moved to another
|
||||
# module) in which case you can manually update the
|
||||
# `pair_ns_path` in the symcache file and try again.
|
||||
# TODO: probably a verbose error about this?
|
||||
Pair: type = tractor.msg.NamespacePath(
|
||||
str(data['pair_ns_path'])
|
||||
).load_ref()
|
||||
|
||||
pairtable = data.pop('pairs')
|
||||
for key, pairdict in pairtable.items():
|
||||
|
||||
# allow each serialized pair-dict-table to declare its
|
||||
# specific struct type's path in cases where a backend
|
||||
# supports multiples (normally with different
|
||||
# schemas..) and we are storing them in a flat `.pairs`
|
||||
# table.
|
||||
ThisPair = Pair
|
||||
if this_pair_type := pairdict.get('ns_path'):
|
||||
ThisPair: type = tractor.msg.NamespacePath(
|
||||
str(this_pair_type)
|
||||
).load_ref()
|
||||
|
||||
pair: Struct = ThisPair(**pairdict)
|
||||
cache.pairs[key] = pair
|
||||
|
||||
from ..accounting import (
|
||||
Asset,
|
||||
MktPair,
|
||||
)
|
||||
|
||||
# load `dict` -> `Asset`
|
||||
assettable = data.pop('assets')
|
||||
for name, asdict in assettable.items():
|
||||
cache.assets[name] = Asset.from_msg(asdict)
|
||||
|
||||
# load `dict` -> `MktPair`
|
||||
dne: list[str] = []
|
||||
mkttable = data.pop('mktmaps')
|
||||
for fqme, mktdict in mkttable.items():
|
||||
|
||||
mkt = MktPair.from_msg(mktdict)
|
||||
assert mkt.fqme == fqme
|
||||
|
||||
# sanity check asset refs from those (presumably)
|
||||
# loaded asset set above.
|
||||
src: Asset = cache.assets[mkt.src.name]
|
||||
assert src == mkt.src
|
||||
dst: Asset
|
||||
if not (dst := cache.assets.get(mkt.dst.name)):
|
||||
dne.append(mkt.dst.name)
|
||||
continue
|
||||
else:
|
||||
assert dst.name == mkt.dst.name
|
||||
|
||||
cache.mktmaps[fqme] = mkt
|
||||
|
||||
log.warning(
|
||||
f'These `MktPair.dst: Asset`s DNE says `{cache.mod.name}`?\n'
|
||||
f'{pformat(dne)}'
|
||||
)
|
||||
return cache
|
||||
|
||||
@staticmethod
|
||||
async def from_scratch(
|
||||
mod: ModuleType,
|
||||
fp: Path,
|
||||
**kwargs,
|
||||
|
||||
) -> SymbologyCache:
|
||||
'''
|
||||
Generate (a) new symcache (contents) entirely from scratch
|
||||
including all (TOML) serialized data and file.
|
||||
|
||||
'''
|
||||
log.info(f'GENERATING symbology cache for `{mod.name}`')
|
||||
cache = SymbologyCache(
|
||||
mod=mod,
|
||||
fp=fp,
|
||||
**kwargs,
|
||||
)
|
||||
await cache.load()
|
||||
cache.write_config()
|
||||
return cache
|
||||
|
||||
def search(
|
||||
self,
|
||||
pattern: str,
|
||||
table: str = 'mktmaps'
|
||||
|
||||
) -> dict[str, Struct]:
|
||||
'''
|
||||
(Fuzzy) search this cache's `.mktmaps` table, which is
|
||||
keyed by FQMEs, for `pattern: str` and return the best
|
||||
matches in a `dict` including the `MktPair` values.
|
||||
|
||||
'''
|
||||
matches = fuzzy.extract(
|
||||
pattern,
|
||||
getattr(self, table),
|
||||
score_cutoff=50,
|
||||
)
|
||||
|
||||
# repack in dict[fqme, MktPair] form
|
||||
return {
|
||||
item[0].fqme: item[0]
|
||||
for item in matches
|
||||
}
|
||||
|
||||
|
||||
# actor-process-local in-mem-cache of symcaches (by backend).
|
||||
_caches: dict[str, SymbologyCache] = {}
|
||||
|
||||
|
||||
def mk_cachefile(
|
||||
provider: str,
|
||||
) -> Path:
|
||||
cachedir: Path = config.get_conf_dir() / '_cache'
|
||||
if not cachedir.is_dir():
|
||||
log.info(f'Creating `nativedb` directory: {cachedir}')
|
||||
cachedir.mkdir()
|
||||
|
||||
cachefile: Path = cachedir / f'{str(provider)}.symcache.toml'
|
||||
cachefile.touch()
|
||||
return cachefile
|
||||
|
||||
|
||||
@acm
async def open_symcache(
    mod_or_name: ModuleType | str,

    reload: bool = False,
    only_from_memcache: bool = False,  # no API req
    _no_symcache: bool = False,  # no backend support

) -> SymbologyCache:

    if isinstance(mod_or_name, str):
        mod = get_brokermod(mod_or_name)
    else:
        mod: ModuleType = mod_or_name

    provider: str = mod.name
    cachefile: Path = mk_cachefile(provider)

    # NOTE: certain backends might not support a symbology cache
    # (easily) and thus we allow for an empty instance to be loaded
    # and manually filled in at the whim of the caller presuming
    # the backend pkg-module is annotated appropriately.
    if (
        getattr(mod, '_no_symcache', False)
        or _no_symcache
    ):
        yield SymbologyCache(
            mod=mod,
            fp=cachefile,
        )
        # don't do nuttin
        return

    # actor-level cache-cache XD
    global _caches
    if not reload:
        try:
            yield _caches[provider]
            # cache-hit: short circuit so we never yield twice
            return
        except KeyError:
            msg: str = (
                f'No asset info cache exists yet for `{provider}`'
            )
            if only_from_memcache:
                raise RuntimeError(msg)
            else:
                log.warning(msg)

    # TODO: use a real profiling sys..
    # https://github.com/pikers/piker/issues/337
    import time
    now = time.time()  # hoisted so the load-time log below always works

    # if no cache exists or an explicit reload is requested, load
    # the provider API and call appropriate endpoints to populate
    # the mkt and asset tables.
    if (
        reload
        or not cachefile.is_file()
    ):
        cache = await SymbologyCache.from_scratch(
            mod=mod,
            fp=cachefile,
        )

    else:
        log.info(
            f'Loading EXISTING `{mod.name}` symbology cache:\n'
            f'> {cachefile}'
        )
        with cachefile.open('rb') as existing_fp:
            data: dict[str, dict] = tomllib.load(existing_fp)
            log.runtime(f'SYMCACHE TOML LOAD TIME: {time.time() - now}')

        # if there's an empty file for some reason we need
        # to do a full reload as well!
        if not data:
            cache = await SymbologyCache.from_scratch(
                mod=mod,
                fp=cachefile,
            )
        else:
            cache = SymbologyCache.from_dict(
                data,
                mod=mod,
                fp=cachefile,
            )

    log.info(f'SYMCACHE LOAD TIME: {time.time() - now}')

    yield cache

    # TODO: write only when changes detected? but that should
    # never happen right except on reload?
    # cache.write_config()

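A minimal async usage sketch, assuming the named backend module is installed (a from-scratch generation may additionally need the runtime bits that `get_symcache()` below spawns):

    import trio

    async def main():
        async with open_symcache('binance') as cache:
            print(f'{len(cache.mktmaps)} mkts cached for binance')

    trio.run(main)
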
def get_symcache(
    provider: str,
    force_reload: bool = False,

) -> SymbologyCache:
    '''
    Get any available symbology/assets cache from sync code by
    (maybe) manually running `trio` to do the work.

    '''
    # spawn tractor runtime and generate cache
    # if not existing.
    async def sched_gen_symcache():
        async with (
            # only for runtime's debug mode
            tractor.open_nursery(debug_mode=True),

            open_symcache(
                get_brokermod(provider),
                reload=force_reload,
            ) as symcache,
        ):
            return symcache

    try:
        symcache: SymbologyCache = trio.run(sched_gen_symcache)
        assert symcache
    except BaseException:
        import pdbp
        pdbp.xpm()
        raise  # re-raise after post-mortem; `symcache` may be unbound

    return symcache

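Or equivalently from synchronous code:

    cache: SymbologyCache = get_symcache('binance')
    print(list(cache.assets))
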
def match_from_pairs(
    pairs: dict[str, Struct],
    query: str,
    score_cutoff: int = 50,
    **extract_kwargs,

) -> dict[str, Struct]:
    '''
    Fuzzy search over a "pairs table" maintained by most backends
    as part of their symbology-info caching internals.

    Scan the native symbol key set and return the best ranked
    matches back in a new `dict`.

    '''

    # TODO: somehow cache this list (per call) like we were in
    # `open_symbol_search()`?
    keys: list[str] = list(pairs)
    matches: list[tuple[
        Sequence[Hashable],  # matching input key
        Any,  # scores
        Any,
    ]] = fuzzy.extract(
        # NOTE: most backends provide keys uppercased
        query=query,
        choices=keys,
        score_cutoff=score_cutoff,
        **extract_kwargs,
    )

    # pop and repack pairs in output dict
    matched_pairs: dict[str, Struct] = {}
    for item in matches:
        pair_key: str = item[0]
        matched_pairs[pair_key] = pairs[pair_key]

    return matched_pairs

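A self-contained sketch with a toy pairs-table standing in for a real backend's:

    from piker.types import Struct

    class FakePair(Struct):
        symbol: str

    pairs: dict[str, FakePair] = {
        'BTCUSDT': FakePair(symbol='BTCUSDT'),
        'ETHUSDT': FakePair(symbol='ETHUSDT'),
    }
    # NOTE: keys are uppercased like most backends provide them
    matched = match_from_pairs(pairs, query='BTCUSDT')
    assert 'BTCUSDT' in matched
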
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -18,30 +18,13 @@
ToOlS fOr CoPInG wITh "tHE wEB" protocols.

"""
from __future__ import annotations
from contextlib import (
    asynccontextmanager as acm,
)
from itertools import count
from functools import partial
from contextlib import asynccontextmanager, AsyncExitStack
from types import ModuleType
from typing import (
    Any,
    Optional,
    Callable,
    AsyncContextManager,
    AsyncGenerator,
    Iterable,
)
from typing import Any, Callable
import json

import trio
from trio_typing import TaskStatus
from trio_websocket import (
    WebSocketConnection,
    open_websocket_url,
)
from wsproto.utilities import LocalProtocolError
import trio_websocket
from trio_websocket._impl import (
    ConnectionClosed,
    DisconnectionTimeout,
@@ -50,71 +33,73 @@ from trio_websocket._impl import (
    ConnectionTimeout,
)

from piker.types import Struct
from ._util import log
from ..log import get_logger

log = get_logger(__name__)


class NoBsWs:
    '''
    Make ``trio_websocket`` sockets stay up no matter the bs.
    """Make ``trio_websocket`` sockets stay up no matter the bs.

    A shim interface that allows client code to stream from some
    ``WebSocketConnection`` but where any connectivity bs is handled
    automatically and entirely in the background.

    NOTE: this type should never be created directly but instead is
    provided via the ``open_autorecon_ws()`` factory below.

    '''
    # apparently we can QoS for all sorts of reasons..so catch em.
    """
    recon_errors = (
        ConnectionClosed,
        DisconnectionTimeout,
        ConnectionRejected,
        HandshakeError,
        ConnectionTimeout,
        LocalProtocolError,
    )

    def __init__(
        self,
        url: str,
        rxchan: trio.MemoryReceiveChannel,
        msg_recv_timeout: float,

        serializer: ModuleType = json
        stack: AsyncExitStack,
        fixture: Callable,
        serializer: ModuleType = json,
    ):
        self.url = url
        self._rx = rxchan
        self._timeout = msg_recv_timeout
        self.fixture = fixture
        self._stack = stack
        self._ws: 'WebSocketConnection' = None  # noqa

        # signaling between caller and relay task which determines when
        # socket is connected (and subscribed).
        self._connected: trio.Event = trio.Event()
    async def _connect(
        self,
        tries: int = 1000,
    ) -> None:
        while True:
            try:
                await self._stack.aclose()
            except (DisconnectionTimeout, RuntimeError):
                await trio.sleep(0.5)
            else:
                break

        # dynamically reset by the bg relay task
        self._ws: WebSocketConnection | None = None
        self._cs: trio.CancelScope | None = None
        last_err = None
        for i in range(tries):
            try:
                self._ws = await self._stack.enter_async_context(
                    trio_websocket.open_websocket_url(self.url)
                )
                # rerun user code fixture
                ret = await self._stack.enter_async_context(
                    self.fixture(self)
                )
                assert ret is None

                # interchange codec methods
                # TODO: obviously the method API here may be different
                # for another interchange format..
                self._dumps: Callable = serializer.dumps
                self._loads: Callable = serializer.loads
                log.info(f'Connection success: {self.url}')
                return self._ws

    def connected(self) -> bool:
        return self._connected.is_set()

    async def reset(self) -> None:
        '''
        Reset the underlying ws connection by cancelling
        the bg relay task and waiting for it to signal
        a new connection.

        '''
        self._connected = trio.Event()
        self._cs.cancel()
        await self._connected.wait()
            except self.recon_errors as err:
                last_err = err
                log.error(
                    f'{self} connection bail with '
                    f'{type(err)}...retry attempt {i}'
                )
                await trio.sleep(0.5)
                continue
            else:
                log.exception('ws connection fail...')
                raise last_err

    async def send_msg(
        self,
@@ -122,397 +107,36 @@ class NoBsWs:
    ) -> None:
        while True:
            try:
                msg: Any = self._dumps(data)
                return await self._ws.send_message(msg)
                return await self._ws.send_message(json.dumps(data))
            except self.recon_errors:
                await self.reset()
                await self._connect()

    async def recv_msg(self) -> Any:
        msg: Any = await self._rx.receive()
        data = self._loads(msg)
        return data

    def __aiter__(self):
        return self

    async def __anext__(self):
        return await self.recv_msg()

    def set_recv_timeout(
    async def recv_msg(
        self,
        timeout: float,
    ) -> None:
        self._timeout = timeout


async def _reconnect_forever(
    url: str,
    snd: trio.MemorySendChannel,
    nobsws: NoBsWs,
    reset_after: int,  # msg recv timeout before reset attempt

    fixture: AsyncContextManager | None = None,
    task_status: TaskStatus = trio.TASK_STATUS_IGNORED,

) -> None:

    # TODO: can we just report "where" in the call stack
    # the client code is using the ws stream?
    # Maybe we can just drop this since it's already in the log msg
    # prefix?
    if fixture is not None:
        src_mod: str = fixture.__module__
    else:
        src_mod: str = 'unknown'

    async def proxy_msgs(
        ws: WebSocketConnection,
        pcs: trio.CancelScope,  # parent cancel scope
    ):
        '''
        Receive (under `timeout` deadline) all msgs from the underlying
        websocket and relay them to the (calling) parent task via a
        ``trio`` mem chan.

        '''
        # after so many msg recv timeouts, reset the connection
        timeouts: int = 0

    ) -> Any:
        while True:
            with trio.move_on_after(
                # can be dynamically changed by user code
                nobsws._timeout,
            ) as cs:
                try:
                    msg: Any = await ws.get_message()
                    await snd.send(msg)
                except nobsws.recon_errors:
                    log.exception(
                        f'{src_mod}\n'
                        f'{url} connection bail with:'
                    )
                    await trio.sleep(0.5)
                    pcs.cancel()

                    # go back to reconnect loop in parent task
                    return

            if cs.cancelled_caught:
                timeouts += 1
                if timeouts > reset_after:
                    log.error(
                        f'{src_mod}\n'
                        'WS feed seems down and slow af.. reconnecting\n'
                    )
                    pcs.cancel()

                    # go back to reconnect loop in parent task
                    return

    async def open_fixture(
        fixture: AsyncContextManager,
        nobsws: NoBsWs,
        task_status: TaskStatus = trio.TASK_STATUS_IGNORED,
    ):
        '''
        Open user provided `@acm` and sleep until any connection
        reset occurs.

        '''
        async with fixture(nobsws) as ret:
            assert ret is None
            task_status.started()
            await trio.sleep_forever()

    # last_err = None
    nobsws._connected = trio.Event()
    task_status.started()

    while not snd._closed:
        log.info(
            f'{src_mod}\n'
            f'{url} trying (RE)CONNECT'
        )

        ws: WebSocketConnection
        try:
            async with (
                trio.open_nursery() as n,
                open_websocket_url(url) as ws,
            ):
                cs = nobsws._cs = n.cancel_scope
                nobsws._ws = ws
                log.info(
                    f'{src_mod}\n'
                    f'Connection success: {url}'
                )

                # begin relay loop to forward msgs
                n.start_soon(
                    proxy_msgs,
                    ws,
                    cs,
                )

                if fixture is not None:
                    log.info(
                        f'{src_mod}\n'
                        f'Entering fixture: {fixture}'
                    )

                    # TODO: should we return an explicit sub-cs
                    # from this fixture task?
                    await n.start(
                        open_fixture,
                        fixture,
                        nobsws,
                    )

                # indicate to wrapper / opener that we are up and block
                # to let tasks run **inside** the ws open block above.
                nobsws._connected.set()
                await trio.sleep_forever()
        except HandshakeError:
            log.exception('Retrying connection')

        # ws & nursery block ends

        nobsws._connected = trio.Event()
        if cs.cancelled_caught:
            log.cancel(
                f'{url} connection cancelled!'
            )
            # if wrapper cancelled us, we expect it to also
            # have re-assigned a new event
            assert (
                nobsws._connected
                and not nobsws._connected.is_set()
            )

        # -> from here, move to next reconnect attempt iteration
        # in the while loop above Bp

        else:
            log.exception(
                f'{src_mod}\n'
                'ws connection closed by client...'
            )
        try:
            return json.loads(await self._ws.get_message())
        except self.recon_errors:
            await self._connect()


@acm
@asynccontextmanager
async def open_autorecon_ws(
    url: str,

    fixture: AsyncContextManager | None = None,
    # TODO: proper type annot smh
    fixture: Callable,
):
    """Apparently we can QoS for all sorts of reasons..so catch em.

    # time in sec between msgs received before
    # we presume connection might need a reset.
    msg_recv_timeout: float = 16,

    # count of the number of above timeouts before connection reset
    reset_after: int = 3,

) -> AsyncGenerator[tuple[...], NoBsWs]:
    '''
    An auto-reconnect websocket (wrapper API) around
    ``trio_websocket.open_websocket_url()`` providing automatic
    re-connection on network errors, msg latency and thus roaming.

    Here we implement a re-connect websocket interface where a bg
    nursery runs ``WebSocketConnection.receive_message()``s in a loop
    and restarts the full http(s) handshake on catches of certain
    connectivity errors, or some user defined recv timeout.

    You can provide a ``fixture`` async-context-manager which will be
    entered/exited around each connection reset; e.g. for (re)requesting
    subscriptions without requiring streaming setup code to rerun.

    '''
    snd: trio.MemorySendChannel
    rcv: trio.MemoryReceiveChannel
    snd, rcv = trio.open_memory_channel(616)

    async with trio.open_nursery() as n:
        nobsws = NoBsWs(
            url,
            rcv,
            msg_recv_timeout=msg_recv_timeout,
        )
        await n.start(
            partial(
                _reconnect_forever,
                url,
                snd,
                nobsws,
                fixture=fixture,
                reset_after=reset_after,
            )
        )
        await nobsws._connected.wait()
        assert nobsws._cs
        assert nobsws.connected()
    """
    async with AsyncExitStack() as stack:
        ws = NoBsWs(url, stack, fixture=fixture)
        await ws._connect()

        try:
            yield nobsws
            yield ws

        finally:
            n.cancel_scope.cancel()

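A usage sketch of the (newer) fixture-based API; the endpoint and subscription payload below are made up:

    from contextlib import asynccontextmanager as acm

    @acm
    async def subscribe(ws: NoBsWs):
        # (re)sent on every (re)connect by the bg relay machinery
        await ws.send_msg({'method': 'subscribe', 'params': ['ticker.fake']})
        yield  # must yield `None` per the fixture contract

    async def stream():
        async with open_autorecon_ws(
            'wss://example.com/ws',  # hypothetical endpoint
            fixture=subscribe,
        ) as ws:
            async for msg in ws:
                print(msg)
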
'''
JSONRPC response-request style machinery for transparent multiplexing
of msgs over a `NoBsWs`.

'''


class JSONRPCResult(Struct):
    id: int
    jsonrpc: str = '2.0'
    result: Optional[dict] = None
    error: Optional[dict] = None


@acm
async def open_jsonrpc_session(
    url: str,
    start_id: int = 0,
    response_type: type = JSONRPCResult,
    msg_recv_timeout: float = float('inf'),
    # ^NOTE, since only `deribit` is using this jsonrpc stuff atm
    # and options mkts are generally "slow moving"..
    #
    # FURTHER if we break the underlying ws connection then since we
    # don't pass a `fixture` to the task that manages `NoBsWs`, i.e.
    # `_reconnect_forever()`, the jsonrpc "transport pipe" gets
    # broken and never restored with wtv init sequence is required to
    # re-establish a working req-resp session.

) -> Callable[[str, dict], dict]:
    '''
    Init a json-RPC-over-websocket connection to the provided `url`.

    A `json_rpc: Callable[[str, dict], dict]` is delivered to the
    caller for sending requests and a bg-`trio.Task` handles
    processing of response msgs including error reporting/raising in
    the parent/caller task.

    '''
    # NOTE, store all request msgs so we can raise errors on the
    # caller side!
    req_msgs: dict[int, dict] = {}

    async with (
        trio.open_nursery() as tn,
        open_autorecon_ws(
            url=url,
            msg_recv_timeout=msg_recv_timeout,
        ) as ws
    ):
        rpc_id: Iterable[int] = count(start_id)
        rpc_results: dict[int, dict] = {}

        async def json_rpc(
            method: str,
            params: dict,
        ) -> dict:
            '''
            Perform a json rpc call and wait for the result; raise an
            exception if an error field is present in the response.
            '''
            nonlocal req_msgs

            req_id: int = next(rpc_id)
            msg = {
                'jsonrpc': '2.0',
                'id': req_id,
                'method': method,
                'params': params
            }
            _id = msg['id']

            result = rpc_results[_id] = {
                'result': None,
                'error': None,
                'event': trio.Event(),  # signal caller resp arrived
            }
            req_msgs[_id] = msg

            await ws.send_msg(msg)

            # wait for response before unblocking requester code
            await rpc_results[_id]['event'].wait()

            if (maybe_result := result['result']):
                ret = maybe_result
                del rpc_results[_id]

            else:
                err = result['error']
                raise Exception(
                    f'JSONRPC request failed\n'
                    f'req: {msg}\n'
                    f'resp: {err}\n'
                )

            if ret.error is not None:
                raise Exception(json.dumps(ret.error, indent=4))

            return ret

        async def recv_task():
            '''
            Receive every ws message and store it in its corresponding
            result field, then set the event to wake up the original
            sender task. Also receives responses to requests originated
            from the server side.

            '''
            nonlocal req_msgs
            async for msg in ws:
                match msg:
                    case {
                        'result': _,
                        'id': mid,
                    } if res_entry := rpc_results.get(mid):

                        res_entry['result'] = response_type(**msg)
                        res_entry['event'].set()

                    case {
                        'result': _,
                        'id': mid,
                    } if not rpc_results.get(mid):
                        log.warning(
                            f'Unexpected ws msg: {json.dumps(msg, indent=4)}'
                        )

                    case {
                        'method': _,
                        'params': _,
                    }:
                        log.debug(f'Received\n{msg}')

                    case {
                        'error': error
                    }:
                        # retrieve orig request msg, set error
                        # response in original "result" msg,
                        # THEN FINALLY set the event to signal caller
                        # to raise the error in the parent task.
                        req_id: int = error['id']
                        req_msg: dict = req_msgs[req_id]
                        result: dict = rpc_results[req_id]
                        result['error'] = error
                        result['event'].set()
                        log.error(
                            f'JSONRPC request failed\n'
                            f'req: {req_msg}\n'
                            f'resp: {error}\n'
                        )

                    case _:
                        log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')

        tn.start_soon(recv_task)
        yield json_rpc
        tn.cancel_scope.cancel()
        await stack.aclose()

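Caller-side the delivered `json_rpc()` is simply awaited per request; the method name below is a deribit-style example, not a guaranteed endpoint:

    async def get_server_time():
        async with open_jsonrpc_session(
            'wss://example.com/api/v2',  # hypothetical url
        ) as json_rpc:
            res: JSONRPCResult = await json_rpc(
                'public/get_time',
                params={},
            )
            print(res.result)
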
@@ -0,0 +1,159 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
marketstore cli.
"""
from typing import List
from functools import partial
from pprint import pformat

import trio
import tractor
import click

from .marketstore import (
    get_client,
    stream_quotes,
    ingest_quote_stream,
    _url,
    _tick_tbk_ids,
    mk_tbk,
)
from ..cli import cli
from .. import watchlists as wl
from ..log import get_logger


log = get_logger(__name__)


@cli.command()
@click.option(
    '--url',
    default='ws://localhost:5993/ws',
    help='HTTP URL of marketstore instance'
)
@click.argument('names', nargs=-1)
@click.pass_obj
def ms_stream(config: dict, names: List[str], url: str):
    """Connect to a marketstore time bucket stream for (a set of) symbol(s)
    and print to console.
    """
    async def main():
        async for quote in stream_quotes(symbols=names):
            log.info(f"Received quote:\n{quote}")

    trio.run(main)


@cli.command()
@click.option(
    '--url',
    default=_url,
    help='HTTP URL of marketstore instance'
)
@click.argument('names', nargs=-1)
@click.pass_obj
def ms_destroy(config: dict, names: List[str], url: str) -> None:
    """Destroy symbol entries in the local marketstore instance.
    """
    async def main():
        nonlocal names
        async with get_client(url) as client:

            if not names:
                names = await client.list_symbols()

            # default is to wipe db entirely.
            answer = input(
                "This will entirely wipe your local marketstore db @ "
                f"{url} of the following symbols:\n {pformat(names)}"
                "\n\nDelete [N/y]?\n")

            if answer == 'y':
                for sym in names:
                    # tbk = _tick_tbk.format(sym)
                    # NOTE: `tuple()` takes a single iterable arg so
                    # build the tbk tuple directly.
                    tbk = (sym, *_tick_tbk_ids)
                    print(f"Destroying {tbk}..")
                    await client.destroy(mk_tbk(tbk))
            else:
                print("Nothing deleted.")

    tractor.run(main)


@cli.command()
@click.option(
    '--tl',
    is_flag=True,
    help='Enable tractor logging')
@click.option(
    '--url',
    default=_url,
    help='HTTP URL of marketstore instance'
)
@click.argument('name', nargs=1, required=True)
@click.pass_obj
def ms_shell(config, name, tl, url):
    """Start an IPython shell ready to query the local marketstore db.
    """
    async def main():
        async with get_client(url) as client:
            query = client.query  # noqa
            # TODO: write magics to query marketstore
            from IPython import embed
            embed()

    tractor.run(main)


@cli.command()
@click.option('--test-file', '-t', help='Test quote stream file')
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.option(
    '--url',
    default=_url,
    help='HTTP URL of marketstore instance'
)
@click.argument('name', nargs=1, required=True)
@click.pass_obj
def ingest(config, name, test_file, tl, url):
    """Ingest real-time broker quotes and ticks to a marketstore instance.
    """
    # global opts
    brokermod = config['brokermod']
    loglevel = config['loglevel']
    tractorloglevel = config['tractorloglevel']
    # log = config['log']

    watchlist_from_file = wl.ensure_watchlists(config['wl_path'])
    watchlists = wl.merge_watchlist(watchlist_from_file, wl._builtins)
    symbols = watchlists[name]

    tractor.run(
        partial(
            ingest_quote_stream,
            symbols,
            brokermod.name,
            tries=1,
            loglevel=loglevel,
        ),
        name='ingest_marketstore',
        loglevel=tractorloglevel,
        debug_mode=True,
    )
piker/data/feed.py (1123 lines changed): file diff suppressed because it is too large.
Some files were not shown because too many files have changed in this diff.